From 9dfce8b0b54250cb9ceb21e7eb8addc7102989c7 Mon Sep 17 00:00:00 2001
From: Spencer Killen
Date: Sat, 14 May 2022 22:11:20 -0600
Subject: [PATCH] Add frankenscript

---
 bphys/baked_physics_to_pc2.py | 422 ++++++++++++++++++++++++++++++++++
 1 file changed, 422 insertions(+)
 create mode 100644 bphys/baked_physics_to_pc2.py

diff --git a/bphys/baked_physics_to_pc2.py b/bphys/baked_physics_to_pc2.py
new file mode 100644
index 0000000..d6576db
--- /dev/null
+++ b/bphys/baked_physics_to_pc2.py
@@ -0,0 +1,422 @@
+#!/usr/bin/env python
+
+"""
+A frankenscript that converts baked softbody physics data to a pc2 file
+"""
+
+
+# ##### BEGIN GPL LICENSE BLOCK #####
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+#
+# ##### END GPL LICENSE BLOCK #####
+
+
+# SOURCE: https://blenderartists.org/t/point-cache-doctor-script/593708
+
+import os, struct, sys, argparse
+from sys import argv, flags
+from os import path
+
+_byteorder = 'little'
+_byteorder_fmt = '<'
+
+
+def dict_merge(d, other):
+    """Utility function for creating a modified dict"""
+    c = d.copy()
+    c.update(other)
+    return c
+
+
+def print_progress_bar(msg, percent, size):
+    sys.stdout.write('{0}\r[{1}] {2}%'.format(msg, ('#'*int(percent/100.0*size)).ljust(size), percent))
+    sys.stdout.flush()
+
+
+def cache_files(directory, index=0):
+    """Cache frame files in a directory"""
+    for filename in os.listdir(directory):
+        try:
+            base, ext = path.splitext(filename)
+            parts = base.split('_')
+            if len(parts) in (2, 3):
+                cframe = int(parts[1])
+                cindex = int(parts[2]) if len(parts) >= 3 else 0
+                if cindex == index:
+                    yield cframe, filename
+        except:
+            pass
+
+def cache_file_list(directory, index=0):
+    """Cache frame files in a directory, sorted by frame"""
+    return sorted(cache_files(directory, index), key=lambda item: item[0])
+
+
+def pack_string(v, size):
+    return struct.pack('%ds' % size, v.encode(encoding='UTF-8'))
+
+def unpack_string(b, size):
+    return struct.unpack('%ds' % size, b)[0].decode(encoding='UTF-8')
+
+def pack_uint(v):
+    return struct.pack('I', v)
+
+def unpack_uint(b):
+    return struct.unpack('I', b)[0]
+
+def pack_float(v):
+    return struct.pack('f', v)
+
+def unpack_float(b):
+    return struct.unpack('f', b)[0]
+
+def pack_vector(v):
+    return struct.pack('fff', v[0], v[1], v[2])
+
+def unpack_vector(b):
+    return struct.unpack('fff', b)
+
+def pack_quaternion(v):
+    return struct.pack('ffff', v[0], v[1], v[2], v[3])
+
+def unpack_quaternion(b):
+    return struct.unpack('ffff', b)
+
+def pack_color(v):
+    return struct.pack('ffff', v[0], v[1], v[2], v[3])
+
+def unpack_color(b):
+    return struct.unpack('ffff', b)
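+
+# A minimal illustrative sketch (not part of the original script) of how the
+# pack_*/unpack_* helpers above round-trip values through the struct module;
+# the literal values are arbitrary examples:
+#
+#   unpack_vector(pack_vector((1.0, 2.0, 3.0)))   # -> (1.0, 2.0, 3.0)
+#   unpack_uint(pack_uint(42))                    # -> 42
+#   unpack_string(pack_string('BPHYSICS', 8), 8)  # -> 'BPHYSICS'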
+
+class ParticleTimes():
+    __slots__ = ('birthtime', 'lifetime', 'dietime')
+
+    def __init__(self, birthtime, lifetime, dietime):
+        self.birthtime = birthtime
+        self.lifetime = lifetime
+        self.dietime = dietime
+
+def pack_particle_times(v):
+    return struct.pack('fff', v.birthtime, v.lifetime, v.dietime)
+
+def unpack_particle_times(b):
+    birthtime, lifetime, dietime = struct.unpack('fff', b)
+    return ParticleTimes(birthtime, lifetime, dietime)
+
+class BoidData():
+    __slots__ = ('health', 'acceleration', 'state_id', 'mode')
+
+    def __init__(self, health, acceleration, state_id, mode):
+        self.health = health
+        self.acceleration = acceleration
+        self.state_id = state_id
+        self.mode = mode
+
+def pack_boid(v):
+    return struct.pack('ffffhh', v.health, v.acceleration[0], v.acceleration[1], v.acceleration[2], v.state_id, v.mode)
+
+def unpack_boid(b):
+    health, acc0, acc1, acc2, state_id, mode = struct.unpack('ffffhh', b)
+    return BoidData(health, (acc0, acc1, acc2), state_id, mode)
+
+
+class TypeDesc():
+    """Data type descriptor"""
+
+    def __init__(self, index, name, size, pack, unpack):
+        self.index = index
+        self.name = name
+        self.size = size
+        self.pack = pack
+        self.unpack = unpack
+
+    def __str__(self):
+        return self.name
+
+    def __repr__(self):
+        return "TypeDesc(name=%r, size=%d)" % (self.name, self.size)
+
+_data_types_softbody = (
+    TypeDesc(1, 'LOCATION', 12, pack_vector, unpack_vector),
+    TypeDesc(2, 'VELOCITY', 12, pack_vector, unpack_vector),
+    )
+
+_data_types_particles = (
+    TypeDesc(0, 'INDEX', 4, pack_uint, unpack_uint),
+    TypeDesc(1, 'LOCATION', 12, pack_vector, unpack_vector),
+    TypeDesc(2, 'VELOCITY', 12, pack_vector, unpack_vector),
+    TypeDesc(3, 'ROTATION', 16, pack_quaternion, unpack_quaternion),
+    TypeDesc(4, 'AVELOCITY', 12, pack_vector, unpack_vector),
+    TypeDesc(5, 'SIZE', 4, pack_float, unpack_float),
+    TypeDesc(6, 'TIMES', 12, pack_particle_times, unpack_particle_times),
+    TypeDesc(7, 'BOIDS', 20, pack_boid, unpack_boid),
+    )
+
+_data_types_cloth = (
+    TypeDesc(1, 'LOCATION', 12, pack_vector, unpack_vector),
+    TypeDesc(2, 'VELOCITY', 12, pack_vector, unpack_vector),
+    TypeDesc(4, 'XCONST', 12, pack_vector, unpack_vector),
+    )
+
+_data_types_smoke = (
+    TypeDesc(1, 'SMOKE_LOW', 12, pack_vector, unpack_vector),
+    TypeDesc(2, 'SMOKE_HIGH', 12, pack_vector, unpack_vector),
+    )
+
+_data_types_dynamicpaint = (
+    TypeDesc(3, 'DYNAMICPAINT', 16, pack_color, unpack_color),
+    )
+
+_data_types_rigidbody = (
+    TypeDesc(1, 'LOCATION', 12, pack_vector, unpack_vector),
+    TypeDesc(3, 'ROTATION', 16, pack_quaternion, unpack_quaternion),
+    )
+
+_type_map = {
+    0 : ('SOFTBODY', _data_types_softbody),
+    1 : ('PARTICLES', _data_types_particles),
+    2 : ('CLOTH', _data_types_cloth),
+    3 : ('SMOKE_DOMAIN', _data_types_smoke),
+    4 : ('SMOKE_HIGHRES', _data_types_smoke),
+    5 : ('DYNAMICPAINT', _data_types_dynamicpaint),
+    6 : ('RIGIDBODY', _data_types_rigidbody),
+    }
+
+_flag_map = {
+    0x00010000 : 'compress',
+    0x00020000 : 'extra_data',
+    }
+
+class CacheFrame():
+    def __init__(self, filename):
+        self.filename = filename
+        self.totpoint = 0
+        self.data_types = tuple()
+        self.data = tuple()
+
+    def get_data_type(self, name):
+        for dt in self.data_types:
+            if dt.name == name:
+                return dt
+        return None
+
+    def get_data(self, name):
+        for dt, data in zip(self.data_types, self.data):
+            if dt.name == name:
+                return data
+        return None
+
+    def set_data(self, name, values):
+        self.data = tuple(data if dt.name != name else values for dt, data in zip(self.data_types, self.data))
+
+    def read(self, directory, read_data):
+        cachetype = ""
+        data_types = {}
+
+        f = open(path.join(directory, self.filename), "rb")
+        try:
+            cachetype, data_types = self.read_header(f)
+
+            if read_data:
+                self.read_points(f)
+            else:
+                self.data = None
+
+        finally:
+            f.close()
+
+        return cachetype, data_types
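+
+    # Layout of the bphys cache header parsed by read_header() below
+    # (summarized from the code for reference):
+    #   bytes 0-7   : ASCII magic 'BPHYSICS'
+    #   bytes 8-11  : uint32; low 16 bits select the cache type in _type_map,
+    #                 high bits carry the _flag_map flags (compress, extra_data)
+    #   bytes 12-15 : uint32 point count (totpoint)
+    #   bytes 16-19 : uint32 bitmask of which TypeDesc.index entries are stored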
+
+    def read_header(self, f):
+        bphysics = unpack_string(f.read(8), 8)
+        if bphysics != 'BPHYSICS':
+            raise Exception("Not a valid BPHYSICS cache file")
+
+        typeflag = unpack_uint(f.read(4))
+
+        cachetype, data_types = _type_map[typeflag & 0x0000FFFF]
+        self.cachetype = cachetype
+
+        for bits, flag in _flag_map.items():
+            setattr(self, flag, bool(typeflag & bits))
+
+        self.totpoint = unpack_uint(f.read(4))
+
+        data_types_flag = unpack_uint(f.read(4))
+        # frame has filtered data types list in case not all data types are stored
+        self.data_types = tuple(filter(lambda dt: ((1 << dt.index) & data_types_flag), data_types))
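+
+    # The pc2 conversion itself is outside this part of the hunk. For
+    # reference, an illustrative sketch of the commonly documented PC2 header
+    # the script is meant to emit (placeholder variable names, little-endian):
+    #
+    #   struct.pack('<12siiffi', b'POINTCACHE2\0', 1,
+    #               num_points, start_frame, sample_rate, num_samples)
+    #
+    # followed by num_samples * num_points positions packed as 'fff'.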