# ExportSG.py
#******************************************************************************
# (above is 79 chars long)
# ExportSG.py
#
# ToDo: (pos)fix ref names to be rid of terminator (must be done late as when
# processing the sg, the name without the # terminator could be a valid
# non-definition name.
#
# Write about: Traits mechanism (External to module configurability at run time, ...)
#
# Future:
# >Load the attributes from traits base classes into derived ones if they are
# not overridden, to speed up access to them.
# >Avoid placing objects in the nodes map if they are under local defs other
# than the nominated one. This would make sg.build even more gnarly so is
# only to be done if performance problems crop up for immense levels and a
# c++ re-write is deemed excessive.
#
# Copyright (c) Andrew H. Cox 2000 All rights reserved.
#
# Contact: [email protected]
#
# Import-time banner so Blender's console shows the module being (re)loaded.
# NOTE(review): the message says 'Export.py' but this file is ExportSG.py --
# looks like a leftover from a rename; confirm before relying on the text.
print 'hello from Export.py'
import Blender
import string
import os
class ExportTraits:
    """Run-time configurable policy ("traits") shared by the export classes.

    Blender object names of the form 'file/object#variation' denote
    scene-graph definitions; the helpers below classify and transform
    such names.  Passing a traits instance around (instead of
    hard-coding the rules) keeps the module configurable externally.
    """
    # Platform directory separator, used when stripping path names.
    dir_separator = os.sep
    true = 1
    false = 0
    # Object Name related data & ops
    def strip_filename(s, name):
        """Return the base file name of full path 'name', without the
        directory part or the final '.'-extension."""
        extension_i = name.rfind('.')
        if extension_i == -1:
            # Fix: with no '.' present, rfind returns -1 and the old
            # slice silently dropped the last character of the name.
            extension_i = len(name)
        # Search for the separator only before the extension dot so a
        # separator can never be picked up from inside the extension.
        filestart_i = 1 + name.rfind(s.dir_separator, 0, extension_i)
        return name[filestart_i:extension_i]
    def is_definition(s, name):
        """Return 1 if 'name' has the definition form 'file/object#...':
        at least one character before the '/', and a '#' no sooner than
        two characters after it.  Return 0 otherwise."""
        i = name.find('/')
        if i < 1: return 0
        # find() yields -1 when '#' is absent, which also fails below.
        ii = name.find('#', i)
        if ii - i < 2: return 0
        return 1
    def is_def_local(s, name, local_prefix):
        """Return 1 if definition 'name' belongs to the file named by
        'local_prefix' (i.e. the name starts with that prefix)."""
        # Assumes the name passed is in the definition form: file/object#whatever
        assert(s.is_definition(name))
        if name.startswith(local_prefix): return 1
        return 0
    def normalise_def_name(s, name):
        """Strip the per-duplicate variation suffix: keep everything up
        to and including the first '#' after the '/'."""
        # Assumes the name passed is in the definition form: file/object#whatever
        assert(s.is_definition(name))
        separator_i = name.find('/')
        terminator_i = name.find('#', separator_i)
        return name[0:terminator_i+1]
    def convert_to_filename(s, name):
        """Return the part of a definition name after the '/' -- the
        object portion, used as an output file name."""
        assert(s.is_definition(name))
        separator_i = name.find('/')
        return name[separator_i + 1:]
    # Utils
    def search_parents_for_def(s, obj):
        """Walk obj's parent chain and return the first ancestor whose
        name is a definition, or None if there is none."""
        while (obj.parent != None):
            obj = obj.parent
            if s.is_definition(obj.name):
                return obj
        return None
# Shared default traits instance handed out to the classes below.
_ExportTraits = ExportTraits()
from types import StringType # To be placed in separate file with writers and builder.
#import os
import errno
true = 1
false = 0
class WriterTraits:
    # Empty base traits type for all writers; exists so writer traits
    # share a common ancestor.
    pass
class TextWriterTraits(WriterTraits):
    # File extension used for scene-graph output files.
    sg_extension = 'sg'
    pass
# Shared default traits instance for TextWriter.
_TextWriterTraits = TextWriterTraits()
class TextWriter:
def __init__(s, out_path, traits = _TextWriterTraits):
assert(type(out_path) == StringType)
s.tr = traits
s.set_out_path(out_path)
print s.out_path
try: os.makedirs(out_path)
except os.error, value:
if value[0] != errno.EEXIST:
raise os.error, (value[0], value[1])
log_name = s.out_path + os.sep + 'Building.log'
s.set_log_file(open(log_name, 'w'))
def begin_sg(s, name):
file_name = s.out_path + os.sep + name + '.' + s.tr.sg_extension
print file_name
s.set_sg_file(open(file_name, 'w'))
s.sg_file.write('# Ambar text format instantiable scene graph branch\n')
def emit_local_sg_refs(s, refs):
s.sg_file.write('\nlocal_sg_refs [\n')
for ref in refs:
s.sg_file.write('\t'+ref+',\n')
s.sg_file.write(']\n')
def emit_ext_sg_refs(s, refs):
s.sg_file.write('\next_sg_refs [\n')
for ref in refs:
s.sg_file.write('\t'+ref+',\n')
s.sg_file.write(']\n')
def emit_node(s, fields, level, has_children):
temp = '\n' + ('\t' * level) + 'node {'
print temp
s.sg_file.write(temp)
if fields.__dict__.has_key('name'):
s.sg_file.write('\n' + ('\t' * (level+1)) + 'name ' + fields.name)
def emit_end_of_node(s, level):
tabbed_curly = '\n' + ('\t' * level) + '}\n'
print tabbed_curly
s.sg_file.write(tabbed_curly)
def end_sg(s):
s.sg_file.flush()
s.sg_file.close()
s.log_file.flush() # Here For Now (should be in destructor (&close()))
def set_out_path(s, out_path):
s.out_path = out_path
def set_sg_file(s, file):
s.sg_file = file
def set_log_file(s, file):
s.log_file = file
def log(s, string):
s.log_file.write(string)
class BuilderTraits:
    """Traits for Builder; currently only boolean constants."""
    true = 1
    false = 0
# Shared default traits instance for Builder.
_BuilderTraits = BuilderTraits()
class Builder:
from types import IntType
from types import StringType
# The class node is empty by default but provides methods to set all the
# fields a node could possibly have. This will be passed to the writer which
# must decide on the type of the node based on the fields present or encode
# the presence of a particular set of fields in some filetype specific way
# (thats what it is for).
class NodeFields:
def set_name(s, name):
s.name = name
def __init__(s, writer, traits = _BuilderTraits):
s.states = ('waiting_for_sg', 'waiting_for_local_sg_refs',
'waiting_for_ext_sg_refs', 'waiting_for_first_sg_node',
'adding_sg_node_fields', 'just_popped_sg_node',
'finished_sg_nodes', 'invalid_state')
s.tr = traits
s.writer = writer
s.state = 0
# Load up state attributes in this way so new states can be inserted
# without manualy changing the values.
i = 1
for state in s.states:
code_string = 's.'+state+' = '+repr(i)
exec code_string
i = i + 1
print "Builder.__init__ executed"
s.set_state(s.waiting_for_sg)
# Set methods: mostly just to avoid name bugs for attributes.
def set_state(s, new_state):
assert(type(new_state) == s.IntType)
print 'BUILDER: SETTING STATE <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<'
print '\t', 'Old State =', s.states[s.state - 1]
print '\t', 'New State =', s.states[new_state - 1]
s.state = new_state
def state_string(s):
return s.states[s.state -1]
def validate_state(s, expected_state):
assert(s.state == expected_state)
# FixMe: Throw an exception instead of asserting.
def throw_invalid_event_for_state(s):
print 'Current State is:',s.state_string()
assert(1!=1)
#FixMe: raise an exception.
def clear_node_fields(s):
s.node_fields = s.NodeFields()
def begin_sg(s, name):
assert(type(name) == s.StringType)
s.validate_state(s.waiting_for_sg)
s.writer.begin_sg(name)
s.set_state(s.waiting_for_local_sg_refs)
def add_local_sg_refs(s, refs):
s.validate_state(s.waiting_for_local_sg_refs)
s.writer.emit_local_sg_refs(refs)
s.set_state(s.waiting_for_ext_sg_refs)
def add_ext_sg_refs(s, refs):
s.validate_state(s.waiting_for_ext_sg_refs)
s.writer.emit_ext_sg_refs(refs)
s.set_state(s.waiting_for_first_sg_node)
def push_node(s):
if s.state == s.adding_sg_node_fields:
# Send the node on to the writer with a flag of children to follow.
s.writer.emit_node(s.node_fields, s.node_level, s.tr.true)
s.clear_node_fields()
s.inc_node_level()
# State stays the same
return
if s.state == s.just_popped_sg_node:
# Send the node on to the writer with a flag of children to follow.
s.clear_node_fields()
s.inc_node_level()
s.set_state(s.adding_sg_node_fields)
return
if s.state == s.waiting_for_first_sg_node:
s.set_node_level(0)
s.clear_node_fields()
s.set_state(s.adding_sg_node_fields)
return
s.throw_invalid_event_for_state()
def add_name(s, name): # Move if needed other than for nodes.
assert(type(name) == s.StringType)
if s.state == s.adding_sg_node_fields:
s.node_fields.set_name(name)
return
s.throw_invalid_event_for_state()
def pop_node(s):
assert(s.node_level >= 0)
if s.state == s.adding_sg_node_fields:
# Reached for leaf nodes.
s.writer.emit_node(s.node_fields, s.node_level, false)
s.writer.emit_end_of_node(s.node_level)
s.dec_node_level()
if s.node_level >= 0:
s.set_state(s.just_popped_sg_node)
return
else:
s.set_state(s.finished_sg_nodes)
return
if s.state == s.just_popped_sg_node:
s.writer.emit_end_of_node(s.node_level)
s.dec_node_level()
if s.node_level < 0:
s.set_state(s.finished_sg_nodes)
return
# else: State stays the same
return
s.throw_invalid_event_for_state()
def end_sg(s):
s.validate_state(s.finished_sg_nodes)
s.writer.end_sg()
s.set_state(s.waiting_for_sg)
def set_node_level(s, level):
s.node_level = level
def inc_node_level(s):
s.set_node_level(s.node_level + 1)
def dec_node_level(s):
s.set_node_level(s.node_level - 1)
def log(s, string):
assert(type(string) == s.StringType)
s.writer.log(string)
# Import-time smoke test; the author has already marked it for removal.
TestBuilder = Builder(TextWriter('C:\Temp')) # Delete these lines
print TestBuilder
class ExporterTraits(ExportTraits):
    # Inherits the name-handling helpers from ExportTraits.
    true = 1
    false = 0
# Shared default traits instance for Exporter.
_ExporterTraits = ExporterTraits()
class Exporter:
    """Drives a complete export: builds a SceneGraph from the Blender
    objects, then outputs every definition through the builder."""
    def __init__(s, builder, traits = _ExporterTraits):
        s.tr = traits
        s.builder = builder
        print "Exporter.__init__ executed"
    def export(s):
        """Build the scene graph for the current .blend file and output
        every definition found in it."""
        tr = s.tr
        sg = SceneGraph()
        objs = Blender.Object.Get()
        # local_prefix is the bare .blend file name; definition names
        # starting with it are 'local' to this file.
        local_prefix = tr.strip_filename(Blender.Get('filename'))
        print 'stripped filename = ',
        print local_prefix
        sg.build(objs, local_prefix)
        s.output_sg(sg, local_prefix)
        # tests here for now
        sg.test()
    def output_sg(s, sg, local_prefix):
        """Output each definition of sg, but only after every local
        definition it references has itself been output."""
        tr = s.tr
        # Output Definitions.
        # This is implemented as a simple multipass algo but would be more
        # efficient using a data-recursive version based on a stack of definitions
        # that output local refrences of the [top] definition before it itself.
        defs_to_output = sg.definition_names()
        num_to_output = len(defs_to_output)
        already_output = {}
        while num_to_output:
            # Scan list of definition names and try to output any that haven't
            # already been output.
            #! check some are output in each iteration.
            num_output = 0  # progress counter; detects dependency deadlock
            for def_name in defs_to_output:
                #if not num_to_output: break # kicks in on last scan.
                print 'Scanning: ', def_name
                if already_output.has_key(def_name): continue
                definition = sg.definition(def_name)
                print 'Checking dependancies of:', def_name
                # Before output, check all local refrenced definitions have
                # already been output & defer this one if not.
                local_refs = definition.local_refs
                for ref in local_refs:
                    if not already_output.has_key(ref): break
                else: # all local refs have already been output.
                    # (for-else: only runs when the loop above did not break)
                    print 'Outputing Definition of: ', def_name
                    #ToDo: The actual SceneGraph walk.
                    i = sg.PreorderIterator(definition.nominated, sg.nodes)
                    s.output_definition(i, definition, def_name, local_prefix)
                    already_output[def_name] = tr.true
                    num_to_output = num_to_output - 1
                    num_output = num_output + 1
            if not num_output:
                # A full pass made no progress: a definition must name a
                # local ref that is not among sg.definitions.
                print '\nError: sg passed to output_sg has at least one definition',
                print 'whose list of local refrences contains names which are not',
                print 'ammong the sg.definitions.\n'
                break
    def output_definition(s, it, definition, def_name, local_prefix):
        """Walk the scene-graph portion under 'definition' with the
        preorder iterator 'it', emitting nodes through the builder.
        References to other definitions are leaves of this walk."""
        tr = s.tr
        b = s.builder
        # Start the new Model (definition).
        print def_name
        def_name = tr.convert_to_filename(def_name)
        print '\t\t\t\t\t', def_name
        b.begin_sg(def_name)
        # Add the per definition data (internal& external refs, name...)
        # Construct mappings from names of refrences to indexes in ref lists.
        index = 0
        local_ref_indexes = {}
        for ref in definition.local_refs:
            local_ref_indexes[ref] = index
            index = index + 1
        index = 0
        ext_ref_indexes = {}
        for ref in definition.ext_refs:
            ext_ref_indexes[ref] = index
            index = index + 1
        # NOTE(review): the two *_ref_indexes maps are built but not yet
        # used below -- presumably for the ToDo ref-emitting code.
        b.add_local_sg_refs(definition.local_refs)
        b.add_ext_sg_refs(definition.ext_refs)
        #Push the root node of the definition onto the builder.
        obj = definition.nominated
        print 'Outputing Definition Root:', obj.name
        level = 0
        b.log('\n'+('\t'*level) + obj.name + '{')
        b.push_node()
        b.add_name(obj.name) # Remove when there is something more interesting to add.
        # Don't add the Xform.
        obj = it.next_down()
        if not obj:
            # The root has no children: close it immediately.
            b.pop_node()
            b.log('\n'+('\t'*level) + '}')
        # Descend the scene Graph portion under the definition with ref, defs
        # and 'true leaves' as the leaves of this walk.
        while obj:
            level = level + 1
            b.log('\n'+('\t'*level) + obj.name + '{')
            if tr.is_definition(obj.name):
                b.push_node()
                b.add_name(obj.name) # Temp
                # ToDo: Add an Xform node
                if tr.is_def_local(obj.name, local_prefix):
                    # ToDo: add a local ref
                    print 'Adding a local Ref to', obj.name
                    pass
                else:
                    # ToDo: add an external ref.
                    print 'Adding an external Ref to', obj.name
                    pass
                # Definitions are leaves of this walk: force the unwind
                # below rather than descending into them.
                obj = None
            else: # non-def anonymous node
                print 'Outputing Node. Type:', type(obj),'name:', obj.name
                b.push_node()
                b.add_name(obj.name) # Temp
                # ToDo: Add Fields to node
                obj = it.next_down()
            print 'Stact_top after i.next_down() ==', it.stack_top
            while not obj:
                # Nothing further down: unwind until a sibling turns up.
                b.log('\n'+('\t'*level) + '}')
                level = it.pop()
                print '>>>>>>>>> sg.ouput_definition(): level ==', level
                b.pop_node()
                obj = it.next_down()
                if (level == 0) and (obj == None):
                    # Back at the root with nothing left to visit.
                    b.pop_node() # b.end_sg() ?
                    b.log('\n'+('\t'*level) + '}')
                    break
        b.end_sg()
# Import-time smoke test of Exporter construction; cleaned up immediately.
TestExporter = Exporter(TestBuilder)
print TestExporter
del TestExporter
del TestBuilder
class SceneGraphTraits(ExportTraits):
    # Hard ceiling on graph depth, asserted when the iterator stacks grow.
    max_graph_depth = 256
    def node_key(s, obj):
        """Key under which obj is filed in the SceneGraph child map."""
        return obj.name
        #return id(obj.name)#try for performance (untested)
        #no: string would be dynamicaly created on heap every access.
        #return id(obj) # returns different results for same obj.
# Shared default traits instance for SceneGraph.
_SceneGraphTraits = SceneGraphTraits()
class SceneGraph:
    """In-memory model of the exportable scene.

    Holds the definitions keyed by normalised name, the set of external
    references, and 'nodes': a child map from a parent's node key to the
    list of its child objects, used by PreorderIterator to walk the graph.
    """
    def __init__(s, traits = _SceneGraphTraits):
        s.tr = traits
        s.definitions = {}  # normalised def name -> Definition
        s.ext_refs = {}     # normalised def name -> None (placeholder)
        s.nodes = {}        # parent node key -> [child objects]
    class Definition:
        """One named definition: a nominated representative object plus
        the lists of local and external definitions it references."""
        # Use common traits unless something class specific comes up.
        def __init__(s, nominated, traits = _SceneGraphTraits):
            s.tr = traits
            s.nominated = nominated
            # s.name = None # Name stripped of variation past '#'.
            s.nominated_name = nominated.name # Representative of duplicated nodes.
            s.local_refs = []
            s.ext_refs = []
        def add_local_ref(s, def_name, ref_name):
            """Record ref_name as a local reference, only when asked on
            behalf of the nominated duplicate (keeps the list unique)."""
            #!ref_name is normalised, def_name is not.
            if def_name == s.nominated_name:
                # ref2_name = ref_name[0:len(ref_name)-1]
                # print 'definition.add_local_ref: adding',ref_name, ref2_name
                if ref_name not in s.local_refs:
                    s.local_refs.append(ref_name)
        def add_ext_ref(s, def_name, ref_name):
            """Record ref_name as an external reference; same duplicate
            filtering as add_local_ref."""
            #!ref_name is normalised, def_name is not.
            if def_name == s.nominated_name:
                # ref2_name = ref_name[0:len(ref_name)-1]
                # print 'definition.add_ext_ref: adding',ref_name, ref2_name
                if ref_name not in s.ext_refs:
                    s.ext_refs.append(ref_name)
    # Definition ops.
    def add_definition(s, normalised_name, nominated):
        """Return the Definition for normalised_name, creating it (with
        'nominated' as its representative) on first sight."""
        if s.definitions.has_key(normalised_name):
            return s.definitions[normalised_name]
        definition = s.Definition(nominated)
        s.definitions[normalised_name] = definition
        return definition
    def definition_names(s):
        """All normalised definition names currently known."""
        return s.definitions.keys()
    def definition(s, def_name):
        """Look up a Definition by its normalised name."""
        #Future: Translate pos. exception to my own to hide dictionary imp.
        return s.definitions[def_name]
    class PreorderIterator:
        # Usage: Create (iterator = PreorderIterator(...))
        # Call iterator.next untill it returns None
        # Future:
        # Rewrite to take a class that abstracts it from the child map or use
        # methods in it's traits for same abstraction (shouldn't be in
        # SceneGraph if it is abstracted: perhaps a derived class here with a
        # local traits class).
        def __init__(s, root, child_map, traits = _SceneGraphTraits):
            s.tr = traits
            s.child_map = child_map
            # Two parallel stacks: the object at each depth, and the
            # index of the NEXT child to visit at that depth.  They are
            # pre-sized to 32 and grown on demand in step_to_child().
            s.object_stack = range(32)
            s.child_i_stack = range(32)
            for i in xrange(32):
                s.object_stack[i] = None
                s.child_i_stack[i] = None
            s.object_stack[0] = root
            s.child_i_stack[0] = 0 #stores the _NEXT_ child at each level.
            s.stack_top = 0
        def step_to_child(s, top, child_list, child_i):
            """Advance one level down to child_i of the node at 'top';
            return that child and update both stacks."""
            ret = child_list[child_i]
            s.child_i_stack[top] = child_i + 1
            top = top + 1
            try:
                s.child_i_stack[top] = 0
                s.object_stack[top] = ret
            except(IndexError):
                # Stacks are full: grow both together, within the limit.
                assert(len (s.child_i_stack) == len(s.object_stack))
                assert(top < s.tr.max_graph_depth)
                s.child_i_stack.append(0)
                s.object_stack.append(ret)
            s.stack_top = top
            return ret
        def next(s):
            """Return the next object in preorder, or None when done."""
            tr = s.tr
            key_func = tr.node_key
            top = s.stack_top
            print 'top == ', top #[Temp line]
            # First try to descend to an unvisited child.
            ret = s.next_down()
            if ret:
                return ret
            # Try to step to a child of the current stack top.
            # obj_key = key_func(s.object_stack[top])
            # if s.child_map.has_key(obj_key): # filter out leaves.
            # child_list = s.child_map[obj_key]
            # child_i = s.child_i_stack[top]
            # # Check there is a child of the current node left to visit.
            # if child_i < len(child_list):
            # return s.step_to_child(top, child_list, child_i)
            # Step back up Graph looking for a node with unvisited children.
            while top > 0:
                print 'Stepping up stack' #[Temp line]
                s.object_stack[top] = None # Not strictly nescessary.
                s.child_i_stack[top] = 32000 # Temp for debugging.
                top = top - 1
                obj_key = key_func(s.object_stack[top])
                child_list = s.child_map[obj_key]
                child_i = s.child_i_stack[top]
                # Check there is a child of the current node left to visit.
                if child_i < len(child_list):
                    s.stack_top = top
                    return s.step_to_child(top, child_list, child_i)
            # Unwound all the way to the root: traversal is complete.
            s.stack_top = top
            assert(s.stack_top == 0)
            assert(len (s.child_i_stack) == len(s.object_stack))
            return None
        def next_down(s):
            """Descend to the next unvisited child of the current stack
            top, or return None if the top has none left (a leaf, or
            all of its children already visited)."""
            tr = s.tr
            key_func = tr.node_key
            top = s.stack_top
            print 'top == ', top #[Temp line]
            # Try to step to a child of the current stack top.
            obj_key = key_func(s.object_stack[top])
            if s.child_map.has_key(obj_key): # filter out leaves.
                child_list = s.child_map[obj_key]
                child_i = s.child_i_stack[top]
                # Check there is a child of the current node left to visit.
                if child_i < len(child_list):
                    return s.step_to_child(top, child_list, child_i)
            return None
        def pop(s):
            """Step one level back up; return the new stack top index."""
            print 'Stepping up stack' #[Temp line]
            top = s.stack_top
            s.object_stack[top] = None # Not strictly nescessary.
            s.child_i_stack[top] = 32000 # Temp for debugging.
            s.stack_top = top - 1
            return s.stack_top
    def add_to_nodes(s, obj):
        """File obj in the child map under its parent's node key."""
        key = s.tr.node_key(obj.parent)
        if s.nodes.has_key(key):
            # print 'Key:', key
            # print 'Parent:', obj.parent.name
            s.nodes[key].append(obj)
        else:
            # print " nodes doesn't have key xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:",
            # print key
            # print 'Parent:', obj.parent.name
            s.nodes[key] = [obj]
    def add_ext_ref(s, name):
        """Note a reference to a definition in another .blend file."""
        print 'ADDIING AN EXTERNAL REF TO SG:', name, '<------------######' #temp
        s.ext_refs[name] = None # Later this will hold the corspnd. builder ID.
    def build(s, objs, local_prefix):
        """Classify every object in objs and populate the definitions,
        external refs and the nodes child map.  Objects are only filed
        under nodes when their nearest definition ancestor is local."""
        tr = s.tr
        true, false = (1, 0)
        # Sentinel so the assert below catches any classification path
        # that forgets to decide add_obj_to_nodes.
        unset_bool = 32767
        for obj in objs:
            add_obj_to_nodes = unset_bool
            if tr.is_definition(obj.name):
                normalised = tr.normalise_def_name(obj.name)
                obj_is_local = tr.is_def_local(obj.name, local_prefix)
                # Add the object as a ref of it's first definition ancestor
                # in parent path if it has one and it is local.
                ancestor = tr.search_parents_for_def(obj)
                if ancestor:
                    if tr.is_def_local(ancestor.name, local_prefix):
                        add_obj_to_nodes = true
                        if obj_is_local:
                            obj_def = s.add_definition(normalised, obj)
                        else:
                            s.add_ext_ref(normalised)
                        norm_ancestor = tr.normalise_def_name(ancestor.name)
                        ancestor_def = s.add_definition(norm_ancestor, ancestor)
                        # Adding a ref involves a linear search for duplicates
                        # so only do it for the one nominated version of the
                        # ancestor's definition.
                        if ancestor_def.nominated_name == ancestor.name:
                            if obj_is_local:
                                ancestor_def.add_local_ref(ancestor.name, normalised)
                            else:
                                ancestor_def.add_ext_ref(ancestor.name, normalised)
                    else: # obj is parented into a path with an external definition.
                        add_obj_to_nodes = false
                        if obj_is_local:
                            print 'Warning: Definition ignored. (Was parented to a '
                            print 'path with an external definition in it:\n ',
                            print obj.name, obj.parent.name
                else: # Object has no definition in parent path.
                    add_obj_to_nodes = false
                    if obj.parent:
                        print 'Warning: Definition or external refrence ignored.'
                        print '(Was parented to a path with no Definition above it:\n ',
                        print obj.name, obj.parent.name
                    else:
                        # Unparented local definition: a root, so it has
                        # no parent key to be filed under in nodes.
                        if obj_is_local:
                            obj_def = s.add_definition(normalised, obj)
            else: # Obj is NOT a definition of either kind.
                ancestor = tr.search_parents_for_def(obj)
                if ancestor:
                    if tr.is_def_local(ancestor.name, local_prefix):
                        add_obj_to_nodes = true
                    else:
                        add_obj_to_nodes = false
                else:
                    add_obj_to_nodes = false
                    print 'Warning: Non-definition Object found with no definition'
                    print "above it in it's parent chain:\n ",
                    print obj.name
            # add object to Nodes
            assert(add_obj_to_nodes != unset_bool)
            if add_obj_to_nodes:
                # print 'Adding object to nodes:',obj.name
                s.add_to_nodes(obj)
            else: #temp
                pass #temp: send this to a diagnostics log.
                #print 'Not adding object to nodes:',obj.name #temp
    # Test Func here: Scan definitions and check each has one of each name in its' ref lists...
    def test(s):
        """Dump every definition with its nominated name and ref lists."""
        print '\n\nPrinting Definitions'
        def_keys = s.definitions.keys()
        for key in def_keys:
            print '\nDefinition Name:',
            print key
            definition = s.definitions[key]
            print 'Nominated Name:',
            print definition.nominated_name
            print 'External Refs:'
            for ref in definition.ext_refs:
                print ref
            print 'Local Refs:'
            for ref in definition.local_refs:
                print ref
# Import-time smoke test of SceneGraph construction; cleaned up immediately.
TestSceneGraph = SceneGraph()
print TestSceneGraph
del TestSceneGraph
# Export Initiation:
def export():
temp_dirs = ['TEMP', 'TMP', 'TMPDIR']
for dir in temp_dirs:
try:
temp_dir = os.environ[dir]
except KeyError:
temp_dir = None
else: break
if temp_dir:
exporter = Exporter(Builder(TextWriter(temp_dir)))
exporter.export()
else: print 'Not able to find temporary directory for output'
export()
# Test Suite
# Function Tests
def test_is_definition(def_names, non_def_names):
print "Testing is_definition() with lists of good and bad names"
for name in def_names:
if _ExportTraits.is_definition(name) != 1:
print "Test Failure: (_ExportTraits.is_definition) Reported valid Definition name as not being a definition:",
print name
for name in non_def_names:
if _ExportTraits.is_definition(name):
print "Test Failure: (_ExportTraits.is_definition) Reported invalid name as being a definition:",
print name
print "Finished testing is_definition()"
def test_strip_filename():
full_filenames = [r'D:?temp?3d?file.blend', r'D:?temp?3d?file.blend001',
r'D:?temp?3d?file.part1.part2.blend']
stripped = ['file', 'file', 'file.part1.part2']
print 'Testing strip_filename() against a list of full path filenames'
i = 0
for name in full_filenames:
name = string.replace(name, '?', os.sep)
out = _ExportTraits.strip_filename(name)
if out != stripped[i]:
print 'Test Failure: (_ExportTraits.strip_filename) Test filename made equal to stripped version: ',
print name, ' to ', out, ' instead of ', stripped[i]
i = i+1
print 'Finished Testing strip_filename()'
def test_normalise_def_name(normalise_def_name, def_names, norm_def_names):
i = 0
for name in def_names:
out = normalise_def_name(name)
if out != norm_def_names[i]:
print 'Test Failure: normalise_def_name()',
print 'in:', name, 'out:', out, 'test:', norm_def_names[i]
i = i+1
def test():
    """Run the whole function-test suite over canned name samples."""
    # Valid definition names; norm_def_names is index-aligned with the
    # expected normalised form of each.
    def_names = ['Ar#ches/Sp/an1#001', 'Arches/Span1#', 'Arches/Span1#001', 'Arches/Span1#999', '1/2#', '1/2#1000']
    norm_def_names = ['Ar#ches/Sp/an1#', 'Arches/Span1#', 'Arches/Span1#', 'Arches/Span1#', '1/2#', '1/2#']
    non_def_names = ['/object1#001', 'Arches/#001', 'Arches/#', '/object#002', '/#', '/','#',' ','#Span1/Arches', '#/']
    # Work: make functions to test a parameter of test functions.
    print "\nStarting tests"
    test_is_definition(def_names, non_def_names)
    test_strip_filename()
    print 'Testing normalise_def_name() against sample input.'
    test_normalise_def_name(_ExportTraits.normalise_def_name, def_names, norm_def_names)
    print 'Finished testing normalise_def_name()'
    print "Ending Tests"
test()