Create a text file, paste in the code below, and save it with a .PY extension.
Run it from the console, using the PyFFI utilities installed on your system.
This script optimizes NIF files: it removes redundant properties, merges shapes, and so on.
Running all the vanilla models through it is a quick way to improve Morrowind's performance, which is exactly what was done in the Performance Patch posted on the Nexus.
A newer version of the script may exist.
The line meshes = "C:/Users/Jad/Games/Morrowind/Data Files/Meshes/g7/test" specifies the folder containing the files to process.
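For example, assuming the script were saved as nif_optimizer.py (the name is arbitrary, only the .py extension matters), it could be launched from the console like this:

python nif_optimizer.py

Edit the meshes path first so that it points at your own folder; the script walks it recursively and overwrites every NIF it manages to optimize.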
(C) Greatness7
#!/usr/bin/env python
# warning: will be slow on very large folders...
# "/meshes/i/" on my setup:
# [Finished in 85.63s] @ pypy27
# [Finished in 188.0s] @ python35
from __future__ import print_function
import os.path
from pyffi.utils import walk
from timeit import default_timer
from multiprocessing import Pool
from collections import defaultdict
from pyffi.formats.nif import NifFormat
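
# edit this path: all NIF files under it are optimized in place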
meshes = "C:/Users/Jad/Games/Morrowind/Data Files/Meshes/g7/test"

def combine_properties(data):
    # merge NiProperty blocks that are interchangeable duplicates
    count = 0
    props = {i: p for i, p in enumerate(data.blocks)
             if isinstance(p, NifFormat.NiProperty)}
    for i, block in props.items():
        if block is None: continue
        for j, other in props.items():
            if other is None: continue
            if i != j and other.is_interchangeable(block):
                data.replace_global_node(other, block)
                props[j] = None
                count += 1
    return count

def clean_string_data(data):
    count = 0
    # these are the only valid string prefixes
    keep = dict.fromkeys(["sgokeep", "nc", "mrk", "rcn"])
    for block in data.blocks:
        extra_data = getattr(block, "extra_data", None)
        if type(extra_data) != NifFormat.NiStringExtraData:
            continue
        modified = False
        string_data = extra_data.string_data.lower()
        try:
            if string_data not in keep:
                modified = True
                # possible useless string data
                # compare it w/ valid prefixes
                prefix = (p for p in keep if string_data.startswith(p))
                string_data = extra_data.string_data = next(prefix)
            # found a valid string, re-use any saved copy
            saved = keep[string_data]
            if saved is None:
                keep[string_data] = extra_data
            elif extra_data is not saved:
                block.extra_data = saved
                modified = True
        except StopIteration:
            # no valid prefix matched; discard the extra data entirely
            data.replace_global_node(extra_data, None)
        if modified:
            count += 1
    return count

def optimize_alpha_flags(data):
    """Normalize alpha properties to the standard blend/test settings."""
    count = 0
    for block in data.blocks:
        if type(block) != NifFormat.NiAlphaProperty:
            continue
        modified = False
        # 4844 / 133 are the commonly recommended alpha settings
        # for Morrowind meshes (alpha blending plus alpha testing)
        if block.flags != 4844:
            block.flags = 4844
            modified = True
        if block.threshold < 133:
            block.threshold = 133
            modified = True
        if modified:
            count += 1
    return count

def flatten_branches(data):
    count = 0
    links = (NifFormat.NiNode, NifFormat.NiTriShape)
    for block in data.blocks:
        if type(block) != NifFormat.NiTriShape:
            continue
        if block.controller or block.extra_data or block.skin_instance:
            continue
        chain = data.roots[0].find_chain(block, block_type=links)[::-1]
        if len(chain) <= 2 or NifFormat.RootCollisionNode in map(type, chain):
            continue
        transform = block.get_transform()
        for i, link in enumerate(chain[1:-1], start=1):
            if any((link.has_bounding_box,
                    link.controller,
                    link.extra_data,
                    link.effects)):
                break
            parent = chain[i + 1]
            parent.add_child(block)
            link.remove_child(block)
            if link.num_children == 0:
                parent.remove_child(link)
            transform *= link.get_transform()
            count += 1
        block.set_transform(transform)
    return count

def merge_trishapes(data):
    count = 0
    vec = NifFormat.Vector3()
    mat = NifFormat.Matrix33()
    mat.set_identity()  # make sure the reset rotation is the identity
    queue = data.roots[:]
    while len(queue):
        node = queue.pop()
        if type(node) != NifFormat.NiNode:
            continue
        # get compatible shapes
        shapes = defaultdict(list)
        for child in node.children:
            if type(child) == NifFormat.NiNode:
                queue.append(child)
            elif type(child) == NifFormat.NiTriShape:
                # skip skinned / etc
                if child.controller or child.extra_data or child.skin_instance:
                    continue
                # # skip alpha shapes
                # if NifFormat.NiAlphaProperty in map(type, child.properties):
                #     continue
                props = child.properties.get_hash()
                if len(props) > 1:
                    props = tuple(sorted(props))
                shapes[props].append(child)
        # merge compatible shapes
        for group in shapes.values():
            if len(group) <= 1: continue
            merged = NifFormat.NiTriShapeData()
            merged.has_normals = any(s.data.has_normals for s in group)
            merged.has_vertex_colors = any(s.data.has_vertex_colors for s in group)
            merged.has_uv = merged.num_uv_sets = max(s.data.num_uv_sets for s in group)
            merged.uv_sets.update_size()
            for shape in group:
                sdata = shape.data
                index = len(merged.vertices)
                # increment num
                merged.num_vertices += sdata.num_vertices
                merged.num_triangles += sdata.num_triangles
                merged.num_triangle_points += sdata.num_triangle_points
                # get transforms
                transform = shape.get_transform()
                transpose = transform.get_inverse()
                transpose.set_rows(*(zip(*transpose.as_tuple())))
                # merge vertices
                merged.vertices.extend(v * transform for v in sdata.vertices)
                # merge normals (inverse transpose keeps them valid under scaling)
                merged.normals.extend(n * transpose for n in sdata.normals)
                # merge uv sets
                for i, uv_set in enumerate(sdata.uv_sets):
                    merged.uv_sets[i].extend(uv_set)
                # merge colors
                if merged.has_vertex_colors:
                    if sdata.has_vertex_colors:
                        c = sdata.vertex_colors
                    else:  # generate colors
                        c = NifFormat.Color4()
                        c.r, c.g, c.b, c.a = [1] * 4
                        c = [c] * sdata.num_vertices
                    merged.vertex_colors.extend(c)
                # merge triangles
                for tri in sdata.triangles:
                    new = NifFormat.Triangle()
                    new.v_1 = tri.v_1 + index
                    new.v_2 = tri.v_2 + index
                    new.v_3 = tri.v_3 + index
                    merged.triangles.append(new)
                # clean up
                if shape is not group[-1]:
                    node.remove_child(shape)
            # update arrays
            merged.vertices.update_size()
            merged.normals.update_size()
            merged.vertex_colors.update_size()
            merged.triangles.update_size()
            merged.update_center_radius()
            # the last shape in the group keeps the merged geometry
            shape.data = merged
            shape.translation = vec
            shape.rotation = mat
            shape.scale = 1
            count += len(group) - 1
    return count

def clean_collision(data):
    """ clean unnecessary data
        - vertex colors
        - vertex uv sets
        - controllers
        - properties
        - extra data
        - skin data
    """
    pass

def scan(file):
    name = os.path.relpath(file, meshes)
    try:  # fails on some invalid nifs...
        with open(file, 'rb') as stream:
            data = NifFormat.Data()
            data.read(stream)
        results = {
            "props": combine_properties(data),
            "alphas": optimize_alpha_flags(data),
            "strings": clean_string_data(data),
            "nodes": flatten_branches(data),
            "shapes": merge_trishapes(data),
        }
        # only rewrite the file if something actually changed
        if any(results.values()):
            with open(file, 'wb') as stream:
                data.write(stream)
            return (name, results)
    except Exception as error:
        return "ERROR: (%s) %s" % (name, error)

def kf(f):
    # path of the mesh's companion animation file (name.kf)
    return os.path.splitext(f)[0] + ".kf"

def xnif(f):
    # path of the mesh's animated variant (xName.nif)
    return os.path.split(f)[0] + "/x" + os.path.basename(f)

if __name__ == '__main__':
    start_time = default_timer()

    files = [
        file for file in walk(meshes)
        # must be a nif
        if file.lower().endswith(".nif")
        # must not be too big
        if os.path.getsize(file) < 10000000
        # must not be in root dir
        if not os.path.dirname(file).lower().endswith("data files/meshes")
        # must not be animated
        if ".1st." not in file.lower()
        if not os.path.isfile(kf(file))
        if not os.path.isfile(xnif(file))
    ]

    pool = Pool()
    results = pool.map(scan, files)
    pool.close()
    pool.join()

    print("Time Elapsed: %.2fs" % (default_timer() - start_time))
    for result in filter(None, results):
        print(result)
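
When it finishes, the script prints the elapsed time followed by one line per modified file (or an ERROR line for any NIF it failed to parse). Illustrative output with a made-up file name:

Time Elapsed: 12.34s
('g7/test/example.nif', {'props': 2, 'alphas': 1, 'strings': 0, 'nodes': 1, 'shapes': 3})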