Commit 69ecb194 authored by Vlad Gaydukov's avatar Vlad Gaydukov
Browse files

Merge branch 'AL-9352-lods-wizzard-effects' into 'master'

Wizard, collider, shape morph, manual lods, errors, https AL-9687 AL-9556  AL-9256  AL-9505  AL-9483  AL-9458  AL-9388  AL-9151  AL-9388  AL-9387  AL-9386  AL-9352  AL-9384  AL-9402

See merge request avvy/ucs!8
parents f4e9cdc0 312970a5
Showing with 978 additions and 36 deletions
+978 -36
......@@ -27,7 +27,8 @@ Vagrant.configure("2") do |config|
"vagrant_use_certbot" => 0,
"BRANCH" => "master",
"rabbitmq_user_pass" => "znLHG38Nv56vZNDb",
"rabbitmq_admin_pass" => "zZEGhfKsXhhk0hZC"}
"rabbitmq_admin_pass" => "zZEGhfKsXhhk0hZC",
"https_address"=>"https://test.ucs.avvyland.com/"}
}
machine.vm.network :private_network, ip: ip
# nginx debug blocks-admin
......@@ -57,8 +58,8 @@ Vagrant.configure("2") do |config|
# Test ucs-grpc service
machine.vm.network "forwarded_port", guest: 50040, host: 50040 #file autoindex
machine.vm.network "forwarded_port", guest: 50050, host: 50050 #nginx for grpc
machine.vm.network "forwarded_port", guest: 50051, host: 50051 #grpc-node
machine.vm.network "forwarded_port", guest: 50052, host: 50052 #grpc-node
machine.vm.network "forwarded_port", guest: 50053, host: 50053 #grpc-node
machine.vm.network "forwarded_port", guest: 8700, host: 8700 #storage mock nginx
machine.vm.network "forwarded_port", guest: 8701, host: 8701 #storage mock node1
......@@ -72,7 +73,12 @@ Vagrant.configure("2") do |config|
machine.vm.network "forwarded_port", guest: 5672, host: 5673
# Flower web panel
machine.vm.network "forwarded_port", guest: 5566, host: 5566
machine.vm.network "forwarded_port", guest: 5566, host: 5566 #direct to node
# Custom celery job monitors
machine.vm.network "forwarded_port", guest: 8900, host: 8900 #job monitor nginx
machine.vm.network "forwarded_port", guest: 8901, host: 8901 #job monitor node1
machine.vm.network "forwarded_port", guest: 8902, host: 8902 #job monitor node1
machine.vm.provider "virtualbox" do |vb|
vb.memory = "8192"
......
......@@ -47,7 +47,7 @@
roles:
- { role: nginx_grpc_server,
server_name: ucs-grpc.avvyland.com,
use_certbot: 0
use_certbot: 1
}
- { role: grpc_server,
git_repository: "git@gitlab.domi.do:avvy/ucs.git",
......
......@@ -47,7 +47,7 @@
roles:
- { role: nginx_grpc_server,
server_name: test.ucs-grpc.avvyland.com,
use_certbot: 0
use_certbot: 1
}
- { role: grpc_server,
git_repository: "git@gitlab.domi.do:avvy/ucs.git",
......
......@@ -7,4 +7,5 @@ storage_url=http://68.183.66.117:8700/
ansible_python_interpreter=/usr/bin/python3
rabbitmq_user_pass=znLHG38Nv56vZNDb
rabbitmq_admin_pass=zZEGhfKsXhhk0hZC
job_manifest_storage_url=https://storage.avvyland.com/
\ No newline at end of file
job_manifest_storage_url=https://storage.avvyland.com/
https_address=ucs.avvyland.com
\ No newline at end of file
......@@ -7,6 +7,8 @@
port1_editor: 8711
port2_editor: 8712
port_flower: 5566
port1_job_monitor: 8901
port2_job_monitor: 8902
- set_fact:
log_path: /tmp/{{ server_name }}
......@@ -441,6 +443,74 @@
debug:
msg: "{{ systemctl_output.stdout }}"
###
# CELERY JOBS CUSTOM MONITORING
###
- name: Setup Job monitoring server services conf in /etc/systemd/system/
template:
src=systemd_job_monitor.tpl
dest=/etc/systemd/system/{{ server_name }}{{ item }}_job_monitor.service
vars:
port: "{{ item }}"
server_config_path: "{{server_config_ini_filepath}}"
loop:
- "{{port1_job_monitor}}"
- "{{port2_job_monitor}}"
- name: Try to stop Job monitoring instances
command: "systemctl stop {{ server_name }}{{item}}_job_monitor.service"
loop:
- "{{port1_job_monitor}}"
- "{{port2_job_monitor}}"
ignore_errors: true
- name: Enable Job monitoring server services conf
command: "systemctl enable {{ server_name }}{{item}}_job_monitor.service"
loop:
- "{{port1_job_monitor}}"
- "{{port2_job_monitor}}"
- name: Restart systemctl
command: "systemctl daemon-reload"
- name: Start Job monitoring server instances
command: "systemctl start {{ server_name }}{{item}}_job_monitor.service"
loop:
- "{{port1_job_monitor}}"
- "{{port2_job_monitor}}"
# Check app logs after 5 secs of working
- name: Sleep to wait for apps
pause:
seconds: 5
- name: Register Job monitoring server systemctl of instances to print later
command: "systemctl status {{server_name}}{{port1_job_monitor}}_job_monitor.service {{server_name}}{{port2_job_monitor}}_job_monitor.service"
ignore_errors: true
register: systemctl_output
- block:
- name: Show journal of first service in case of error starting the service
command: "sudo journalctl -u {{server_name}}{{port1_job_monitor}}_job_monitor.service"
register: journalctl_output
- name: print Show journal of first service in case of error starting the service
debug:
msg: "{{ journalctl_output.stdout }}"
- name: Stop execution since there was an error
command: "python3.7 -c '1/0' "
when: systemctl_output is failed
- name: Print systemctl status to console
debug:
msg: "{{ systemctl_output.stdout }}"
# FINALLY NGINX
- name: Restart nginx after starting instances
......
......@@ -48,7 +48,7 @@ app_name = asset_processing_module
broker_url = amqp://ucs_user:{{rabbitmq_user_pass}}@localhost:5672/ucs_vhost
result_backend = mongodb://localhost:27017/celery_tasks
result_expires = 3600
debug_job_logging_backend = mongodb://localhost:27017/celery_debug_job_logging
debug_job_logging_backend = mongodb://localhost:27017/ucs_job_tracing
[jobs_settings]
manifest_storage = {{job_manifest_storage_url}}
......@@ -57,3 +57,6 @@ manifest_storage = {{job_manifest_storage_url}}
max_msg_send_bytes = 15728640
max_msg_receive_bytes = 15728640
max_workers = 10
[editor_settings]
job_monitoring_service_url = http://localhost:8900
# systemd unit template for one UCS celery job-monitor instance;
# rendered once per port by the Ansible job-monitor setup task.
[Unit]
Description={{server_name}} UCS Celery job custom monitor
After=multi-user.target
[Service]
Type=simple
WorkingDirectory=/var/www/{{server_name}}/
# One monitor process per port; reads the rendered server config ini.
ExecStart=/usr/local/bin/python3.7 /var/www/{{server_name}}/server_job_monitor.py port={{port}} config={{server_config_path}}
# Restart forever with a 5-second backoff.
Restart=always
RestartSec=5
# Memory caps: throttle above 1G, hard kill at 2G.
MemoryAccounting=true
MemoryHigh=1G
MemoryMax=2G
[Install]
WantedBy=multi-user.target
......@@ -82,3 +82,22 @@
ignore_errors: true
# Celery job monitoring
- name: Setup celery processes monitoring custom service
when: nginxinstalled is success
template:
src=site_avail_job_monitor.conf.tpl
dest=/etc/nginx/sites-available/{{ server_name }}_job_monitor.conf
- name: Tell nginx that {{ server_name }}_job_monitor is enabled
when: nginxinstalled is success
file:
src: "/etc/nginx/sites-available/{{ server_name }}_job_monitor.conf"
dest: "/etc/nginx/sites-enabled/{{ server_name }}_job_monitor.conf"
state: link
ignore_errors: true
......@@ -5,7 +5,8 @@ upstream ucs_editor {
server {
listen 8710;
server_name {{server_name}}_editor www.{{server_name}}_editor;
listen 8810 ssl;
server_name {{https_address}} www.{{https_address}};
location ^~ /static/ {
root /var/www/{{server_name}};
......@@ -14,10 +15,6 @@ server {
try_files $uri $1;
}
# Allow file uploads
client_max_body_size 1G;
add_header Access-Control-Allow-Origin *;
location = /favicon.ico {
rewrite (.*) /static/images/favicon.ico;
}
......@@ -37,4 +34,12 @@ server {
proxy_pass http://ucs_editor;
}
{% if use_certbot == 1 %}
# listen 443 ssl; # managed by Certbot
ssl_certificate /etc/letsencrypt/live/{{https_address}}/fullchain.pem; # managed by Certbot
ssl_certificate_key /etc/letsencrypt/live/{{https_address}}/privkey.pem; # managed by Certbot
include /etc/letsencrypt/options-ssl-nginx.conf; # managed by Certbot
ssl_dhparam /etc/letsencrypt/ssl-dhparams.pem; # managed by Certbot
{% endif %}
}
\ No newline at end of file
# Round-robin load balancing across the two job-monitor node instances.
upstream ucs_job_monitor {
    server 127.0.0.1:8901;
    server 127.0.0.1:8902;
}
# Front door for the celery job monitor (plain HTTP on port 8900).
server {
    listen 8900;
    location / {
        proxy_pass_header Server;
        proxy_set_header Host $http_host;
        proxy_redirect off;
        # Forward original client address and scheme to the backend.
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Scheme $scheme;
        proxy_pass http://ucs_job_monitor;
    }
}
......@@ -7,4 +7,5 @@ storage_url=http://165.227.139.116:8700/
ansible_python_interpreter=/usr/bin/python3
rabbitmq_user_pass=znLHG38Nv56vZNDb
rabbitmq_admin_pass=zZEGhfKsXhhk0hZC
job_manifest_storage_url=https://test.storage.avvyland.com/
\ No newline at end of file
job_manifest_storage_url=https://test.storage.avvyland.com/
https_address=test.ucs.avvyland.com
\ No newline at end of file
import bpy
import json
import numpy
'''
def to_np(flat_list, dims=3):
N = int(len(flat_list) / dims)
ans = []
for i in range(0, N):
ans.append((flat_list[i],
flat_list[i+1],
flat_list[i+2]))
return flat_list
'''
def default_loops(vertex_index):
    """Build Blender polygon loop tables for a pure-triangle index buffer.

    :param vertex_index: flat sequence of vertex indices, 3 per triangle
    :return: (loop_start, loop_total) int32 arrays — loop_start is the first
        loop index of each triangle (0, 3, 6, ...), loop_total is 3 for all.
    """
    tri_count = len(vertex_index) // 3
    loop_start = numpy.arange(0, 3 * tri_count, 3, dtype=numpy.int32)
    loop_total = numpy.full(tri_count, 3, dtype=numpy.int32)
    return loop_start, loop_total
def clean_scene():
    '''
    Delete everything, basically for a new scene.

    Iterates over snapshots (list(...)) of the bpy datablock collections:
    removing entries from a live bpy collection while iterating it can skip
    elements, leaving objects/collections behind.
    :return: None
    '''
    for obj_i in list(bpy.data.objects):
        bpy.data.objects.remove(obj_i)
    for col_i in list(bpy.data.collections):
        bpy.data.collections.remove(col_i)
# --- Script entry: rebuild a mesh from a dumped collider JSON ---
# NOTE(review): hard-coded absolute path to a developer machine — parameterize
# before reusing this script anywhere else.
clean_scene()
with open('/home/alexey/Code/ucs/blend/_tmp_creative_collider.json', 'r') as f:
    data = json.load(f)
# The payload is double-encoded: the file holds {"data": "<json string>"}.
data = json.loads(data['data'])
orig_model = data[0]['meshCollider']['meshData']
# Vertices: flat [x, y, z, x, y, z, ...] float buffer.
vertices = numpy.array(orig_model['vertices'],
                       dtype=numpy.float32)
num_vertices = vertices.shape[0] // 3
# Loops: triangle index buffer, 3 indices per face.
vertex_index = numpy.array(orig_model['faces'],
                           dtype=numpy.int32)
loop_start, loop_total = default_loops(vertex_index)
num_vertex_indices = vertex_index.shape[0]
num_loops = loop_start.shape[0]
# UVs / normals present in the dump but not applied here.
#uv_coordinates = numpy.array(orig_model['uv'],
#                             dtype=numpy.float32)
#normals = numpy.array(orig_model['normals'],
#                      dtype=numpy.float32)
# Build mesh
mesh = bpy.data.meshes.new('new_mesh')
# Geometry: bulk-fill vertex coordinates and polygon loop tables.
mesh.vertices.add(num_vertices)
mesh.vertices.foreach_set("co", vertices)
mesh.loops.add(num_vertex_indices)
mesh.loops.foreach_set("vertex_index", vertex_index)
mesh.polygons.add(num_loops)
mesh.polygons.foreach_set("loop_start", loop_start)
mesh.polygons.foreach_set("loop_total", loop_total)
mesh.update()
mesh.validate()
# Wrap the mesh in an object and link it into the active scene.
new_object = bpy.data.objects.new('tmp_object', mesh)
bpy.context.scene.collection.objects.link(new_object)
import bpy
import mathutils
def find_mesh_obj_on_scene():
    """Return every MESH-type object linked to the first scene's collection."""
    scene_objects = bpy.data.scenes[0].collection.objects
    return [candidate for candidate in scene_objects if candidate.type == 'MESH']
def vec2flat(vec_list):
    """Flatten a sequence of coordinate vectors into one flat list of scalars."""
    flat = []
    for vector in vec_list:
        flat.extend(vector)
    return flat
if __name__ == '__main__':
    # Disabled experiment kept for reference: join all meshes and apply a
    # SharpRemesh modifier.  NOTE(review): it references `new_object`, which
    # is undefined in this scope — fix before re-enabling.
    '''
    for obj_i in bpy.context.selected_objects:
        obj_i.select_set(False)
    for obj_i in find_mesh_obj_on_scene():
        obj_i.select_set(True)
        bpy.context.view_layer.objects.active = obj_i
    bpy.ops.object.join()
    for mesh_obj_i in find_mesh_obj_on_scene():
        mod = mesh_obj_i.modifiers.get('SharpRemesh')
        if mod is None:
            mod = new_object.modifiers.new('SharpRemesh', 'REMESH')
            mod.mode = 'SHARP'
            mod.octree_depth = 3
            mod.scale = 0.99
            mod.use_remove_disconnected = False
        mesh_obj_i.modifier_apply(modifier='SharpRemesh')
    '''
    # for obj_i in find_mesh_obj_on_scene():
    #     #print(obj_i.dimensions - obj_i.location)
    #     new_mesh = bpy.ops.mesh.primitive_cube_add(location=obj_i.location)
    # print(bpy.context.view_layer.objects.active)
    # Active part: dump the vertex index of every loop of the active object,
    # one '*' marker per polygon — a quick topology sanity check.
    me = bpy.context.object.data
    vcol_map = {}  # NOTE(review): never populated — looks like leftover scaffolding.
    for poly in me.polygons:
        print('*')
        for li in poly.loop_indices:
            vi = me.loops[li].vertex_index
            print(vi)
import bpy
import uuid
LOD_SETUP = [('_LOD0', 0, 0), ('_LOD1', 0.01, 1), ('_LOD2', 0.02, 2)]
def find_mesh_obj_on_scene():
    """Return every MESH-type object linked to the first scene's collection."""
    scene_objects = bpy.data.scenes[0].collection.objects
    return [candidate for candidate in scene_objects if candidate.type == 'MESH']
def clone_object(original_mesh_obj, name_postfix):
    """Duplicate a mesh object (object + mesh datablock), tag the copy with a
    fresh 'tree_guid' and link it into the first scene's collection.

    :param original_mesh_obj: source bpy object
    :param name_postfix: string appended to the source object's name
    :return: the newly linked duplicate object
    """
    duplicate = original_mesh_obj.copy()
    duplicate.name = original_mesh_obj.name + name_postfix
    duplicate.data = original_mesh_obj.data.copy()
    duplicate.animation_data_clear()
    duplicate['tree_guid'] = str(uuid.uuid4())
    bpy.data.scenes[0].collection.objects.link(duplicate)
    return duplicate
def find_candidates_for_lods_by_name(original_meshes):
    '''
    Perform a mesh scan by name to map LOD candidates.

    :param original_meshes: scene mesh objects; each must expose ``.name`` and
        a ``['tree_guid']`` custom property.
    :return: tuple of
        - original_mesh_names_to_lods: {base name: [lod guid or None per level]}
        - all_lods_set: set of guids registered as LODs
        - out_of_level_lods: guids whose LOD index is outside LOD_SETUP
        - bad_named_lods: guids rejected because of duplicate/colliding names
    '''
    original_mesh_names_to_lods = {}
    all_lods_set = set()
    min_lv = 0
    # Highest LOD index allowed by the configured setup (never below 0).
    max_lv = max([lvl for _, _, lvl in LOD_SETUP] + [0])
    out_of_level_lods = []  # when LOD4, LOD5, ... occurs
    bad_named_lods = []  # cases like two identically named LODs for one mesh
    # get candidates
    for obj_i in original_meshes:
        nm = obj_i.name.split('_LOD')
        if len(nm) == 1:
            # Not a candidate for manual lod
            continue
        # Has 'LOD' in name, may be an error
        lod_level = int(nm[-1][0])  # for case 'm_Lava_box_LOD2.001'
        if lod_level > max_lv or lod_level < min_lv:
            out_of_level_lods.append(obj_i['tree_guid'])
            continue  # we can't register lods that are out of LOD_SETUP
        # So we have a proper name like tulips1_LOD0, strip it down to 'tulips'
        # and register the lod.
        base_obj_name = ''.join(nm[:-1])  # for case 'm_Lava_LOD_box_LOD2' -> m_Lava_box
        if base_obj_name not in original_mesh_names_to_lods:
            # One slot per configured LOD level, initially unfilled.
            original_mesh_names_to_lods[base_obj_name] = [None] * len(LOD_SETUP)
        if original_mesh_names_to_lods[base_obj_name][lod_level] is None:
            original_mesh_names_to_lods[base_obj_name][lod_level] = obj_i['tree_guid']
            all_lods_set.add(obj_i['tree_guid'])
        else:
            # possible when a user supplies two same-named lods for a mesh or
            # makes identical names for two meshes.
            bad_named_lods.append(obj_i['tree_guid'])
    return original_mesh_names_to_lods, all_lods_set, out_of_level_lods, bad_named_lods
def fix_lod_name_collisions(original_meshes, original_mesh_names_to_lods, all_lods_set):
    '''
    Fix possible issues with LOD naming:
    1. No source geometry supplied, only "tulips_LOD0, tulips_LOD1, tulips_LOD2" with
       no "tulips" - spawn original mesh called "tulips" as a copy of LOD0.
    2. A mesh could be named like "lavacube_LOD2" - consider it original mesh, rename it
       to "lavacube".

    All three arguments are mutated in place.
    :raise Exception: if a registered LOD0 guid cannot be found on the scene.
    '''
    # Check 1st case "tulips_LOD0, tulips_LOD1, tulips_LOD2"
    for orig_name, lod_uuid_list in original_mesh_names_to_lods.items():
        if any(lod_i is None for lod_i in lod_uuid_list):
            # All LODs should be found for this case
            continue
        if any(obj_j.name == orig_name for obj_j in original_meshes):
            # found "tulips", it's OK
            continue
        # No mesh named "tulips" found, spawn one based on LOD0
        lod0_uuid = lod_uuid_list[0]
        lod_obj = None
        for obj_j in original_meshes:
            if obj_j['tree_guid'] == lod0_uuid:
                lod_obj = obj_j
                break
        if lod_obj is None:
            raise Exception(f'Something got wrong in fix_lod_name_collisions while searching for lod for {orig_name}')
        spawned_original_object = clone_object(lod_obj, '_clone')
        spawned_original_object.name = orig_name
        original_meshes.append(spawned_original_object)
    # Check 2nd case and rename accidentally lod-like named meshes.
    # Iterate over a snapshot: the previous version popped keys from the dict
    # while iterating it, which raises RuntimeError in Python 3.
    for orig_name, lod_uuid_list in list(original_mesh_names_to_lods.items()):
        # if there is no original mesh or there is no lod at some level,
        # consider all meshes as original, rename them to exclude "LOD"
        # from naming.
        orig_mesh_link = None
        for obj_j in original_meshes:
            if obj_j.name == orig_name:
                orig_mesh_link = obj_j
                break
        should_rename_all = (orig_mesh_link is None
                             or any(lod_i is None for lod_i in lod_uuid_list))
        if should_rename_all:
            # rename the mesh and its lods so that after splitting the mesh
            # by materials it won't confuse the algorithm.
            if orig_mesh_link is not None:
                # (previous version renamed via the stale loop variable obj_j)
                orig_mesh_link.name = f'{orig_name}_fixed_orig'
            original_mesh_names_to_lods.pop(orig_name)
            for lod_i, lod_uuid_i in enumerate(lod_uuid_list):
                for obj_i in original_meshes:
                    if obj_i['tree_guid'] == lod_uuid_i:
                        obj_i.name = f'{orig_name}_fixed_{lod_i}'
                        all_lods_set.remove(lod_uuid_i)
                        break
def build_lod_map(original_meshes, original_mesh_names_to_lods, all_lods_set):
    '''
    Build the final manual-LOD structure.

    (Previous version also built a mesh_by_uuid dict that was never used —
    removed as dead code.)

    :param original_meshes: all scene mesh objects (originals plus LOD candidates)
    :param original_mesh_names_to_lods: {base name: [lod guid per level]}
    :param all_lods_set: guids of every object registered as a LOD
    :return: (manual_lods, original_meshes_no_lods) where manual_lods maps an
        original mesh's tree_guid to its LOD guid list and
        original_meshes_no_lods is original_meshes with LOD objects removed.
    '''
    # filter original_meshes list - delete lod candidates from it
    original_meshes_no_lods = [
        obj_i for obj_i in original_meshes
        if obj_i['tree_guid'] not in all_lods_set
    ]
    # Build manual lods structure. By this step we should already have all 'base'
    # names in original_mesh_names_to_lods. If there is no such name, it means
    # that some lod points to an absent name. We decide that this LOD should be
    # an independent mesh as is, with a corrected name ('_LOD' is forbidden).
    manual_lods = {}
    for orig_mesh in original_meshes_no_lods:
        nm = orig_mesh.name.split('_LOD')
        base_obj_name = ''.join(nm)  # for case 'm_Lava_LOD_box' -> m_Lava_box
        if base_obj_name in original_mesh_names_to_lods:
            manual_lods[orig_mesh['tree_guid']] = original_mesh_names_to_lods[base_obj_name]
    return manual_lods, original_meshes_no_lods
def delete_objects(list_of_object_uuid, mesh_by_uuid=None):
    '''
    Delete scene objects whose 'tree_guid' is listed (meshes rejected for
    bad naming).

    :param list_of_object_uuid: tree_guid values of the objects to delete
    :param mesh_by_uuid: optional {tree_guid: object} lookup; built from the
        first scene's collection when omitted.  (The previous version
        referenced module-level names `mesh_by_uuid` and `log` that were never
        defined, so any call raised NameError.)
    '''
    if mesh_by_uuid is None:
        mesh_by_uuid = {}
        for obj_i in bpy.data.scenes[0].collection.objects:
            if 'tree_guid' in obj_i:
                mesh_by_uuid[obj_i['tree_guid']] = obj_i
    # Deselect everything so only the doomed objects are selected.
    for ob in bpy.context.selected_objects:
        ob.select_set(False)
    for mesh_id_i in list_of_object_uuid:
        mesh_i = mesh_by_uuid[mesh_id_i]
        print(f'Deleting {mesh_i.name} due to bad naming')
        mesh_i.select_set(True)
    # Delete all selected objects in one operator call.
    bpy.ops.object.delete()
def split_all_by_material(mesh_obj):
    # Split one mesh object into per-material objects via Blender's
    # separate-by-material operator.
    # This is a global operation, helpful for multimaterial meshes.
    # Deselect everything first so the edit-mode ops affect only mesh_obj.
    for ob in bpy.context.selected_objects:
        ob.select_set(False)
    bpy.context.view_layer.objects.active = mesh_obj
    bpy.ops.object.mode_set(mode='EDIT')
    bpy.ops.mesh.select_all(action='SELECT')
    bpy.ops.mesh.separate(type='MATERIAL')
    bpy.ops.object.mode_set(mode='OBJECT')
def _reindex_meshes(lod_setup):
    '''
    After the geometry is split to new meshes, we have to reindex it.

    Assigns every scene mesh a fresh 'tree_guid' (keeping the previous value
    under 'old_tree_guid'), relinks morph-target parent references through the
    new guids, and rebuilds the original-mesh -> LOD-objects map by name.

    :param lod_setup: iterable of (postfix, threshold, level) tuples; only its
        length is used here (number of LOD slots per mesh).
    :return: (new_orig_mesh, lod_by_orig_mesh)
    :raise Exception: if any original mesh is missing a LOD at some level.
    '''
    mesh_objects = find_mesh_obj_on_scene()
    new_orig_mesh = []
    all_lods = []
    lod_by_orig_mesh = {}
    # Reindex all meshes since old indeces collide after dividing geometry by materials
    guid_remap = {}
    guid_to_names = {}  # just helpful for debug
    for mesh_obj_i in mesh_objects:
        mesh_obj_i['old_tree_guid'] = mesh_obj_i['tree_guid']
        mesh_obj_i['tree_guid'] = str(uuid.uuid4())
        guid_remap[mesh_obj_i['old_tree_guid']] = mesh_obj_i['tree_guid']
        guid_to_names[mesh_obj_i['tree_guid']] = mesh_obj_i.name
    # Relink morph target child-parent link through the freshly issued guids.
    for mesh_obj_i in mesh_objects:
        if 'original_mesh_tree_guid' in mesh_obj_i:
            mesh_obj_i['old_original_mesh_tree_guid'] = mesh_obj_i['original_mesh_tree_guid']
            mesh_obj_i['original_mesh_tree_guid'] = guid_remap[mesh_obj_i['original_mesh_tree_guid']]
    # Find original objects — the 'lod_level' custom property marks a LOD.
    for mesh_obj_i in mesh_objects:
        if not 'lod_level' in mesh_obj_i:
            new_orig_mesh.append(mesh_obj_i)
        else:
            all_lods.append(mesh_obj_i)
    # Init lod_by_orig_mesh dict with one empty slot per configured LOD level.
    for mesh_obj_i in new_orig_mesh:
        l = []
        for _, _, _ in lod_setup:
            l.append(None)
        lod_by_orig_mesh[mesh_obj_i['tree_guid']] = l
    # For each original mesh object find corresponding LOD by name
    for mesh_obj_i in new_orig_mesh:
        parent_name = mesh_obj_i.name  # dragon_2m.002
        orig_mesh_guid = mesh_obj_i['tree_guid']
        for lod_j in all_lods:
            # Drop the single LOD digit but keep the ".002" material suffix:
            nm = lod_j.name.split('_LOD')  # dragon_2m_LOD?.002 -> dragon_2m.002
            if nm[0] + nm[1][1:] == parent_name:
                lod_level = lod_j['lod_level']
                lod_by_orig_mesh[orig_mesh_guid][lod_level] = lod_j
    # Check that we collected lods for all objects
    for orig_mesh_uuid, lods_links_list in lod_by_orig_mesh.items():
        for lod_link in lods_links_list:
            if lod_link is None:
                raise Exception(f'No LOD found for object named {guid_to_names[orig_mesh_uuid]} after geometry split by material, object '
                                f'tree_uuid is {orig_mesh_uuid}, full lod_map name-based is {lod_by_orig_mesh}')
    return new_orig_mesh, lod_by_orig_mesh
if __name__ == '__main__':
    # Find all mesh objects on scene
    original_meshes = find_mesh_obj_on_scene()
    # for mesh_i in find_mesh_obj_on_scene():
    #     split_all_by_material(mesh_i)
    _reindex_meshes(LOD_SETUP)
    # Disabled remainder of the manual-LOD pipeline, kept for reference:
    '''
    # Check meshes for LOD naming: build a map wtih some generic name to uuid of a LOD.
    # original_mesh_names_to_lods: {'tulips1': ['208dcf5b-8c9b-4040-b1dd-586fa938b310',
    #                                           'a7c9a3b0-0533-4418-985b-b858f1315f83',
    #                                           '28ce1b8a-1814-43fc-9946-b2f0407bdd98']}
    # all_lods_set: {'a7c9a3b0-0533-4418-985b-b858f1315f83', '208dcf5b-8c9b-4040-b1dd-586fa938b310', '28ce1b8a-1814-43fc-9946-b2f0407bdd98'}
    # out_of_level_lods: a list of uuid
    # bad_named_lods: a list of uuid
    original_mesh_names_to_lods, all_lods_set, out_of_level_lods, bad_named_lods = find_candidates_for_lods_by_name(original_meshes)
    # Delete objects with bad naming
    delete_objects(out_of_level_lods+bad_named_lods)
    # Spawn original mesh if needed and rename bad named lods
    fix_lod_name_collisions(original_meshes, original_mesh_names_to_lods, all_lods_set)
    # Build lod map
    manual_lods, original_meshes_no_lods = build_lod_map(original_meshes, original_mesh_names_to_lods, all_lods_set)
    print(manual_lods)
    print(original_meshes_no_lods)
    '''
{"data": "[{\"cid\": {\"cidValue\": \"fad34963f4aeab0135fbb202119511a6\"}, \"meshCollider\": {\"meshData\": {\"vertices\": [-0.04982639, -0.04982639, 0.14982639, -0.04982639, -0.04982639, 0.14982639, -0.04982639, -0.04982639, 0.14982639, -0.04982639, 0.1498264, 0.14982639, -0.04982639, 0.1498264, 0.14982639, -0.04982639, 0.1498264, 0.14982639, -0.04982639, -0.04982639, -0.0498264, -0.04982639, -0.04982639, -0.0498264, -0.04982639, -0.04982639, -0.0498264, -0.04982639, 0.1498264, -0.0498264, -0.04982639, 0.1498264, -0.0498264, -0.04982639, 0.1498264, -0.0498264, 0.1498264, -0.04982639, 0.14982639, 0.1498264, -0.04982639, 0.14982639, 0.1498264, -0.04982639, 0.14982639, 0.1498264, 0.1498264, 0.14982639, 0.1498264, 0.1498264, 0.14982639, 0.1498264, 0.1498264, 0.14982639, 0.1498264, -0.04982639, -0.0498264, 0.1498264, -0.04982639, -0.0498264, 0.1498264, -0.04982639, -0.0498264, 0.1498264, 0.1498264, -0.0498264, 0.1498264, 0.1498264, -0.0498264, 0.1498264, 0.1498264, -0.0498264], \"faces\": [18, 1, 7, 0, 9, 6, 11, 16, 22, 2, 15, 4, 21, 8, 10, 23, 14, 20, 18, 12, 1, 0, 3, 9, 11, 5, 16, 2, 13, 15, 21, 19, 8, 23, 17, 14]}}}]"}
\ No newline at end of file
import bpy
PTH = '/home/alexey/Code/ucs/grpc_assets/'
PREFIX = 'tulips_view_'
class ShapeExporter:
    """Exports scene objects grouped by their 'view_id' custom property, one
    .glb file per view.

    Objects flagged with a truthy 'is_tmp_prefab' custom property are skipped.
    """
    def __init__(self, job_path, original_meshes, file_prefix):
        # job_path: directory the .glb files are written into
        # original_meshes: candidate objects (must carry a 'view_id' property)
        # file_prefix: output filename prefix, e.g. 'tulips_view_'
        self.job_path = job_path
        self.file_prefix = file_prefix
        self.original_meshes = original_meshes
    def run(self) -> bool:
        """Export each view's objects to '<job_path>/<prefix>_<view_id>.glb'.

        NOTE(review): source indentation was ambiguous here — as written, the
        deselect/select/export sequence runs once per object, so a view with
        several objects re-exports (overwrites) the same file with only one
        object selected each time.  Confirm intended nesting against the
        repository.
        """
        for view_id_i in self._get_view_indexes_from_scene(self.original_meshes):
            for obj_i in self._query_view_by_id_from_scene(self.original_meshes, view_id_i):
                # Deselect everything so use_selection=True exports only obj_i.
                for ob in bpy.context.selected_objects:
                    ob.select_set(False)
                obj_i.select_set(True)
                bpy.context.view_layer.objects.active = obj_i
                file_path = f'{self.job_path}/{self.file_prefix}_{view_id_i}.glb'
                # Only selected
                bpy.ops.export_scene.gltf(filepath=file_path, check_existing=True, export_format='GLB', ui_tab='GENERAL',
                                          export_copyright='', export_image_format='AUTO', export_texture_dir='',
                                          export_texcoords=True, export_normals=True,
                                          export_draco_mesh_compression_enable=False, export_draco_mesh_compression_level=6,
                                          export_draco_position_quantization=14, export_draco_normal_quantization=10,
                                          export_draco_texcoord_quantization=12,
                                          export_draco_generic_quantization=12, export_tangents=False,
                                          export_materials='EXPORT', export_colors=True,
                                          export_cameras=False, use_selection=True,
                                          export_extras=False, export_yup=True, export_apply=False,
                                          export_animations=True, export_frame_range=True, export_frame_step=1,
                                          export_force_sampling=True, export_nla_strips=True, export_def_bones=False,
                                          export_current_frame=False, export_skins=True,
                                          export_all_influences=False, export_morph=True, export_morph_normal=True,
                                          export_morph_tangent=False, export_lights=False, export_displacement=False,
                                          will_save_settings=False, filter_glob='*.glb;*.gltf')
        return True
    def _get_view_indexes_from_scene(self, original_meshes):
        # Collect the distinct 'view_id' values, skipping tmp prefabs,
        # sorted for a deterministic export order.
        original_views = set()
        for mesh_i in original_meshes:
            if 'is_tmp_prefab' in mesh_i and mesh_i['is_tmp_prefab']:
                continue
            original_views.add(mesh_i['view_id'])
        original_views = list(original_views)
        original_views.sort()
        return original_views
    def _query_view_by_id_from_scene(self, original_meshes, selected_view_id):
        # Yield every non-prefab object belonging to the given view.
        for mesh_i in original_meshes:
            if 'is_tmp_prefab' in mesh_i and mesh_i['is_tmp_prefab']:
                continue
            if mesh_i['view_id'] == selected_view_id:
                yield mesh_i
def find_mesh_obj_on_scene():
    """Return every MESH-type object linked to the first scene's collection."""
    scene_objects = bpy.data.scenes[0].collection.objects
    return [candidate for candidate in scene_objects if candidate.type == 'MESH']
# Script entry: export every view found on the scene as a separate GLB file
# under PTH with the PREFIX filename prefix.
all_objects = find_mesh_obj_on_scene()
exporter = ShapeExporter(PTH, all_objects, PREFIX)
exporter.run()
import bpy
import bmesh
import mathutils
import uuid
def clone_object(original_mesh_obj, name_postfix):
    """Duplicate a mesh object (object + mesh datablock), tag the copy with a
    fresh 'tree_guid' and link it into the first scene's collection.

    :param original_mesh_obj: source bpy object
    :param name_postfix: string appended to the source object's name
    :return: the newly linked duplicate object
    """
    duplicate = original_mesh_obj.copy()
    duplicate.name = original_mesh_obj.name + name_postfix
    duplicate.data = original_mesh_obj.data.copy()
    duplicate.animation_data_clear()
    duplicate['tree_guid'] = str(uuid.uuid4())
    bpy.data.scenes[0].collection.objects.link(duplicate)
    return duplicate
def mesh2bm(mesh_i):
    """Copy an object's mesh datablock into a freshly created BMesh."""
    working_bm = bmesh.new()
    working_bm.from_mesh(mesh_i.data)
    return working_bm
def find_mesh_obj_on_scene():
......@@ -38,36 +55,113 @@ def compute_distance_to_geom(mesh_obj, conv_hull_vectors):
return conv_vert_to_mesh_distance
# NOTE(review): this chunk was extracted from a diff view and appears to
# interleave removed and added lines — several statements below reference
# names (`bm`, `mesh_obj_i`) not defined in this function. Confirm the real
# body against the repository before relying on it.
def create_optimal_lod(original_mesh_obj, postfix, optimal_shrink, start_ratio, decimate_step, decimate_boundaries, max_iterations=20):
    '''
    Find an optimal compression ratio for a mesh and apply it. Iteratively spawns and destroys mesh
    objects and computes how much they are shrunk relative to original mesh size.
    mesh_objects = find_mesh_obj_on_scene()
    for mesh_obj_i in mesh_objects:
    original_size = mesh_obj_i.dimensions.length
    bm = bmesh.new()
    bm.from_mesh(mesh_obj_i.to_mesh())
    :param original_mesh_obj:
    :param postfix:
    :param optimal_shrink:
    :param decimate_step:
    :param max_iterations:
    :return:
    '''
    # for ob in bpy.context.selected_objects:
    #     ob.select_set(False)
    #
    # NOTE(review): `bm` and `mesh_obj_i` are undefined here — these lines
    # look like stale residue of the pre-refactor version (see the code
    # fragment trapped inside the docstring above).
    conv_hull_vectors = init_conv_hull_geom(bm)
    d = compute_distance_to_geom(mesh_obj_i, conv_hull_vectors)
    print(f'distance before lodify = {d}, share to original size ={d/original_size}')
    # Remove double vertices on the original mesh, otherwise decimate shall do horrible things
    for ob in bpy.context.selected_objects:
        ob.select_set(False)
    bpy.context.view_layer.objects.active = original_mesh_obj
    # bpy.ops.object.mode_set(mode='EDIT')
    # bpy.ops.mesh.remove_doubles()
    # bpy.ops.object.mode_set(mode='OBJECT')
    #original_mesh_size = original_mesh_obj.dimensions.length
    # NOTE(review): `mesh_obj_i` is also undefined in the block below.
    bpy.context.view_layer.objects.active = mesh_obj_i
    bpy.ops.object.mode_set(mode='EDIT')
    bpy.ops.mesh.remove_doubles()
    bpy.ops.mesh.decimate(ratio=0.3)
    bpy.ops.object.mode_set(mode='OBJECT')
    d = compute_distance_to_geom(mesh_obj_i, conv_hull_vectors)
    print(f'distance before lodify = {d}, share to original size ={d/original_size}')
    # Write down convex hull
    original_bm = mesh2bm(original_mesh_obj)
    original_volume = original_bm.calc_volume()
    #original_conv_hull = init_conv_hull_geom(original_bm)
    # Try a candidate
    continue_shrink = True
    decimate_ratio = start_ratio
    c = 0
    last_dir = 0  # flag to decide when to stop, 1 - increase step, -1 - decrease step
    distance_to_lod = 0
    metric = None
    lod_candidate_object = None
    while continue_shrink:
        # Clone the original, decimate the clone, and measure its volume
        # relative to the original; binary-search-like ratio adjustment.
        for ob in bpy.context.selected_objects:
            ob.select_set(False)
        lod_candidate_object = clone_object(original_mesh_obj, postfix)
        bpy.context.view_layer.objects.active = lod_candidate_object
        lod_candidate_object.select_set(True)
        bpy.ops.object.mode_set(mode='EDIT')
        bpy.ops.mesh.remove_doubles()
        bpy.ops.mesh.decimate(ratio=decimate_ratio)
        bpy.ops.object.mode_set(mode='OBJECT')
        #distance_to_lod = compute_distance_to_geom(lod_candidate_object, original_conv_hull)
        bmesh_lod = mesh2bm(lod_candidate_object)
        lod_volume = bmesh_lod.calc_volume()
        metric = lod_volume / original_volume
        bmesh_lod.free()
        print(f'current shrink = {metric}, decimate_ratio = {decimate_ratio}')
        if metric < optimal_shrink:
            # Shrunk too much — back the ratio off; stop on a direction flip
            # or when the lower boundary is crossed.
            decimate_ratio -= decimate_step
            if last_dir == 1:
                continue_shrink = False
            last_dir = -1
            if decimate_boundaries[1] > decimate_ratio:
                continue_shrink = False
        else:
            # Not shrunk enough — raise the ratio; stop on a direction flip
            # or when the upper boundary is crossed.
            decimate_ratio += decimate_step
            if last_dir == -1:
                continue_shrink = False
            last_dir = 1
            if decimate_boundaries[0] < decimate_ratio:
                continue_shrink = False
        c += 1
        if c > max_iterations:
            continue_shrink = False
        if continue_shrink:
            # Not yet final decimate_ratio, delete candidate
            lod_candidate_object.select_set(True)
            bpy.ops.object.delete()
    if abs(metric) > optimal_shrink:
        # We failed, no lods here — replace the candidate with a plain
        # (undecimated, doubles-removed) clone of the original.
        print('No LODS')
        lod_candidate_object.select_set(True)
        bpy.ops.object.delete()
        lod_candidate_object = clone_object(original_mesh_obj, postfix)
        bpy.context.view_layer.objects.active = lod_candidate_object
        lod_candidate_object.select_set(True)
        bpy.ops.object.mode_set(mode='EDIT')
        bpy.ops.mesh.remove_doubles()
        bpy.ops.object.mode_set(mode='OBJECT')
    return lod_candidate_object, decimate_ratio, metric, c
# Script entry: attempt to build a _LOD1 for every mesh on the scene.
mesh_objects = find_mesh_obj_on_scene()
LOD_SETUP = [('_LOD0', 0, 0), ('_LOD1', 0.005, 1), ('_LOD2', 0.01, 2)]
for obj_i in mesh_objects:
    # original_bm = mesh2bm(obj_i)
    # print(original_bm.calc_volume())
    create_optimal_lod(obj_i,
                       '_LOD1',       # postfix
                       0.005,         # optimal_shrink target
                       0.8,           # start_ratio
                       0.01,          # decimate_step
                       (0.99, 0.01),  # decimate_boundaries (upper, lower)
                       50)            # max_iterations
{"data": "[{\"cid\": {\"cidValue\": \"e65504cd11e9f30a0411843a9a0b45d5\"}, \"meshModel\": {\"cid\": {}, \"source\": {\"vertices\": [-0.05, -0.05, 0.15, 0.15, -0.05, 0.15, 0.15, 0.15, 0.15, -0.05, 0.15, 0.15, -0.05, 0.15, 0.15, 0.15, 0.15, 0.15, 0.15, 0.15, -0.05, -0.05, 0.15, -0.05, -0.05, 0.15, -0.05, 0.15, 0.15, -0.05, 0.15, -0.05, -0.05, -0.05, -0.05, -0.05, -0.05, -0.05, -0.05, 0.15, -0.05, -0.05, 0.15, -0.05, 0.15, -0.05, -0.05, 0.15, 0.15, -0.05, 0.15, 0.15, -0.05, -0.05, 0.15, 0.15, -0.05, 0.15, 0.15, 0.15, -0.05, -0.05, -0.05, -0.05, -0.05, 0.15, -0.05, 0.15, 0.15, -0.05, 0.15, -0.05], \"normals\": [0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, -0.0, 0.0, 1.0, -0.0, 0.0, 1.0, -0.0, 0.0, 1.0, -0.0, 0.0, 0.0, -1.0, 0.0, 0.0, -1.0, 0.0, 0.0, -1.0, 0.0, 0.0, -1.0, 0.0, -1.0, -0.0, 0.0, -1.0, -0.0, 0.0, -1.0, -0.0, 0.0, -1.0, -0.0, 1.0, 0.0, -0.0, 1.0, 0.0, -0.0, 1.0, 0.0, -0.0, 1.0, 0.0, -0.0, -1.0, 0.0, -0.0, -1.0, 0.0, -0.0, -1.0, 0.0, -0.0, -1.0, 0.0, -0.0], \"uv\": [0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0], \"faces\": [3, 0, 1, 1, 2, 3, 7, 4, 5, 5, 6, 7, 11, 8, 9, 9, 10, 11, 15, 12, 13, 13, 14, 15, 19, 16, 17, 17, 18, 19, 23, 20, 21, 21, 22, 23]}, \"lodData\": [{\"mesh\": {\"vertices\": [-0.05, -0.05, 0.15, 0.15, -0.05, 0.15, 0.15, 0.15, 0.15, -0.05, 0.15, 0.15, -0.05, 0.15, 0.15, 0.15, 0.15, 0.15, 0.15, 0.15, -0.05, -0.05, 0.15, -0.05, -0.05, 0.15, -0.05, 0.15, 0.15, -0.05, 0.15, -0.05, -0.05, -0.05, -0.05, -0.05, -0.05, -0.05, -0.05, 0.15, -0.05, -0.05, 0.15, -0.05, 0.15, -0.05, -0.05, 0.15, 0.15, -0.05, 0.15, 0.15, -0.05, -0.05, 0.15, 0.15, -0.05, 0.15, 0.15, 0.15, -0.05, -0.05, -0.05, -0.05, -0.05, 0.15, -0.05, 0.15, 0.15, -0.05, 0.15, -0.05], \"normals\": [0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, -0.0, 
0.0, 1.0, -0.0, 0.0, 1.0, -0.0, 0.0, 1.0, -0.0, 0.0, 0.0, -1.0, 0.0, 0.0, -1.0, 0.0, 0.0, -1.0, 0.0, 0.0, -1.0, 0.0, -1.0, -0.0, 0.0, -1.0, -0.0, 0.0, -1.0, -0.0, 0.0, -1.0, -0.0, 1.0, 0.0, -0.0, 1.0, 0.0, -0.0, 1.0, 0.0, -0.0, 1.0, 0.0, -0.0, -1.0, 0.0, -0.0, -1.0, 0.0, -0.0, -1.0, 0.0, -0.0, -1.0, 0.0, -0.0], \"uv\": [0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0], \"faces\": [3, 0, 1, 1, 2, 3, 7, 4, 5, 5, 6, 7, 11, 8, 9, 9, 10, 11, 15, 12, 13, 13, 14, 15, 19, 16, 17, 17, 18, 19, 23, 20, 21, 21, 22, 23]}}, {\"lodNum\": 1, \"mesh\": {\"vertices\": [-0.05, -0.05, 0.15, -0.05, -0.05, 0.15, -0.05, -0.05, 0.15, 0.15, -0.05, 0.15, 0.15, -0.05, 0.15, 0.15, -0.05, 0.15, 0.15, 0.15, 0.15, 0.15, 0.15, 0.15, 0.15, 0.15, 0.15, -0.05, 0.15, 0.15, -0.05, 0.15, 0.15, -0.05, 0.15, 0.15, -0.05, -0.05, -0.05, -0.05, -0.05, -0.05, -0.05, -0.05, -0.05, 0.15, -0.05, -0.05, 0.15, -0.05, -0.05, 0.15, -0.05, -0.05, 0.15, 0.15, -0.05, 0.15, 0.15, -0.05, 0.15, 0.15, -0.05, -0.05, 0.15, -0.05, -0.05, 0.15, -0.05, -0.05, 0.15, -0.05], \"normals\": [-1.0, 0.0, -0.0, 0.0, -1.0, -0.0, 0.0, 0.0, 1.0, 0.0, -1.0, -0.0, 0.0, 0.0, 1.0, 1.0, 0.0, -0.0, 0.0, 0.0, 1.0, 0.0, 1.0, -0.0, 1.0, 0.0, -0.0, -1.0, 0.0, -0.0, 0.0, 0.0, 1.0, 0.0, 1.0, -0.0, -1.0, 0.0, -0.0, 0.0, -1.0, -0.0, 0.0, 0.0, -1.0, 0.0, -1.0, -0.0, 0.0, 0.0, -1.0, 1.0, 0.0, -0.0, 0.0, 0.0, -1.0, 0.0, 1.0, -0.0, 1.0, 0.0, -0.0, -1.0, 0.0, -0.0, 0.0, 0.0, -1.0, 0.0, 1.0, -0.0], \"uv\": [1.0, 1.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0], \"faces\": [10, 2, 4, 4, 6, 10, 23, 11, 7, 7, 19, 23, 14, 22, 18, 18, 16, 14, 1, 13, 15, 15, 3, 1, 8, 5, 17, 17, 20, 8, 21, 12, 
0, 0, 9, 21]}}, {\"lodNum\": 2, \"mesh\": {\"vertices\": [-0.05, -0.05, 0.15, -0.05, -0.05, 0.15, -0.05, -0.05, 0.15, 0.15, -0.05, 0.15, 0.15, -0.05, 0.15, 0.15, -0.05, 0.15, 0.15, 0.15, 0.15, 0.15, 0.15, 0.15, 0.15, 0.15, 0.15, -0.05, 0.15, 0.15, -0.05, 0.15, 0.15, -0.05, 0.15, 0.15, -0.05, -0.05, -0.05, -0.05, -0.05, -0.05, -0.05, -0.05, -0.05, 0.15, -0.05, -0.05, 0.15, -0.05, -0.05, 0.15, -0.05, -0.05, 0.15, 0.15, -0.05, 0.15, 0.15, -0.05, 0.15, 0.15, -0.05, -0.05, 0.15, -0.05, -0.05, 0.15, -0.05, -0.05, 0.15, -0.05], \"normals\": [-1.0, 0.0, -0.0, 0.0, -1.0, -0.0, 0.0, 0.0, 1.0, 0.0, -1.0, -0.0, 0.0, 0.0, 1.0, 1.0, 0.0, -0.0, 0.0, 0.0, 1.0, 0.0, 1.0, -0.0, 1.0, 0.0, -0.0, -1.0, 0.0, -0.0, 0.0, 0.0, 1.0, 0.0, 1.0, -0.0, -1.0, 0.0, -0.0, 0.0, -1.0, -0.0, 0.0, 0.0, -1.0, 0.0, -1.0, -0.0, 0.0, 0.0, -1.0, 1.0, 0.0, -0.0, 0.0, 0.0, -1.0, 0.0, 1.0, -0.0, 1.0, 0.0, -0.0, -1.0, 0.0, -0.0, 0.0, 0.0, -1.0, 0.0, 1.0, -0.0], \"uv\": [1.0, 1.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0], \"faces\": [10, 2, 4, 4, 6, 10, 23, 11, 7, 7, 19, 23, 14, 22, 18, 18, 16, 14, 1, 13, 15, 15, 3, 1, 8, 5, 17, 17, 20, 8, 21, 12, 0, 0, 9, 21]}}]}}]"}
\ No newline at end of file
# more here https://github.com/p2or/blender-shapekey-extras/blob/master/shape-key-extras.py
import bpy
from io_scene_gltf2.blender.exp.gltf2_blender_extract import extract_primitives
from bpy_extras.io_utils import axis_conversion
import hashlib
import uuid
def find_mesh_obj_on_scene():
    """Return every object of type 'MESH' linked to the first scene's root collection."""
    scene_objects = bpy.data.scenes[0].collection.objects
    return [candidate for candidate in scene_objects if candidate.type == 'MESH']
def vec2flat(vec_list):
    """Flatten an iterable of coordinate vectors into one flat list.

    Args:
        vec_list: iterable of iterables (e.g. mathutils.Vector, tuples or lists),
            as produced by the glTF exporter's attribute arrays.

    Returns:
        A single list with all coordinates concatenated in order; empty list
        for empty input.
    """
    # Nested comprehension replaces the original double append loop.
    return [coord for vec_i in vec_list for coord in vec_i]
def get_geometry(blender_mesh, world_transform):
    """Extract flat geometry arrays from a Blender mesh object via the glTF exporter.

    Args:
        blender_mesh: Blender object of type 'MESH'; its ``.data`` is handed to
            io_scene_gltf2's ``extract_primitives``.
        world_transform: unused; kept for interface compatibility with callers.

    Returns:
        Tuple ``(vertices, normals, uvs, faces)`` of flat lists. All four lists
        are empty when the exporter yields no primitives (e.g. an LOD whose
        mesh has no polygons).
    """
    print(f'extracting mesh {blender_mesh}')
    # Minimal settings dict accepted by extract_primitives.
    # gltf_apply=True bakes modifiers; gltf_yup=True converts to glTF's Y-up axes.
    export_settings = {'timestamp': None, 'gltf_filepath': None, 'gltf_filedirectory': None, 'gltf_texturedirectory': None,
                       'gltf_format': 'GLB', 'gltf_image_format': 'AUTO', 'gltf_copyright': '', 'gltf_texcoords': True,
                       'gltf_normals': True, 'gltf_tangents': False, 'gltf_draco_mesh_compression': False,
                       'gltf_materials': 'EXPORT', 'gltf_colors': True, 'gltf_cameras': False, 'gltf_selected': False,
                       'gltf_layers': True, 'gltf_extras': False, 'gltf_yup': True, 'gltf_apply': True,
                       'gltf_current_frame': False, 'gltf_animations': True, 'gltf_frame_range': True,
                       'gltf_force_sampling': True, 'gltf_def_bones': True, 'gltf_nla_strips': True,
                       'gltf_skins': True, 'gltf_all_vertex_influences': False, 'gltf_frame_step': 1, 'gltf_morph': False,
                       'gltf_morph_normal': False, 'gltf_morph_tangent': False, 'gltf_lights': False,
                       'gltf_displacement': False, 'gltf_binary': bytearray(b''), 'gltf_binaryfilename': None,
                       'gltf_user_extensions': [], 'pre_export_callbacks': [], 'post_export_callbacks': [],
                       'gltf_loose_edges': False, 'gltf_loose_points': False
                       }
    gltf_primitives = extract_primitives(None, blender_mesh.data, None, blender_mesh, None, None, export_settings)
    if not gltf_primitives:
        # BUGFIX: the original called undefined name `log` here (NameError on the
        # empty-geometry path). Report via print like the rest of this script.
        print('ERROR: zero primitives after blender to glb export - possibly bad LOD with no polygons')
        return [], [], [], []
    # Only the first primitive is used, matching the original behavior;
    # multi-material meshes should be split beforehand.
    primitive = gltf_primitives[0]
    vertices = vec2flat(primitive['attributes']['POSITION'])
    normals = vec2flat(primitive['attributes']['NORMAL'])
    uvs = vec2flat(primitive['attributes']['TEXCOORD_0'])
    faces = list(primitive['indices'])
    return vertices, normals, uvs, faces
def clone_object(original_mesh_obj, name_postfix):
    """Duplicate a mesh object with its own mesh data, tag it with a fresh
    tree_guid, link it into the first scene, and return the duplicate.

    Animation data is cleared on the copy so it does not inherit the
    original's action bindings.
    """
    duplicate = original_mesh_obj.copy()
    duplicate.name = original_mesh_obj.name + name_postfix
    duplicate.data = original_mesh_obj.data.copy()
    duplicate.animation_data_clear()
    duplicate['tree_guid'] = str(uuid.uuid4())
    bpy.data.scenes[0].collection.objects.link(duplicate)
    return duplicate
if __name__ == '__main__':
    # For every mesh on the scene: clone it, bake all shape keys at a fixed
    # weight into the clone, then compare geometry md5 hashes before/after
    # to verify the morph actually changed the exported geometry.
    mesh_objects = find_mesh_obj_on_scene()
    for mesh_obj_i in mesh_objects:
        new_obj = clone_object(mesh_obj_i, '_morphed')
        # Hash of the geometry before any shape-key changes.
        verts, normals, uvs, faces = get_geometry(new_obj, None)
        hv = hashlib.md5(str([verts, normals, uvs, faces]).encode('utf-8')).hexdigest()
        print(f'md5 before = {hv}')
        # setup keyshape values: unmute every shape key and set a small weight
        obj_shape_keys = new_obj.to_mesh().shape_keys
        for key_name, key_value in obj_shape_keys.key_blocks.items():
            key_value.mute = False
            key_value.value = 0.1
            print(f'{key_name}:{key_value.slider_min}:{key_value.slider_max}:{key_value.value}')
        # Removing the shape keys bakes the current mix into the base mesh.
        # NOTE(review): this mutates key_blocks while iterating over a snapshot
        # of its values — appears to work in Blender, but confirm on upgrade.
        for i, shape_key in enumerate(obj_shape_keys.key_blocks.values()):
            new_obj.shape_key_remove(shape_key)
        # Hash again; a differing md5 confirms the morph was applied.
        verts, normals, uvs, faces = get_geometry(new_obj, None)
        hv = hashlib.md5(str([verts, normals, uvs, faces]).encode('utf-8')).hexdigest()
        print(f'md5 after = {hv}')
# NOTE: dead experiment code below, kept as a string literal for reference.
# It sweeps shape-key values (0.1 then 0.5) over the original scene objects
# and compares md5 hashes of extracted geometry and raw vertex coordinates.
'''
for mesh_obj_i in mesh_objects:
    obj_shape_keys = mesh_obj_i.to_mesh().shape_keys
    print(obj_shape_keys)
    #c = 0
    for key_name, key_value in obj_shape_keys.key_blocks.items():
        #mesh_obj_i.active_shape_key_index = c
        key_value.mute = False
        key_value.value = 0.1
        print(f'{key_name}:{key_value.slider_min}:{key_value.slider_max}:{key_value.value}')
        #c += 1
    #mesh_obj_i.data.update()
    #bpy.context.view_layer.update()
for mesh_obj_i in mesh_objects:
    depsgraph = bpy.context.evaluated_depsgraph_get()
    blender_mesh_owner = mesh_obj_i.evaluated_get(depsgraph)
    new_mesh = mesh_obj_i.to_mesh(preserve_all_data_layers=True, depsgraph=depsgraph)
    #new_mesh.update(calc_edges=True)
    verts, normals, uvs, faces = get_geometry(mesh_obj_i, None)
    hv = hashlib.md5(str([verts, normals, uvs, faces]).encode('utf-8')).hexdigest()
    print(f'md5 before = {hv}')
    l = []
    for v_i in new_mesh.vertices:
        l.append(v_i.co)
    hv = hashlib.md5(str(l).encode('utf-8')).hexdigest()
    print(f'md5 mesh.vertices before = {hv}')
    mesh_obj_i.to_mesh_clear()
for mesh_obj_i in mesh_objects:
    obj_shape_keys = mesh_obj_i.to_mesh().shape_keys
    print(obj_shape_keys)
    for key_name, key_value in obj_shape_keys.key_blocks.items():
        key_value.mute = False
        key_value.value = 0.5
        print(f'{key_name}:{key_value.slider_min}:{key_value.slider_max}:{key_value.value}')
    #mesh_obj_i.data.update()
    #bpy.context.view_layer.update()
for mesh_obj_i in mesh_objects:
    depsgraph = bpy.context.evaluated_depsgraph_get()
    blender_mesh_owner = mesh_obj_i.evaluated_get(depsgraph)
    new_mesh = mesh_obj_i.to_mesh(preserve_all_data_layers=True, depsgraph=depsgraph)
    #new_mesh.update(calc_edges=True)
    verts, normals, uvs, faces = get_geometry(mesh_obj_i, None)
    hv = hashlib.md5(str([verts, normals, uvs, faces]).encode('utf-8')).hexdigest()
    print(f'md5 after = {hv}')
    l = []
    for v_i in new_mesh.vertices:
        l.append(v_i.co)
    hv = hashlib.md5(str(l).encode('utf-8')).hexdigest()
    print(f'md5 mesh.vertices after = {hv}')
    mesh_obj_i.to_mesh_clear()
'''
import bpy
def split_all_by_material(mesh_obj):
    """Split mesh_obj into one object per material using Blender operators.

    This is a global operation, helpful for multimaterial meshes. It relies on
    bpy.context (active object, selection, mode), so the exact statement order
    matters and the scene selection state is modified as a side effect.
    """
    # Deselect everything so the operators act only on mesh_obj.
    for ob in bpy.context.selected_objects:
        ob.select_set(False)
    bpy.context.view_layer.objects.active = mesh_obj
    # Select all geometry in edit mode, separate by material, return to object mode.
    bpy.ops.object.mode_set(mode='EDIT')
    bpy.ops.mesh.select_all(action='SELECT')
    bpy.ops.mesh.separate(type='MATERIAL')
    bpy.ops.object.mode_set(mode='OBJECT')
def find_mesh_obj_on_scene():
    """Collect all MESH objects from the first scene's master collection."""
    return [obj for obj in bpy.data.scenes[0].collection.objects if obj.type == 'MESH']
# Debug: dump the X coordinate of every vertex of every mesh on the scene.
for mesh_i in find_mesh_obj_on_scene():
    for vert_i in mesh_i.data.vertices:
        print(vert_i.co.x)
# Commented-out probe: per-polygon vertex counts and indices of the last mesh.
# for loop_i in mesh_i.data.polygons:
#     print(len(loop_i.vertices))
#     print(loop_i.vertices[0], loop_i.vertices[1], loop_i.vertices[2])
\ No newline at end of file
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment