diff --git a/i_scene_cp77_gltf/exporters/sectors_export.py b/i_scene_cp77_gltf/exporters/sectors_export.py index 073d64b..d502fe2 100644 --- a/i_scene_cp77_gltf/exporters/sectors_export.py +++ b/i_scene_cp77_gltf/exporters/sectors_export.py @@ -370,42 +370,62 @@ def exportSectors( filename): wIMNs+=1 #print(wIMNs) meshname = data['mesh']['DepotPath']['$value'].replace('\\', os.sep) - + #if 'chopstick' in meshname: + # print('worldInstancedMeshNode - ',meshname) if not checkexists(meshname, Masters): print(meshname, ' not found in masters') continue + num=data['worldTransformsBuffer']['numElements'] start=data['worldTransformsBuffer']['startIndex'] if(meshname != 0): for idx in range(start, start+num): + bufferID=0 + if 'Data' in data['worldTransformsBuffer']['sharedDataBuffer'].keys(): + inst_trans=data['worldTransformsBuffer']['sharedDataBuffer']['Data']['buffer']['Data']['Transforms'][idx] + + elif 'HandleRefId' in data['worldTransformsBuffer']['sharedDataBuffer'].keys(): + bufferID = int(data['worldTransformsBuffer']['sharedDataBuffer']['HandleRefId']) + ref=e + for n in nodes: + if n['HandleId']==str(bufferID-1): + ref=n + inst_trans = ref['Data']['worldTransformsBuffer']['sharedDataBuffer']['Data']['buffer']['Data']['Transforms'][idx] + # store the bufferID for when we add new stuff. + if Sector_additions_coll: + Sector_additions_coll['Inst_bufferID']=bufferID obj_col=find_col(i,idx,Sector_coll) - if obj_col: + if obj_col and inst_trans: if len(obj_col.objects)>0: obj=obj_col.objects[0] - if obj.matrix_world!=Matrix(obj['matrix']): + # Check for Position and if changed delete the original and add to the new sector + if obj.matrix_world!=Matrix(obj_col['matrix']): deletions[sectorName].append(obj_col) + new_ni=len(template_nodes) + template_nodes.append(copy.deepcopy(nodes[obj_col['nodeIndex']])) + # might need to convert instanced to static here, not sure what the best approach is. 
+ createNodeData(template_nodeData, obj_col, new_ni, obj,ID) + ID+=1 else: - deletions[sectorName].append(obj_col) - obj_col['exported']=True - # Need to change deletions to pass the values otherwise cant deal with deleted collectors + if obj_col: + deletions[sectorName].append(obj_col) - case 'worldStaticDecalNode': #print('worldStaticDecalNode') - instances = [x for x in t if x['NodeIndex'] == i] - for idx,inst in enumerate(instances): + instances = [(x,y) for y,x in enumerate(t) if x['NodeIndex'] == i] + for idx,(inst,instNid) in enumerate(instances): obj=find_decal(i,idx,Sector_coll) if obj: # Check for Position and if changed delete the original and add to the new sector if obj.matrix_world!=Matrix(obj['matrix']): - deletions['Decals'][sectorName].append({'nodeIndex':idx,'NodeComment' :obj.name, 'NodeType' : obj['nodeType']}) + deletions['Decals'][sectorName].append({'nodeIndex':instNid,'NodeComment' :obj.name, 'NodeType' : obj['nodeType']}) new_ni=len(template_nodes) template_nodes.append(copy.deepcopy(nodes[obj['nodeIndex']])) createNodeData(template_nodeData, Sector_coll, new_ni, obj,ID) ID+=1 - obj['exported']=True else: - deletions['Decals'][sectorName].append({'nodeIndex':idx,'NodeComment' :'DELETED Decal nid:'+str(inst['NodeIndex'])+' ndid:'+str(idx), 'NodeType' : 'worldStaticDecalNode'}) + deletions['Decals'][sectorName].append({'nodeIndex':instNid,'NodeComment' :'DELETED Decal nid:'+str(inst['NodeIndex'])+' ndid:'+str(instNid), 'NodeType' : 'worldStaticDecalNode'}) + case 'worldStaticMeshNode' | 'worldBuildingProxyMeshNode' | 'worldGenericProxyMeshNode'| 'worldTerrainProxyMeshNode': if isinstance(e, dict) and 'mesh' in data.keys(): @@ -417,7 +437,6 @@ def exportSectors( filename): obj_col=find_col(i,idx,Sector_coll) #print(obj_col) if obj_col: - obj_col['exported']=True if len(obj_col.objects)>0: obj=obj_col.objects[0] # Check for Position and if changed delete the original and add to the new sector @@ -429,7 +448,8 @@ def exportSectors( filename): 
createNodeData(template_nodeData, obj_col, new_ni, obj,ID) ID+=1 else: - deletions[sectorName].append(obj_col) + if obj_col: + deletions[sectorName].append(obj_col) case 'worldEntityNode': if isinstance(e, dict) and 'entityTemplate' in data.keys(): @@ -440,10 +460,10 @@ def exportSectors( filename): for idx,inst in enumerate(instances): obj_col=find_col(i,idx,Sector_coll) #print(obj_col) + # THIS IS WRONG, the entity meshes are in child collectors not objects if obj_col and len(obj_col.children)>0: if len(obj_col.children[0].objects)>0: # Check for Position and if changed delete the original and add to the new sector - # Find a better way to do this if obj.matrix_world!=Matrix(obj_col['matrix']): deletions[sectorName].append(obj_col) new_ni=len(template_nodes) @@ -451,16 +471,76 @@ def exportSectors( filename): createNodeData(template_nodeData, obj_col, new_ni, obj,ID) ID+=1 - obj_col['exported']=True else: if obj_col: - deletions[sectorName].append(obj_col) - obj_col['exported']=True + deletions[sectorName].append(obj_col) - + case 'worldInstancedDestructibleMeshNode': + #print('worldInstancedDestructibleMeshNode',i) + if isinstance(e, dict) and 'mesh' in data.keys(): + meshname = data['mesh']['DepotPath']['$value'].replace('\\', os.sep) + num=data['cookedInstanceTransforms']['numElements'] + start=data['cookedInstanceTransforms']['startIndex'] + instances = [x for x in t if x['NodeIndex'] == i] + for tlidx,inst in enumerate(instances): + for idx in range(start, start+num): + bufferID=0 + basic_trans=None + # Transforms are inside the cookedInstanceTransforms in a buffer + if 'Data' in data['cookedInstanceTransforms']['sharedDataBuffer'].keys(): + basic_trans=data['cookedInstanceTransforms']['sharedDataBuffer']['Data']['buffer']['Data']['Transforms'][idx] + + # Transforms are in a shared buffer in another node, so get the reference and find the transform data + elif 'HandleRefId' in data['cookedInstanceTransforms']['sharedDataBuffer'].keys(): + bufferID = 
int(data['cookedInstanceTransforms']['sharedDataBuffer']['HandleRefId']) + ref=e + for n in nodes: + if n['HandleId']==str(bufferID-1): + ref=n + basic_trans = ref['Data']['cookedInstanceTransforms']['sharedDataBuffer']['Data']['buffer']['Data']['Transforms'][idx] + #print(basic_trans) + else : + print(e) + # store the bufferID for when we add new stuff. + if Sector_additions_coll: + Sector_additions_coll['Dest_bufferID']=bufferID + #print('Setting Dest_bufferID to ',bufferID) + + # the Transforms are stored as 2 parts, a basic transform applied to all the instances and individual ones per instance + # lets get the basic one so we can calculate the instance one. + basic_pos =Vector(get_pos(basic_trans)) + basic_rot =Quaternion(get_rot(basic_trans)) + basic_scale =Vector((1,1,1)) + basic_matr=Matrix.LocRotScale(basic_pos,basic_rot,basic_scale) + basic_matr_inv=basic_matr.inverted() + + # Never modify the basic on as other nodes may be referencing it. (its normally 0,0,0 anyway) + inst_pos =Vector(get_pos(inst)) + inst_rot =Quaternion(get_rot(inst)) + inst_scale =Vector((1,1,1)) + inst_m=Matrix.LocRotScale(inst_pos,inst_rot,inst_scale) + + + obj_col=find_wIDMN_col(i,tlidx,idx,Sector_coll) + if obj_col: + if len(obj_col.objects)>0: + obj=obj_col.objects[0] + # Check for Position and if changed delete the original and add to the new sector + if obj.matrix_world!=Matrix(obj_col['matrix']): + deletions[sectorName].append(obj_col) + new_ni=len(template_nodes) + template_nodes.append(copy.deepcopy(nodes[obj_col['nodeIndex']])) + + createNodeData(template_nodeData, obj_col, new_ni, obj,ID) + ID+=1 + + else: + if obj_col: + deletions[sectorName].append(obj_col) + print(wIMNs) # __ __ __ __ ___ ___ ___ # /\ | \ | \ | |\ | / _` /__` | | | |__ |__ @@ -472,21 +552,19 @@ def exportSectors( filename): for node in t: if int(node['Id'])>ID: ID=int(node['Id'])+1 - - for col in Sector_coll.children: - if 'exported' not in col.keys() or col['exported']==False: + if 
Sector_additions_coll: + for col in Sector_additions_coll.children: if 'nodeIndex' in col.keys() and col['sectorName']==sectorName and len(col.objects)>0: match col['nodeType']: case 'worldStaticMeshNode' | 'worldStaticDecalNode' | 'worldBuildingProxyMeshNode' | 'worldGenericProxyMeshNode' | 'worldTerrainProxyMeshNode': - obj=col.objects[0] - new_ni=len(nodes) - createNodeData(template_nodeData, col, new_ni, obj,ID) + obj=col.objects[0] + createNodeData(t, col, col['nodeIndex'], obj,ID) ID+=1 case 'worldEntityNode': new_ni=len(nodes) nodes.append(copy.deepcopy(nodes[col['nodeIndex']])) obj=col.objects[0] - createNodeData(template_nodeData, col, new_ni, obj,ID) + createNodeData(t, col, new_ni, obj,ID) ID+=1 case 'worldInstancedMeshNode': diff --git a/i_scene_cp77_gltf/importers/entity_import.py b/i_scene_cp77_gltf/importers/entity_import.py index 155eba3..92ba3c3 100644 --- a/i_scene_cp77_gltf/importers/entity_import.py +++ b/i_scene_cp77_gltf/importers/entity_import.py @@ -9,6 +9,7 @@ from mathutils import Vector, Matrix , Quaternion import bmesh from ..main.common import json_ver_validate +from ..main.common import jsonload from .phys_import import cp77_phys_import from ..main.collisions import draw_box_collider, draw_capsule_collider, draw_convex_collider, draw_sphere_collider @@ -28,10 +29,10 @@ def importEnt( filepath='', appearances=[], exclude_meshes=[], with_materials=Tr ent_name=os.path.basename(filepath)[:-9] print('Importing Entity', ent_name) with open(filepath,'r') as f: - j=json.load(f) + j=jsonload(f) valid_json=json_ver_validate(j) if not valid_json: - bpy.ops.cp77.message_box('INVOKE_DEFAULT', message="Incompatible entity json file detected. This add-on version requires files generated by WolvenKit 8.9.1 or higher.") + bpy.ops.cp77.message_box('INVOKE_DEFAULT', message="Incompatible entity json file detected. 
This add-on version requires files generated by WolvenKit 8.9.1 or higher.") return {'CANCELLED'} ent_apps= j['Data']['RootChunk']['appearances'] @@ -50,7 +51,7 @@ def importEnt( filepath='', appearances=[], exclude_meshes=[], with_materials=Tr for comp in ent_components: ent_complist.append(comp['name']) if 'rig' in comp.keys(): - print(comp['rig']) + print(comp['rig']['DepotPath']['$value']) ent_rigs.append(os.path.join(path,comp['rig']['DepotPath']['$value'])) if comp['name']['$value'] == 'Chassis': chassis_info = comp @@ -98,8 +99,8 @@ def importEnt( filepath='', appearances=[], exclude_meshes=[], with_materials=Tr # then check for an anim in the project thats using the rig (some things like the arch bike dont ref the anim in the ent) # otherwise just skip this section # - anim_files = glob.glob(path+"\\base\\animations\\"+"\**\*.glb", recursive = True) - ep1_anim_files = glob.glob(path+"\\ep1\\animations\\"+"\**\*.glb", recursive = True) + anim_files = glob.glob(os.path.join(path,"base","animations","**","*.glb"), recursive = True) + ep1_anim_files = glob.glob(os.path.join(path,"ep1","animations","**","*.glb"), recursive = True) anim_files = anim_files + ep1_anim_files app_name=None rig=None @@ -141,7 +142,7 @@ def importEnt( filepath='', appearances=[], exclude_meshes=[], with_materials=Tr print('no anim rig found') # find the rig json associated with the ent - rigjsons = glob.glob(path+"\**\*.rig.json", recursive = True) + rigjsons = glob.glob(os.path.join(path,"**","*.rig.json"), recursive = True) rig_j=None if len(rigjsons)>0 and len(ent_rigs)>0: entrigjsons=[x for x in rigjsons if x[:-5] in ent_rigs] @@ -283,7 +284,7 @@ def importEnt( filepath='', appearances=[], exclude_meshes=[], with_materials=Tr meshApp=c['meshAppearance']['$value'] #print(meshApp) try: - bpy.ops.io_scene_gltf.cp77(filepath=meshpath, appearances=meshApp, with_materials=with_materials, update_gi=False,remap_depot=remapdepot) + bpy.ops.io_scene_gltf.cp77(filepath=meshpath, 
appearances=meshApp, with_materials=with_materials, update_gi=False, remap_depot=remapdepot) for obj in C.selected_objects: obj['componentName'] = c['name']['$value'] obj['sourcePath'] = meshpath @@ -297,8 +298,7 @@ def importEnt( filepath='', appearances=[], exclude_meshes=[], with_materials=Tr objs = C.selected_objects if meshname=='v_sportbike2_arch_nemesis__ext01_axle_f_a_01': print('those annoying front forks') - if 'rotor' in meshname or 'prop' in meshname: - print('those annoying front forks') + # NEW parentTransform stuff - fixes vehicles being exploded x=None y=None @@ -486,7 +486,6 @@ def importEnt( filepath='', appearances=[], exclude_meshes=[], with_materials=Tr cr.target=target else: target=obj - # end new stuff # dont get the local transform here if we already did it before diff --git a/i_scene_cp77_gltf/main/common.py b/i_scene_cp77_gltf/main/common.py index b591de2..0bb089c 100644 --- a/i_scene_cp77_gltf/main/common.py +++ b/i_scene_cp77_gltf/main/common.py @@ -7,7 +7,30 @@ import bpy import bmesh from mathutils import Vector - +import json + +def normalize_paths(data): + if isinstance(data, dict): + for key, value in data.items(): + data[key] = normalize_paths(value) + elif isinstance(data, list): + for i in range(len(data)): + data[i] = normalize_paths(data[i]) + elif isinstance(data, str): + # Normalize the path if it is absolute + if data[0:4]=='base' or data[0:3]=='ep1' or data[1:3]==':\\': + data = data.replace('\\',os.sep) + return data + +def jsonload(filepath): + data=json.load(filepath) + normalize_paths(data) + return data + +def jsonloads(jsonstrings): + data=json.loads(jsonstrings) + normalize_paths(data) + return data def get_plugin_dir(): return os.path.dirname(os.path.dirname(os.path.abspath(__file__))) @@ -185,6 +208,8 @@ def imageFromPath(Img,image_format,isNormal = False): def imageFromRelPath(ImgPath, image_format='png', isNormal = False, DepotPath='',ProjPath=''): # The speedtree materials use the same name textures for 
different plants this code was loading the same leaves on all of them # Also copes with the fact that theres black.xbm in base and engine for instance + DepotPath=DepotPath.replace('\\',os.sep) + ProjPath=ProjPath.replace('\\',os.sep) inProj=os.path.join(ProjPath,ImgPath)[:-3]+ image_format inDepot=os.path.join(DepotPath,ImgPath)[:-3]+ image_format img_names=[k for k in bpy.data.images.keys() if bpy.data.images[k].filepath==inProj] diff --git a/i_scene_cp77_gltf/main/setup.py b/i_scene_cp77_gltf/main/setup.py index 7949a7f..8cfe1a2 100644 --- a/i_scene_cp77_gltf/main/setup.py +++ b/i_scene_cp77_gltf/main/setup.py @@ -37,7 +37,7 @@ def __init__(self,Obj,BasePath,image_format,MeshPath): self.image_format = image_format self.obj = Obj self.MeshPath= MeshPath - before,mid,after=MeshPath.partition('source\\raw\\') + before,mid,after=MeshPath.partition('source\\raw\\'.replace('\\',os.sep)) self.ProjPath=before+mid def create(self,materialIndex): diff --git a/i_scene_cp77_gltf/material_types/multilayered.py b/i_scene_cp77_gltf/material_types/multilayered.py index a1c15ab..5b8f72a 100644 --- a/i_scene_cp77_gltf/material_types/multilayered.py +++ b/i_scene_cp77_gltf/material_types/multilayered.py @@ -84,7 +84,7 @@ def __init__(self, BasePath,image_format, ProjPath): self.ProjPath = str(ProjPath) def createBaseMaterial(self,matTemplateObj,mltemplate): - name=os.path.basename(mltemplate) + name=os.path.basename(mltemplate.replace('\\',os.sep)) CT = imageFromRelPath(matTemplateObj["colorTexture"]["DepotPath"]["$value"],self.image_format,DepotPath=self.BasePath, ProjPath=self.ProjPath) NT = imageFromRelPath(matTemplateObj["normalTexture"]["DepotPath"]["$value"],self.image_format,isNormal = True,DepotPath=self.BasePath, ProjPath=self.ProjPath) RT = imageFromRelPath(matTemplateObj["roughnessTexture"]["DepotPath"]["$value"],self.image_format,isNormal = True,DepotPath=self.BasePath, ProjPath=self.ProjPath) @@ -92,7 +92,7 @@ def createBaseMaterial(self,matTemplateObj,mltemplate): 
TileMult = float(matTemplateObj.get("tilingMultiplier",1)) - NG = bpy.data.node_groups.new(name[:-11],"ShaderNodeTree") + NG = bpy.data.node_groups.new(name.split('.')[0],"ShaderNodeTree") NG['mlTemplate']=mltemplate vers=bpy.app.version if vers[0]<4: @@ -161,6 +161,7 @@ def setGlobNormal(self,normalimgpath,CurMat,input): GNA = create_node(CurMat.nodes, "ShaderNodeVectorMath",(-400,-250),operation='ADD') GNS = create_node(CurMat.nodes, "ShaderNodeVectorMath", (-600,-250),operation='SUBTRACT') + GNS.name="NormalSubtract" GNGeo = create_node(CurMat.nodes, "ShaderNodeNewGeometry", (-800,-250)) @@ -175,10 +176,12 @@ def setGlobNormal(self,normalimgpath,CurMat,input): def createLayerMaterial(self,LayerName,LayerCount,CurMat,mlmaskpath,normalimgpath): NG = _getOrCreateLayerBlend() for x in range(LayerCount-1): - if os.path.exists(os.path.splitext(self.ProjPath + mlmaskpath)[0]+'_layers\\'+mlmaskpath.split('\\')[-1:][0][:-7]+"_"+str(x+1)+".png"): + if os.path.exists((os.path.splitext(self.ProjPath + mlmaskpath)[0]+'_layers\\'+mlmaskpath.split('\\')[-1:][0][:-7]+"_"+str(x+1)+".png").replace('\\',os.sep)): MaskTexture = imageFromPath(os.path.splitext(self.ProjPath+ mlmaskpath)[0]+'_layers\\'+mlmaskpath.split('\\')[-1:][0][:-7]+"_"+str(x+1)+".png",self.image_format,isNormal = True) + elif os.path.exists((os.path.splitext(self.BasePath + mlmaskpath)[0]+'_layers\\'+mlmaskpath.split('\\')[-1:][0][:-7]+"_"+str(x+1)+".png").replace('\\',os.sep)): + MaskTexture = imageFromPath((os.path.splitext(self.BasePath + mlmaskpath)[0]+'_layers\\'+mlmaskpath.split('\\')[-1:][0][:-7]+"_"+str(x+1)+".png").replace('\\',os.sep),self.image_format,isNormal = True) else: - MaskTexture = imageFromPath(os.path.splitext(self.BasePath + mlmaskpath)[0]+'_layers\\'+mlmaskpath.split('\\')[-1:][0][:-7]+"_"+str(x+1)+".png",self.image_format,isNormal = True) + print('Mask image not found for layer ',x+1) @@ -218,22 +221,50 @@ def createLayerMaterial(self,LayerName,LayerCount,CurMat,mlmaskpath,normalimgpat 
else: targetLayer="Mat_Mod_Layer_0" - CurMat.links.new(CurMat.nodes[targetLayer].outputs[0],CurMat.nodes['Principled BSDF'].inputs['Base Color']) + + + # If theres more than 10 layers, mix them in 2 stacks then mix the stacks, trying to avoid SVM errors + if LayerCount>11: + MixLayerStacks = create_node(CurMat.nodes,"ShaderNodeGroup", (-1000,-180)) + MixLayerStacks.node_tree = NG + MixLayerStacks.name = "MixLayerStacks" + Layer10="Layer_9" + LastLayer="Layer_"+str(LayerCount-2) + # Remove the links already made between 10 & 11 + for out in CurMat.nodes[Layer10].outputs: + for l in out.links: + CurMat.links.remove(l) + CurMat.links.new(CurMat.nodes[Layer10].outputs[0],MixLayerStacks.inputs[0]) + CurMat.links.new(CurMat.nodes[Layer10].outputs[1],MixLayerStacks.inputs[1]) + CurMat.links.new(CurMat.nodes[Layer10].outputs[2],MixLayerStacks.inputs[2]) + CurMat.links.new(CurMat.nodes[Layer10].outputs[3],MixLayerStacks.inputs[3]) + CurMat.links.new(CurMat.nodes[LastLayer].outputs[0],MixLayerStacks.inputs[4]) + CurMat.links.new(CurMat.nodes[LastLayer].outputs[1],MixLayerStacks.inputs[5]) + CurMat.links.new(CurMat.nodes[LastLayer].outputs[2],MixLayerStacks.inputs[6]) + CurMat.links.new(CurMat.nodes[LastLayer].outputs[3],MixLayerStacks.inputs[7]) + + # replace the connections from the bottom of the stack with these + CurMat.links.new(MixLayerStacks.outputs[0],CurMat.nodes['Principled BSDF'].inputs['Base Color']) + CurMat.links.new(MixLayerStacks.outputs[1],CurMat.nodes['Principled BSDF'].inputs['Metallic']) + CurMat.links.new(MixLayerStacks.outputs[2],CurMat.nodes['Principled BSDF'].inputs['Roughness']) + targetLayer="MixLayerStacks" + else: + CurMat.links.new(CurMat.nodes[targetLayer].outputs[0],CurMat.nodes['Principled BSDF'].inputs['Base Color']) + CurMat.links.new(CurMat.nodes[targetLayer].outputs[2],CurMat.nodes['Principled BSDF'].inputs['Roughness']) + CurMat.links.new(CurMat.nodes[targetLayer].outputs[1],CurMat.nodes['Principled BSDF'].inputs['Metallic']) + if 
normalimgpath: yoink = self.setGlobNormal(normalimgpath,CurMat,CurMat.nodes[targetLayer].outputs[3]) CurMat.links.new(yoink,CurMat.nodes['Principled BSDF'].inputs['Normal']) else: CurMat.links.new(CurMat.nodes[targetLayer].outputs[3],CurMat.nodes['Principled BSDF'].inputs['Normal']) - CurMat.links.new(CurMat.nodes[targetLayer].outputs[2],CurMat.nodes['Principled BSDF'].inputs['Roughness']) - CurMat.links.new(CurMat.nodes[targetLayer].outputs[1],CurMat.nodes['Principled BSDF'].inputs['Metallic']) - return def create(self,Data,Mat): Mat['MLSetup']= Data["MultilayerSetup"] file = openJSON( Data["MultilayerSetup"] + ".json",mode='r',DepotPath=self.BasePath, ProjPath=self.ProjPath) - mlsetup = json.loads(file.read()) + mlsetup = jsonloads(file.read()) file.close() valid_json=json_ver_validate(mlsetup) if not valid_json: @@ -311,7 +342,7 @@ def create(self,Data,Mat): MBI = imageFromPath(self.BasePath+Microblend,self.image_format,True) file = openJSON( material + ".json",mode='r',DepotPath=self.BasePath, ProjPath=self.ProjPath) - mltemplate = json.loads(file.read()) + mltemplate = jsonloads(file.read()) file.close() valid_json=json_ver_validate(mltemplate) if not valid_json: @@ -321,7 +352,7 @@ def create(self,Data,Mat): OverrideTable = createOverrideTable(mltemplate)#get override info for colors and what not # Mat[os.path.basename(material).split('.')[0]+'_cols']=OverrideTable["ColorScale"] - NG = bpy.data.node_groups.new(os.path.basename(Data["MultilayerSetup"])[:-8]+"_Layer_"+str(LayerIndex),"ShaderNodeTree")#crLAer's node group + NG = bpy.data.node_groups.new(os.path.basename(Data["MultilayerSetup"].replace('\\',os.sep))[:-8]+"_Layer_"+str(LayerIndex),"ShaderNodeTree")#crLAer's node group vers=bpy.app.version if vers[0]<4: NG.inputs.new('NodeSocketColor','ColorScale') @@ -376,10 +407,10 @@ def create(self,Data,Mat): GroupOutN = create_node(NG.nodes, "NodeGroupOutput", (200,-100)) LayerGroupN['mlTemplate']=material - if not 
bpy.data.node_groups.get(os.path.basename(material)[:-11]): - self.createBaseMaterial(mltemplate,material) + if not bpy.data.node_groups.get(os.path.basename(material.replace('\\',os.sep)).split('.')[0]): + self.createBaseMaterial(mltemplate,material.replace('\\',os.sep)) - BaseMat = bpy.data.node_groups.get(os.path.basename(material)[:-11]) + BaseMat = bpy.data.node_groups.get(os.path.basename(material.replace('\\',os.sep)).split('.')[0]) if BaseMat: BMN = create_node(NG.nodes,"ShaderNodeGroup", (-2000,0)) BMN.width = 300 @@ -439,7 +470,7 @@ def create(self,Data,Mat): # Node for blending colorscale color with diffuse texture of mltemplate # Changed from multiply to overlay because multiply is a darkening blend mode, and colors appear too dark. Overlay is still probably wrong - jato if colorScale != "null": - ColorScaleMixN = create_node(NG.nodes,"ShaderNodeMixRGB",(-1400,100),blend_type='OVERLAY') + ColorScaleMixN = create_node(NG.nodes,"ShaderNodeMixRGB",(-1400,100),blend_type='MIX') ColorScaleMixN.inputs[0].default_value=1 # Microblend texture node @@ -626,4 +657,4 @@ def create(self,Data,Mat): else: LayerNormal=Data["GlobalNormal"] - self.createLayerMaterial(os.path.basename(Data["MultilayerSetup"])[:-8]+"_Layer_",LayerCount,CurMat,Data["MultilayerMask"],Data["GlobalNormal"]) + self.createLayerMaterial(os.path.basename(Data["MultilayerSetup"])[:-8]+"_Layer_",LayerCount,CurMat,Data["MultilayerMask"],Data["GlobalNormal"]) \ No newline at end of file