Adds missing data
parent e6391d9fdd
commit 53cdcc3433
620 changed files with 47293 additions and 151 deletions
82  addons/cyclops_level_builder/io/cyclops_io/buffer_archive.gd  Normal file
@@ -0,0 +1,82 @@
# MIT License
#
# Copyright (c) 2023 Mark McKay
# https://github.com/blackears/cyclopsLevelBuilder
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

@tool
extends ResourceInspector
class_name BufferArchive

class BufferRegion extends Resource:
	var builder:BufferArchive
	#var index:int
	var start_byte:int
	var length:int

	func get_buffer()->PackedByteArray:
		return builder.buffer.slice(start_byte, start_byte + length)

var buffer:PackedByteArray
#var region_list:Array[BufferRegion]

func store_buffer(buf:PackedByteArray)->BufferRegion:
	var region:BufferRegion = BufferRegion.new()

	region.builder = self
	#region.index = region_list.size()
	region.start_byte = buffer.size()
	region.length = buf.size()

	buffer.append_array(buf)
	# buffer.resize(buffer.size() + byte_len)

	#region_list.append(region)

	return region


#func allocate_buffer(byte_len:int)->BufferRegion:
	#var region:BufferRegion = BufferRegion.new()
	#
	#region.builder = self
	#region.index = region_list.size()
	#region.start_byte = buffer.size()
	#region.length = byte_len
	#buffer.resize(buffer.size() + byte_len)
	#
	#region_list.append(region)
	#
	#return region

func to_dictionary()->Dictionary:
	var result:Dictionary

	#result["regions"] = []
	#for region in region_list:
		#result.region.append({
			##"index": region.index,
			#"start": region.start_byte,
			#"length": region.length
		#})

	result["buffer"] = Marshalls.raw_to_base64(buffer.compress())

	return result
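A minimal usage sketch for the new BufferArchive (illustrative only, not part of this commit; the byte values are made up):

# Sketch: store two buffers and serialize the archive.
func _sketch_buffer_archive()->void:
	var archive:BufferArchive = BufferArchive.new()

	var region_a:BufferArchive.BufferRegion = archive.store_buffer(PackedByteArray([1, 2, 3]))
	var region_b:BufferArchive.BufferRegion = archive.store_buffer(PackedByteArray([4, 5]))

	# Regions are contiguous slices of the shared byte buffer.
	print(region_a.start_byte, " ", region_a.length)  # 0 3
	print(region_b.get_buffer())                      # [4, 5]

	# to_dictionary() stores the whole buffer as base64 of the compressed bytes.
	var dict:Dictionary = archive.to_dictionary()
	print(dict["buffer"])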
@@ -0,0 +1,199 @@
# MIT License
#
# Copyright (c) 2023 Mark McKay
# https://github.com/blackears/cyclopsLevelBuilder
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

@tool
class_name CyclopsFileBuilder
extends RefCounted


var plugin:CyclopsLevelBuilder
var buffer_archive:BufferArchive = BufferArchive.new()

var document:Dictionary
var node_indexer:ItemIndexer = ItemIndexer.new()
var object_indexer:ItemIndexer = ItemIndexer.new()
var buffer_region_indexer:ItemIndexer = ItemIndexer.new()

var buffer_region_map:Dictionary


func _init(plugin:CyclopsLevelBuilder):
	self.plugin = plugin

func should_include_branch(node:Node3D)->bool:
	if node is CyclopsBlock:
		return true

	for child in node.get_children():
		if child is Node3D && should_include_branch(child):
			return true

	return false

func build_file():

	var root:Node = plugin.get_editor_interface().get_edited_scene_root()

	document = {
		"header": {
			"exporter": "Cyclops Level Builder " + plugin.get_plugin_version(),
			"version": "1.0.0"
		},
		"scenes": [],
		"nodes": [],
		"objects": [],
		"buffer_regions": [],
		"buffers": []
	}

	export_scene_recursive(root)

	#var build_scene:Dictionary
	#build_scene["root"] = root.name
	document.scenes.append({
		"id": 0,
		"root": node_indexer.get_or_create_id(root)
	})

	for id in buffer_region_map.keys():
		var region:BufferArchive.BufferRegion = buffer_region_map[id]
		document.buffer_regions.append({
			"id": id,
			"start": region.start_byte,
			"length": region.length,
			"buffer_id": 0
		})

	document.buffers.append({
		"id": 0,
		"byte_length": buffer_archive.buffer.size(),
		"data_buffer": Marshalls.raw_to_base64(buffer_archive.buffer.compress())
	})


func export_scene_recursive(cur_node:Node3D):
	#print(str(cur_node.get_path()) + "\n")
	if !should_include_branch(cur_node):
		return

	var build_node:Dictionary
	build_node["id"] = node_indexer.get_or_create_id(cur_node)
	build_node["name"] = cur_node.name
	document.nodes.append(build_node)

	if !cur_node.visible:
		build_node["visible"] = cur_node.visible
	if !cur_node.position.is_equal_approx(Vector3.ZERO):
		build_node["translate"] = [cur_node.position.x, cur_node.position.y, cur_node.position.z]
	if !cur_node.transform.basis.is_equal_approx(Basis.IDENTITY):
		build_node["basis"] = [
			cur_node.basis.x.x, cur_node.basis.x.y, cur_node.basis.x.z,
			cur_node.basis.y.x, cur_node.basis.y.y, cur_node.basis.y.z,
			cur_node.basis.z.x, cur_node.basis.z.y, cur_node.basis.z.z
		]

	if cur_node is CyclopsBlock:
		var obj_id:int = object_indexer.get_or_create_id(cur_node)
		build_node["object"] = obj_id

		var dict:Dictionary = cur_node.export_to_cyclops_file(self)

		document.objects.append(
			{
				"id": obj_id,
				"type": "convex_block",
				"body": dict
			}
		)
		#export_mesh_node(cur_node)
	else:
		# print("children of ", cur_node.name)

		var child_ids:Array[int]
		var exp_children:Array[Node3D]
		for local_child in cur_node.get_children():
			if local_child is Node3D && should_include_branch(local_child):
				child_ids.append(node_indexer.get_or_create_id(local_child))
				exp_children.append(local_child)

		if !child_ids.is_empty():
			build_node["children"] = child_ids

		for local_child in exp_children:
			export_scene_recursive(local_child)

func export_mesh_node(cur_node:CyclopsBlock):
	if !cur_node.mesh_vector_data:
		return

	var build_mesh:Dictionary
	document.objects.append(build_mesh)

	build_mesh["id"] = object_indexer.get_or_create_id(cur_node)

	build_mesh["collision_type"] = Collision.Type.keys()[cur_node.collision_type]
	build_mesh["collision_layer"] = cur_node.collision_layer
	build_mesh["collision_mask"] = cur_node.collision_mask

	var mat_res_paths:PackedStringArray
	for mat in cur_node.materials:
		if mat:
			mat_res_paths.append(mat.resource_path)
		else:
			mat_res_paths.append("")
	build_mesh["materials"] = mat_res_paths

	build_mesh["mesh"] = cur_node.mesh_vector_data.to_dictionary(self)
	#build_mesh["mesh"] = cur_node.mesh_vector_data.to_dictionary(self)


func export_byte_array(byte_data:PackedByteArray)->int:
	var result:Dictionary

	var region:BufferArchive.BufferRegion = buffer_archive.store_buffer(byte_data)
	var buf_id:int = buffer_region_indexer.get_or_create_id(region)
	buffer_region_map[buf_id] = region
	# result["data_buffer"] = region.index
	return buf_id


func export_vector(vec:DataVector)->Dictionary:
	var result:Dictionary

	result["name"] = vec.name
	result["data_type"] = DataVector.DataType.keys()[vec.data_type]
	#if vec.stride != 1:
		#result["stride"] = vec.stride
	if !vec.category.is_empty():
		result["category"] = vec.category

	var region:BufferArchive.BufferRegion = buffer_archive.store_buffer(vec.get_buffer_byte_data())
	var buf_id:int = buffer_region_indexer.get_or_create_id(region)
	buffer_region_map[buf_id] = region
	# result["data_buffer"] = region.index
	result["data_buffer"] = buf_id

	return result
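A hedged sketch of how this builder might be driven and its document written out; the output path and the JSON serialization step are assumptions of this sketch, not something this commit does:

# Sketch: build the export document and save it as JSON.
# `plugin` is assumed to be a valid CyclopsLevelBuilder instance.
func _sketch_export(plugin:CyclopsLevelBuilder)->void:
	var builder:CyclopsFileBuilder = CyclopsFileBuilder.new(plugin)
	builder.build_file()

	# builder.document is a plain Dictionary with "header", "scenes", "nodes",
	# "objects", "buffer_regions" and "buffers" entries.
	var text:String = JSON.stringify(builder.document, "\t")
	var file:FileAccess = FileAccess.open("user://level.cyclops.json", FileAccess.WRITE)
	file.store_string(text)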
@@ -0,0 +1,209 @@
# MIT License
#
# Copyright (c) 2023 Mark McKay
# https://github.com/blackears/cyclopsLevelBuilder
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

@tool
class_name CyclopsFileLoader
extends RefCounted

class BufferRegion:
	var start:int
	var length:int
	var buffer_id:int

var buffer_archive:BufferArchive = BufferArchive.new()

var buffer_map:Dictionary
var buffer_region_map:Dictionary
var object_map:Dictionary
var node_map:Dictionary
var scene_map:Dictionary

var plugin:CyclopsLevelBuilder

func load(root:Dictionary):
	for buf_dict in root["buffers"]:
		var buf_id:int = buf_dict["id"]
		var buf_size:int = buf_dict["byte_length"]
		var text:String = buf_dict["data_buffer"]
		var zip_buf:PackedByteArray = Marshalls.base64_to_raw(text)
		var buf:PackedByteArray = zip_buf.decompress(buf_size)

		var ba:BufferArchive = BufferArchive.new()
		ba.buffer = buf
		buffer_map[buf_id] = ba

	for reg_dict in root["buffer_regions"]:
		var reg:BufferRegion = BufferRegion.new()
		var id:int = reg_dict["id"]
		reg.start = reg_dict["start"]
		reg.length = reg_dict["length"]
		reg.buffer_id = reg_dict["buffer_id"]

		buffer_region_map[id] = reg

	for obj_dict in root["objects"]:
		var id:int = obj_dict["id"]
		var type:String = obj_dict["type"]
		var body:Dictionary = obj_dict["body"]

		var object_node
		match type:
			"convex_block":
				object_node = load_convex_block(body)

		if object_node:
			object_map[id] = object_node

	for node_dict in root["nodes"]:
		var id:int = node_dict["id"]
		var node:Node3D
		if node_dict.has("object"):
			var obj_id:int = node_dict["object"]
			node = object_map[obj_id]
		else:
			node = Node3D.new()

		node_map[id] = node

		if node_dict.has("name"):
			node.name = node_dict["name"]

		if node_dict.has("visible"):
			node.visible = node_dict["visible"]
		if node_dict.has("basis"):
			var a:Array = node_dict["basis"]
			var basis:Basis = Basis(Vector3(a[0], a[1], a[2]), Vector3(a[3], a[4], a[5]), Vector3(a[6], a[7], a[8]))
			node.basis = basis
		if node_dict.has("translate"):
			var a:Array = node_dict["translate"]
			node.position = Vector3(a[0], a[1], a[2])

	for node_dict in root["nodes"]:
		var id:int = node_dict["id"]
		var node:Node3D = node_map[id]

		if node_dict.has("children"):
			for child_idx in node_dict["children"]:
				var child_node:Node3D = node_map[int(child_idx)]
				node.add_child(child_node)

	for scene_dict in root["scenes"]:
		var id:int = scene_dict["id"]
		var root_id:int = scene_dict["root"]
		scene_map[id] = root_id


func load_convex_block(body_dict:Dictionary)->CyclopsBlock:
	var block:CyclopsBlock = preload("res://addons/cyclops_level_builder/nodes/cyclops_block.gd").new()
	#blocks_root.add_child(block)
	#block.owner = builder.get_editor_interface().get_edited_scene_root()
	#block.name = GeneralUtil.find_unique_name(blocks_root, block_name_prefix)

	block.collision_type = Collision.Type.get(body_dict["collision_type"])
	block.collision_layer = body_dict["collision_layer"]
	block.collision_mask = body_dict["collision_mask"]

	for mat_res_path in body_dict["materials"]:
		var res = ResourceLoader.load(mat_res_path)
		block.materials.append(res)

	if body_dict.has("mesh"):
		var mesh_dict:Dictionary = body_dict["mesh"]
		var mesh:MeshVectorData = MeshVectorData.new()
		mesh.num_vertices = mesh_dict["num_vertices"]
		mesh.num_edges = mesh_dict["num_edges"]
		mesh.num_faces = mesh_dict["num_faces"]
		mesh.num_face_vertices = mesh_dict["num_face_vertices"]
		mesh.active_vertex = mesh_dict["active_vertex"]
		mesh.active_edge = mesh_dict["active_edge"]
		mesh.active_face = mesh_dict["active_face"]
		mesh.active_face_vertex = mesh_dict["active_face_vertex"]

		mesh.edge_vertex_indices = load_buffer(mesh_dict["edge_vertex_index_buffer"]).to_int32_array()
		mesh.edge_face_indices = load_buffer(mesh_dict["edge_face_index_buffer"]).to_int32_array()
		mesh.face_vertex_count = load_buffer(mesh_dict["face_vertex_count_buffer"]).to_int32_array()
		mesh.face_vertex_indices = load_buffer(mesh_dict["face_vertex_index_buffer"]).to_int32_array()

		for vec_dict in mesh_dict["vectors"]["vertices"]:
			var vec:DataVector = load_data_vector(vec_dict)
			mesh.vertex_data[vec.name] = vec

		for vec_dict in mesh_dict["vectors"]["edges"]:
			var vec:DataVector = load_data_vector(vec_dict)
			mesh.edge_data[vec.name] = vec

		for vec_dict in mesh_dict["vectors"]["faces"]:
			var vec:DataVector = load_data_vector(vec_dict)
			mesh.face_data[vec.name] = vec

		for vec_dict in mesh_dict["vectors"]["face_vertices"]:
			var vec:DataVector = load_data_vector(vec_dict)
			mesh.face_vertex_data[vec.name] = vec

		block.mesh_vector_data = mesh

	return block

#enum DataType { BOOL, INT, FLOAT, STRING, COLOR, VECTOR2, VECTOR3, VECTOR4, TRANSFORM_2D, TRANSFORM_3D }

func load_data_vector(vec_dict)->DataVector:
	match vec_dict["data_type"]:
		"BOOL":
			var buf:PackedByteArray = load_buffer(vec_dict["data_buffer"])
			return DataVectorByte.new(vec_dict["name"], buf, DataVector.DataType.BOOL)
		"INT":
			var buf:PackedInt32Array = load_buffer(vec_dict["data_buffer"]).to_int32_array()
			return DataVectorInt.new(vec_dict["name"], buf, DataVector.DataType.INT)
		"FLOAT":
			var buf:PackedFloat32Array = load_buffer(vec_dict["data_buffer"]).to_float32_array()
			return DataVectorFloat.new(vec_dict["name"], buf, DataVector.DataType.FLOAT)
		"STRING":
			var buf:PackedStringArray = bytes_to_var(load_buffer(vec_dict["data_buffer"]))
			return DataVectorString.new(vec_dict["name"], buf, DataVector.DataType.STRING)
		"COLOR":
			var buf:PackedFloat32Array = load_buffer(vec_dict["data_buffer"]).to_float32_array()
			return DataVectorFloat.new(vec_dict["name"], buf, DataVector.DataType.COLOR)
		"TRANSFORM_2D":
			var buf:PackedFloat32Array = load_buffer(vec_dict["data_buffer"]).to_float32_array()
			return DataVectorFloat.new(vec_dict["name"], buf, DataVector.DataType.TRANSFORM_2D)
		"TRANSFORM_3D":
			var buf:PackedFloat32Array = load_buffer(vec_dict["data_buffer"]).to_float32_array()
			return DataVectorFloat.new(vec_dict["name"], buf, DataVector.DataType.TRANSFORM_3D)
		"VECTOR2":
			var buf:PackedFloat32Array = load_buffer(vec_dict["data_buffer"]).to_float32_array()
			return DataVectorFloat.new(vec_dict["name"], buf, DataVector.DataType.VECTOR2)
		"VECTOR3":
			var buf:PackedFloat32Array = load_buffer(vec_dict["data_buffer"]).to_float32_array()
			return DataVectorFloat.new(vec_dict["name"], buf, DataVector.DataType.VECTOR3)
		"VECTOR4":
			var buf:PackedFloat32Array = load_buffer(vec_dict["data_buffer"]).to_float32_array()
			return DataVectorFloat.new(vec_dict["name"], buf, DataVector.DataType.VECTOR4)
		_:
			return null


func load_buffer(buf_id:int)->PackedByteArray:
	var buf_reg:BufferRegion = buffer_region_map[buf_id]
	var buf_src:BufferArchive = buffer_map[buf_reg.buffer_id]
	return buf_src.buffer.slice(buf_reg.start, buf_reg.start + buf_reg.length)
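A matching load sketch (again illustrative; the file path is an assumption, and attaching the loaded scene roots to the edited scene is left to the caller):

# Sketch: parse the JSON document and rebuild the node tree.
func _sketch_import()->void:
	var text:String = FileAccess.get_file_as_string("user://level.cyclops.json")
	var root_dict:Dictionary = JSON.parse_string(text)

	var loader:CyclopsFileLoader = CyclopsFileLoader.new()
	loader.load(root_dict)

	# scene_map maps scene id to root node id; node_map maps node id to Node3D.
	for scene_id in loader.scene_map.keys():
		var scene_root:Node3D = loader.node_map[loader.scene_map[scene_id]]
		print(scene_root.name)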
36  addons/cyclops_level_builder/io/cyclops_io/item_indexer.gd  Normal file
@@ -0,0 +1,36 @@
# MIT License
#
# Copyright (c) 2023 Mark McKay
# https://github.com/blackears/cyclopsLevelBuilder
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

@tool
class_name ItemIndexer
extends RefCounted

var dict:Dictionary

func get_or_create_id(node:Variant)->int:
	if dict.has(node):
		return dict[node]

	var id:int = dict.size()
	dict[node] = id
	return id
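A small sketch of the indexer behaviour (illustrative only): ids are handed out sequentially and a repeat lookup returns the previously assigned id.

# Sketch only.
func _sketch_indexer()->void:
	var indexer:ItemIndexer = ItemIndexer.new()
	var node_a:Node3D = Node3D.new()
	var node_b:Node3D = Node3D.new()

	print(indexer.get_or_create_id(node_a))  # 0
	print(indexer.get_or_create_id(node_b))  # 1
	print(indexer.get_or_create_id(node_a))  # 0 (already indexed)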