diff options
author | Star Rauchenberger <fefferburbia@gmail.com> | 2025-09-08 17:00:41 -0400 |
---|---|---|
committer | Star Rauchenberger <fefferburbia@gmail.com> | 2025-09-08 17:00:41 -0400 |
commit | cbacfa6277592dd05f6d9a5aaf1026ba58e74162 (patch) | |
tree | e30b0104805bc9537cd45c24a9a016c96f3296a2 | |
parent | ede20b5e97b39af14b42974f00ebf1737f289ece (diff) | |
download | lingo2-archipelago-cbacfa6277592dd05f6d9a5aaf1026ba58e74162.tar.gz lingo2-archipelago-cbacfa6277592dd05f6d9a5aaf1026ba58e74162.tar.bz2 lingo2-archipelago-cbacfa6277592dd05f6d9a5aaf1026ba58e74162.zip |
Added godobuf fork to repository
-rw-r--r-- | vendor/godobuf/LICENSE | 29 | ||||
-rw-r--r-- | vendor/godobuf/README | 4 | ||||
-rw-r--r-- | vendor/godobuf/addons/protobuf/parser.gd | 2254 | ||||
-rw-r--r-- | vendor/godobuf/addons/protobuf/plugin.cfg | 7 | ||||
-rw-r--r-- | vendor/godobuf/addons/protobuf/protobuf_cmdln.gd | 66 | ||||
-rw-r--r-- | vendor/godobuf/addons/protobuf/protobuf_core.gd | 668 | ||||
-rw-r--r-- | vendor/godobuf/addons/protobuf/protobuf_util.gd | 46 | ||||
-rw-r--r-- | vendor/godobuf/default_env.tres | 7 | ||||
-rw-r--r-- | vendor/godobuf/logo.png | bin | 0 -> 19026 bytes | |||
-rw-r--r-- | vendor/godobuf/logo.png.import | 35 | ||||
-rw-r--r-- | vendor/godobuf/project.godot | 26 |
11 files changed, 3142 insertions, 0 deletions
diff --git a/vendor/godobuf/LICENSE b/vendor/godobuf/LICENSE new file mode 100644 index 0000000..5d473d8 --- /dev/null +++ b/vendor/godobuf/LICENSE | |||
@@ -0,0 +1,29 @@ | |||
1 | BSD 3-Clause License | ||
2 | |||
3 | Copyright (c) 2018, oniksan | ||
4 | All rights reserved. | ||
5 | |||
6 | Redistribution and use in source and binary forms, with or without | ||
7 | modification, are permitted provided that the following conditions are met: | ||
8 | |||
9 | * Redistributions of source code must retain the above copyright notice, this | ||
10 | list of conditions and the following disclaimer. | ||
11 | |||
12 | * Redistributions in binary form must reproduce the above copyright notice, | ||
13 | this list of conditions and the following disclaimer in the documentation | ||
14 | and/or other materials provided with the distribution. | ||
15 | |||
16 | * Neither the name of the copyright holder nor the names of its | ||
17 | contributors may be used to endorse or promote products derived from | ||
18 | this software without specific prior written permission. | ||
19 | |||
20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" | ||
21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE | ||
22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE | ||
23 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE | ||
24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL | ||
25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR | ||
26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER | ||
27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, | ||
28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | ||
29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||
diff --git a/vendor/godobuf/README b/vendor/godobuf/README new file mode 100644 index 0000000..ce716bb --- /dev/null +++ b/vendor/godobuf/README | |||
@@ -0,0 +1,4 @@ | |||
1 | This is a fork of https://github.com/oniksan/godobuf with some minor changes so | ||
2 | that it is able to compile the Lingo 2 randomizer proto files. The plugin parts | ||
3 | of the project have also been removed since we only need the command line | ||
4 | script. | ||
diff --git a/vendor/godobuf/addons/protobuf/parser.gd b/vendor/godobuf/addons/protobuf/parser.gd new file mode 100644 index 0000000..dfc0bdd --- /dev/null +++ b/vendor/godobuf/addons/protobuf/parser.gd | |||
@@ -0,0 +1,2254 @@ | |||
1 | # | ||
2 | # BSD 3-Clause License | ||
3 | # | ||
4 | # Copyright (c) 2018 - 2023, Oleg Malyavkin | ||
5 | # All rights reserved. | ||
6 | # | ||
7 | # Redistribution and use in source and binary forms, with or without | ||
8 | # modification, are permitted provided that the following conditions are met: | ||
9 | # | ||
10 | # * Redistributions of source code must retain the above copyright notice, this | ||
11 | # list of conditions and the following disclaimer. | ||
12 | # | ||
13 | # * Redistributions in binary form must reproduce the above copyright notice, | ||
14 | # this list of conditions and the following disclaimer in the documentation | ||
15 | # and/or other materials provided with the distribution. | ||
16 | # | ||
17 | # * Neither the name of the copyright holder nor the names of its | ||
18 | # contributors may be used to endorse or promote products derived from | ||
19 | # this software without specific prior written permission. | ||
20 | # | ||
21 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" | ||
22 | # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE | ||
23 | # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE | ||
24 | # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE | ||
25 | # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL | ||
26 | # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR | ||
27 | # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER | ||
28 | # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, | ||
29 | # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | ||
30 | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||
31 | |||
32 | extends Node | ||
33 | |||
34 | const PROTO_VERSION_CONST : String = "const PROTO_VERSION = " | ||
35 | const PROTO_VERSION_DEFAULT : String = PROTO_VERSION_CONST + "0" | ||
36 | |||
37 | class Document: | ||
38 | |||
39 | func _init(doc_name : String, doc_text : String): | ||
40 | name = doc_name | ||
41 | text = doc_text | ||
42 | |||
43 | var name : String | ||
44 | var text : String | ||
45 | |||
46 | class TokenPosition: | ||
47 | func _init(b : int, e : int): | ||
48 | begin = b | ||
49 | end = e | ||
50 | var begin : int = 0 | ||
51 | var end : int = 0 | ||
52 | |||
53 | class Helper: | ||
54 | |||
55 | class StringPosition: | ||
56 | func _init(s : int, c : int, l : int): | ||
57 | str_num = s | ||
58 | column = c | ||
59 | length = l | ||
60 | var str_num : int | ||
61 | var column : int | ||
62 | var length : int | ||
63 | |||
64 | static func str_pos(text : String, position : TokenPosition) -> StringPosition: | ||
65 | var cur_str : int = 1 | ||
66 | var cur_col : int = 1 | ||
67 | var res_str : int = 0 | ||
68 | var res_col : int = 0 | ||
69 | var res_length : int = 0 | ||
70 | for i in range(text.length()): | ||
71 | if text[i] == "\n": | ||
72 | cur_str += 1 | ||
73 | cur_col = 0 | ||
74 | if position.begin == i: | ||
75 | res_str = cur_str | ||
76 | res_col = cur_col | ||
77 | res_length = position.end - position.begin + 1 | ||
78 | break | ||
79 | cur_col += 1 | ||
80 | return StringPosition.new(res_str, res_col, res_length) | ||
81 | |||
82 | static func text_pos(tokens : Array, index : int) -> TokenPosition: | ||
83 | var res_begin : int = 0 | ||
84 | var res_end : int = 0 | ||
85 | if index < tokens.size() && index >= 0: | ||
86 | res_begin = tokens[index].position.begin | ||
87 | res_end = tokens[index].position.end | ||
88 | return TokenPosition.new(res_begin, res_end) | ||
89 | |||
90 | static func error_string(file_name, col, row, error_text): | ||
91 | return file_name + ":" + str(col) + ":" + str(row) + ": error: " + error_text | ||
92 | |||
93 | class AnalyzeResult: | ||
94 | var classes : Array = [] | ||
95 | var fields : Array = [] | ||
96 | var groups : Array = [] | ||
97 | var version : int = 0 | ||
98 | var state : bool = false | ||
99 | var tokens : Array = [] | ||
100 | var syntax : Analysis.TranslationResult | ||
101 | var imports : Array = [] | ||
102 | var doc : Document | ||
103 | |||
104 | func soft_copy() -> AnalyzeResult: | ||
105 | var res : AnalyzeResult = AnalyzeResult.new() | ||
106 | res.classes = classes | ||
107 | res.fields = fields | ||
108 | res.groups = groups | ||
109 | res.version = version | ||
110 | res.state = state | ||
111 | res.tokens = tokens | ||
112 | res.syntax = syntax | ||
113 | res.imports = imports | ||
114 | res.doc = doc | ||
115 | return res | ||
116 | |||
117 | class Analysis: | ||
118 | |||
119 | func _init(path : String, doc : Document): | ||
120 | path_dir = path | ||
121 | document = doc | ||
122 | |||
123 | var document : Document | ||
124 | var path_dir : String | ||
125 | |||
126 | const LEX = { | ||
127 | LETTER = "[A-Za-z]", | ||
128 | DIGIT_DEC = "[0-9]", | ||
129 | DIGIT_OCT = "[0-7]", | ||
130 | DIGIT_HEX = "[0-9]|[A-F]|[a-f]", | ||
131 | BRACKET_ROUND_LEFT = "\\(", | ||
132 | BRACKET_ROUND_RIGHT = "\\)", | ||
133 | BRACKET_CURLY_LEFT = "\\{", | ||
134 | BRACKET_CURLY_RIGHT = "\\}", | ||
135 | BRACKET_SQUARE_LEFT = "\\[", | ||
136 | BRACKET_SQUARE_RIGHT = "\\]", | ||
137 | BRACKET_ANGLE_LEFT = "\\<", | ||
138 | BRACKET_ANGLE_RIGHT = "\\>", | ||
139 | SEMICOLON = ";", | ||
140 | COMMA = ",", | ||
141 | EQUAL = "=", | ||
142 | SIGN = "\\+|\\-", | ||
143 | SPACE = "\\s", | ||
144 | QUOTE_SINGLE = "'", | ||
145 | QUOTE_DOUBLE = "\"", | ||
146 | } | ||
147 | |||
148 | const TOKEN_IDENT : String = "(" + LEX.LETTER + "+" + "(" + LEX.LETTER + "|" + LEX.DIGIT_DEC + "|" + "_)*)" | ||
149 | const TOKEN_FULL_IDENT : String = TOKEN_IDENT + "{0,1}(\\." + TOKEN_IDENT + ")+" | ||
150 | const TOKEN_BRACKET_ROUND_LEFT : String = "(" + LEX.BRACKET_ROUND_LEFT + ")" | ||
151 | const TOKEN_BRACKET_ROUND_RIGHT : String = "(" + LEX.BRACKET_ROUND_RIGHT + ")" | ||
152 | const TOKEN_BRACKET_CURLY_LEFT : String = "(" + LEX.BRACKET_CURLY_LEFT + ")" | ||
153 | const TOKEN_BRACKET_CURLY_RIGHT : String = "(" + LEX.BRACKET_CURLY_RIGHT + ")" | ||
154 | const TOKEN_BRACKET_SQUARE_LEFT : String = "(" + LEX.BRACKET_SQUARE_LEFT + ")" | ||
155 | const TOKEN_BRACKET_SQUARE_RIGHT : String = "(" + LEX.BRACKET_SQUARE_RIGHT + ")" | ||
156 | const TOKEN_BRACKET_ANGLE_LEFT : String = "(" + LEX.BRACKET_ANGLE_LEFT + ")" | ||
157 | const TOKEN_BRACKET_ANGLE_RIGHT : String = "(" + LEX.BRACKET_ANGLE_RIGHT + ")" | ||
158 | const TOKEN_SEMICOLON : String = "(" + LEX.SEMICOLON + ")" | ||
159 | const TOKEN_EUQAL : String = "(" + LEX.EQUAL + ")" | ||
160 | const TOKEN_SIGN : String = "(" + LEX.SIGN + ")" | ||
161 | const TOKEN_LITERAL_DEC : String = "(([1-9])" + LEX.DIGIT_DEC +"*)" | ||
162 | const TOKEN_LITERAL_OCT : String = "(0" + LEX.DIGIT_OCT +"*)" | ||
163 | const TOKEN_LITERAL_HEX : String = "(0(x|X)(" + LEX.DIGIT_HEX +")+)" | ||
164 | const TOKEN_LITERAL_INT : String = "((\\+|\\-){0,1}" + TOKEN_LITERAL_DEC + "|" + TOKEN_LITERAL_OCT + "|" + TOKEN_LITERAL_HEX + ")" | ||
165 | const TOKEN_LITERAL_FLOAT_DEC : String = "(" + LEX.DIGIT_DEC + "+)" | ||
166 | const TOKEN_LITERAL_FLOAT_EXP : String = "((e|E)(\\+|\\-)?" + TOKEN_LITERAL_FLOAT_DEC + "+)" | ||
167 | const TOKEN_LITERAL_FLOAT : String = "((\\+|\\-){0,1}(" + TOKEN_LITERAL_FLOAT_DEC + "\\." + TOKEN_LITERAL_FLOAT_DEC + "?" + TOKEN_LITERAL_FLOAT_EXP + "?)|(" + TOKEN_LITERAL_FLOAT_DEC + TOKEN_LITERAL_FLOAT_EXP + ")|(\\." + TOKEN_LITERAL_FLOAT_DEC + TOKEN_LITERAL_FLOAT_EXP + "?))" | ||
168 | const TOKEN_SPACE : String = "(" + LEX.SPACE + ")+" | ||
169 | const TOKEN_COMMA : String = "(" + LEX.COMMA + ")" | ||
170 | const TOKEN_CHAR_ESC : String = "[\\\\(a|b|f|n|r|t|v|\\\\|'|\")]" | ||
171 | const TOKEN_OCT_ESC : String = "[\\\\" + LEX.DIGIT_OCT + "{3}]" | ||
172 | const TOKEN_HEX_ESC : String = "[\\\\(x|X)" + LEX.DIGIT_HEX + "{2}]" | ||
173 | const TOKEN_CHAR_EXCLUDE : String = "[^\\0\\n\\\\]" | ||
174 | const TOKEN_CHAR_VALUE : String = "(" + TOKEN_HEX_ESC + "|" + TOKEN_OCT_ESC + "|" + TOKEN_CHAR_ESC + "|" + TOKEN_CHAR_EXCLUDE + ")" | ||
175 | const TOKEN_STRING_SINGLE : String = "('" + TOKEN_CHAR_VALUE + "*?')" | ||
176 | const TOKEN_STRING_DOUBLE : String = "(\"" + TOKEN_CHAR_VALUE + "*?\")" | ||
177 | const TOKEN_COMMENT_SINGLE : String = "((//[^\\n\\r]*[^\\s])|//)" | ||
178 | const TOKEN_COMMENT_MULTI : String = "/\\*(.|[\\n\\r])*?\\*/" | ||
179 | |||
180 | const TOKEN_SECOND_MESSAGE : String = "^message$" | ||
181 | const TOKEN_SECOND_SIMPLE_DATA_TYPE : String = "^(double|float|int32|int64|uint32|uint64|sint32|sint64|fixed32|fixed64|sfixed32|sfixed64|bool|string|bytes)$" | ||
182 | const TOKEN_SECOND_ENUM : String = "^enum$" | ||
183 | const TOKEN_SECOND_MAP : String = "^map$" | ||
184 | const TOKEN_SECOND_ONEOF : String = "^oneof$" | ||
185 | const TOKEN_SECOND_LITERAL_BOOL : String = "^(true|false)$" | ||
186 | const TOKEN_SECOND_SYNTAX : String = "^syntax$" | ||
187 | const TOKEN_SECOND_IMPORT : String = "^import$" | ||
188 | const TOKEN_SECOND_PACKAGE : String = "^package$" | ||
189 | const TOKEN_SECOND_OPTION : String = "^option$" | ||
190 | const TOKEN_SECOND_SERVICE : String = "^service$" | ||
191 | const TOKEN_SECOND_RESERVED : String = "^reserved$" | ||
192 | const TOKEN_SECOND_IMPORT_QUALIFICATION : String = "^(weak|public)$" | ||
193 | const TOKEN_SECOND_FIELD_QUALIFICATION : String = "^(repeated|required|optional)$" | ||
194 | const TOKEN_SECOND_ENUM_OPTION : String = "^allow_alias$" | ||
195 | const TOKEN_SECOND_QUALIFICATION : String = "^(custom_option|extensions)$" | ||
196 | const TOKEN_SECOND_FIELD_OPTION : String = "^packed$" | ||
197 | |||
198 | class TokenEntrance: | ||
199 | func _init(i : int, b : int, e : int, t : String): | ||
200 | position = TokenPosition.new(b, e) | ||
201 | text = t | ||
202 | id = i | ||
203 | var position : TokenPosition | ||
204 | var text : String | ||
205 | var id : int | ||
206 | |||
207 | enum RANGE_STATE { | ||
208 | INCLUDE = 0, | ||
209 | EXCLUDE_LEFT = 1, | ||
210 | EXCLUDE_RIGHT = 2, | ||
211 | OVERLAY = 3, | ||
212 | EQUAL = 4, | ||
213 | ENTERS = 5 | ||
214 | } | ||
215 | |||
216 | class TokenRange: | ||
217 | func _init(b : int, e : int, s): | ||
218 | position = TokenPosition.new(b, e) | ||
219 | state = s | ||
220 | var position : TokenPosition | ||
221 | var state | ||
222 | |||
223 | class Token: | ||
224 | var _regex : RegEx | ||
225 | var _entrance : TokenEntrance = null | ||
226 | var _entrances : Array = [] | ||
227 | var _entrance_index : int = 0 | ||
228 | var _id : int | ||
229 | var _ignore : bool | ||
230 | var _clarification : String | ||
231 | |||
232 | func _init(id : int, clarification : String, regex_str : String, ignore = false): | ||
233 | _id = id | ||
234 | _regex = RegEx.new() | ||
235 | _regex.compile(regex_str) | ||
236 | _clarification = clarification | ||
237 | _ignore = ignore | ||
238 | |||
239 | func find(text : String, start : int) -> TokenEntrance: | ||
240 | _entrance = null | ||
241 | if !_regex.is_valid(): | ||
242 | return null | ||
243 | var match_result : RegExMatch = _regex.search(text, start) | ||
244 | if match_result != null: | ||
245 | var capture | ||
246 | capture = match_result.get_string(0) | ||
247 | if capture.is_empty(): | ||
248 | return null | ||
249 | _entrance = TokenEntrance.new(_id, match_result.get_start(0), capture.length() - 1 + match_result.get_start(0), capture) | ||
250 | return _entrance | ||
251 | |||
252 | func find_all(text : String) -> Array: | ||
253 | var pos : int = 0 | ||
254 | clear() | ||
255 | while find(text, pos) != null: | ||
256 | _entrances.append(_entrance) | ||
257 | pos = _entrance.position.end + 1 | ||
258 | return _entrances | ||
259 | |||
260 | func add_entrance(entrance) -> void: | ||
261 | _entrances.append(entrance) | ||
262 | |||
263 | func clear() -> void: | ||
264 | _entrance = null | ||
265 | _entrances = [] | ||
266 | _entrance_index = 0 | ||
267 | |||
268 | func get_entrances() -> Array: | ||
269 | return _entrances | ||
270 | |||
271 | func remove_entrance(index) -> void: | ||
272 | if index < _entrances.size(): | ||
273 | _entrances.remove_at(index) | ||
274 | |||
275 | func get_index() -> int: | ||
276 | return _entrance_index | ||
277 | |||
278 | func set_index(index : int) -> void: | ||
279 | if index < _entrances.size(): | ||
280 | _entrance_index = index | ||
281 | else: | ||
282 | _entrance_index = 0 | ||
283 | |||
284 | func is_ignore() -> bool: | ||
285 | return _ignore | ||
286 | |||
287 | func get_clarification() -> String: | ||
288 | return _clarification | ||
289 | |||
290 | class TokenResult: | ||
291 | var tokens : Array = [] | ||
292 | var errors : Array = [] | ||
293 | |||
294 | enum TOKEN_ID { | ||
295 | UNDEFINED = -1, | ||
296 | IDENT = 0, | ||
297 | FULL_IDENT = 1, | ||
298 | BRACKET_ROUND_LEFT = 2, | ||
299 | BRACKET_ROUND_RIGHT = 3, | ||
300 | BRACKET_CURLY_LEFT = 4, | ||
301 | BRACKET_CURLY_RIGHT = 5, | ||
302 | BRACKET_SQUARE_LEFT = 6, | ||
303 | BRACKET_SQUARE_RIGHT = 7, | ||
304 | BRACKET_ANGLE_LEFT = 8, | ||
305 | BRACKET_ANGLE_RIGHT = 9, | ||
306 | SEMICOLON = 10, | ||
307 | EUQAL = 11, | ||
308 | SIGN = 12, | ||
309 | INT = 13, | ||
310 | FLOAT = 14, | ||
311 | SPACE = 15, | ||
312 | COMMA = 16, | ||
313 | STRING_SINGLE = 17, | ||
314 | STRING_DOUBLE = 18, | ||
315 | COMMENT_SINGLE = 19, | ||
316 | COMMENT_MULTI = 20, | ||
317 | |||
318 | MESSAGE = 21, | ||
319 | SIMPLE_DATA_TYPE = 22, | ||
320 | ENUM = 23, | ||
321 | MAP = 24, | ||
322 | ONEOF = 25, | ||
323 | LITERAL_BOOL = 26, | ||
324 | SYNTAX = 27, | ||
325 | IMPORT = 28, | ||
326 | PACKAGE = 29, | ||
327 | OPTION = 30, | ||
328 | SERVICE = 31, | ||
329 | RESERVED = 32, | ||
330 | IMPORT_QUALIFICATION = 33, | ||
331 | FIELD_QUALIFICATION = 34, | ||
332 | ENUM_OPTION = 35, | ||
333 | QUALIFICATION = 36, | ||
334 | FIELD_OPTION = 37, | ||
335 | |||
336 | STRING = 38 | ||
337 | } | ||
338 | |||
339 | var TOKEN = { | ||
340 | TOKEN_ID.IDENT: Token.new(TOKEN_ID.IDENT, "Identifier", TOKEN_IDENT), | ||
341 | TOKEN_ID.FULL_IDENT: Token.new(TOKEN_ID.FULL_IDENT, "Full identifier", TOKEN_FULL_IDENT), | ||
342 | TOKEN_ID.BRACKET_ROUND_LEFT: Token.new(TOKEN_ID.BRACKET_ROUND_LEFT, "(", TOKEN_BRACKET_ROUND_LEFT), | ||
343 | TOKEN_ID.BRACKET_ROUND_RIGHT: Token.new(TOKEN_ID.BRACKET_ROUND_RIGHT, ")", TOKEN_BRACKET_ROUND_RIGHT), | ||
344 | TOKEN_ID.BRACKET_CURLY_LEFT: Token.new(TOKEN_ID.BRACKET_CURLY_LEFT, "{", TOKEN_BRACKET_CURLY_LEFT), | ||
345 | TOKEN_ID.BRACKET_CURLY_RIGHT: Token.new(TOKEN_ID.BRACKET_CURLY_RIGHT, "}", TOKEN_BRACKET_CURLY_RIGHT), | ||
346 | TOKEN_ID.BRACKET_SQUARE_LEFT: Token.new(TOKEN_ID.BRACKET_SQUARE_LEFT, "[", TOKEN_BRACKET_SQUARE_LEFT), | ||
347 | TOKEN_ID.BRACKET_SQUARE_RIGHT: Token.new(TOKEN_ID.BRACKET_SQUARE_RIGHT, "]", TOKEN_BRACKET_SQUARE_RIGHT), | ||
348 | TOKEN_ID.BRACKET_ANGLE_LEFT: Token.new(TOKEN_ID.BRACKET_ANGLE_LEFT, "<", TOKEN_BRACKET_ANGLE_LEFT), | ||
349 | TOKEN_ID.BRACKET_ANGLE_RIGHT: Token.new(TOKEN_ID.BRACKET_ANGLE_RIGHT, ">", TOKEN_BRACKET_ANGLE_RIGHT), | ||
350 | TOKEN_ID.SEMICOLON: Token.new(TOKEN_ID.SEMICOLON, ";", TOKEN_SEMICOLON), | ||
351 | TOKEN_ID.EUQAL: Token.new(TOKEN_ID.EUQAL, "=", TOKEN_EUQAL), | ||
352 | TOKEN_ID.INT: Token.new(TOKEN_ID.INT, "Integer", TOKEN_LITERAL_INT), | ||
353 | TOKEN_ID.FLOAT: Token.new(TOKEN_ID.FLOAT, "Float", TOKEN_LITERAL_FLOAT), | ||
354 | TOKEN_ID.SPACE: Token.new(TOKEN_ID.SPACE, "Space", TOKEN_SPACE), | ||
355 | TOKEN_ID.COMMA: Token.new(TOKEN_ID.COMMA, ",", TOKEN_COMMA), | ||
356 | TOKEN_ID.STRING_SINGLE: Token.new(TOKEN_ID.STRING_SINGLE, "'String'", TOKEN_STRING_SINGLE), | ||
357 | TOKEN_ID.STRING_DOUBLE: Token.new(TOKEN_ID.STRING_DOUBLE, "\"String\"", TOKEN_STRING_DOUBLE), | ||
358 | TOKEN_ID.COMMENT_SINGLE: Token.new(TOKEN_ID.COMMENT_SINGLE, "//Comment", TOKEN_COMMENT_SINGLE), | ||
359 | TOKEN_ID.COMMENT_MULTI: Token.new(TOKEN_ID.COMMENT_MULTI, "/*Comment*/", TOKEN_COMMENT_MULTI), | ||
360 | |||
361 | TOKEN_ID.MESSAGE: Token.new(TOKEN_ID.MESSAGE, "Message", TOKEN_SECOND_MESSAGE, true), | ||
362 | TOKEN_ID.SIMPLE_DATA_TYPE: Token.new(TOKEN_ID.SIMPLE_DATA_TYPE, "Data type", TOKEN_SECOND_SIMPLE_DATA_TYPE, true), | ||
363 | TOKEN_ID.ENUM: Token.new(TOKEN_ID.ENUM, "Enum", TOKEN_SECOND_ENUM, true), | ||
364 | TOKEN_ID.MAP: Token.new(TOKEN_ID.MAP, "Map", TOKEN_SECOND_MAP, true), | ||
365 | TOKEN_ID.ONEOF: Token.new(TOKEN_ID.ONEOF, "OneOf", TOKEN_SECOND_ONEOF, true), | ||
366 | TOKEN_ID.LITERAL_BOOL: Token.new(TOKEN_ID.LITERAL_BOOL, "Bool literal", TOKEN_SECOND_LITERAL_BOOL, true), | ||
367 | TOKEN_ID.SYNTAX: Token.new(TOKEN_ID.SYNTAX, "Syntax", TOKEN_SECOND_SYNTAX, true), | ||
368 | TOKEN_ID.IMPORT: Token.new(TOKEN_ID.IMPORT, "Import", TOKEN_SECOND_IMPORT, true), | ||
369 | TOKEN_ID.PACKAGE: Token.new(TOKEN_ID.PACKAGE, "Package", TOKEN_SECOND_PACKAGE, true), | ||
370 | TOKEN_ID.OPTION: Token.new(TOKEN_ID.OPTION, "Option", TOKEN_SECOND_OPTION, true), | ||
371 | TOKEN_ID.SERVICE: Token.new(TOKEN_ID.SERVICE, "Service", TOKEN_SECOND_SERVICE, true), | ||
372 | TOKEN_ID.RESERVED: Token.new(TOKEN_ID.RESERVED, "Reserved", TOKEN_SECOND_RESERVED, true), | ||
373 | TOKEN_ID.IMPORT_QUALIFICATION: Token.new(TOKEN_ID.IMPORT_QUALIFICATION, "Import qualification", TOKEN_SECOND_IMPORT_QUALIFICATION, true), | ||
374 | TOKEN_ID.FIELD_QUALIFICATION: Token.new(TOKEN_ID.FIELD_QUALIFICATION, "Field qualification", TOKEN_SECOND_FIELD_QUALIFICATION, true), | ||
375 | TOKEN_ID.ENUM_OPTION: Token.new(TOKEN_ID.ENUM_OPTION, "Enum option", TOKEN_SECOND_ENUM_OPTION, true), | ||
376 | TOKEN_ID.QUALIFICATION: Token.new(TOKEN_ID.QUALIFICATION, "Qualification", TOKEN_SECOND_QUALIFICATION, true), | ||
377 | TOKEN_ID.FIELD_OPTION: Token.new(TOKEN_ID.FIELD_OPTION, "Field option", TOKEN_SECOND_FIELD_OPTION, true), | ||
378 | |||
379 | TOKEN_ID.STRING: Token.new(TOKEN_ID.STRING, "String", "", true) | ||
380 | } | ||
381 | |||
382 | static func check_range(main : TokenEntrance, current : TokenEntrance) -> TokenRange: | ||
383 | if main.position.begin > current.position.begin: | ||
384 | if main.position.end > current.position.end: | ||
385 | if main.position.begin >= current.position.end: | ||
386 | return TokenRange.new(current.position.begin, current.position.end, RANGE_STATE.EXCLUDE_LEFT) | ||
387 | else: | ||
388 | return TokenRange.new(main.position.begin, current.position.end, RANGE_STATE.OVERLAY) | ||
389 | else: | ||
390 | return TokenRange.new(current.position.begin, current.position.end, RANGE_STATE.ENTERS) | ||
391 | elif main.position.begin < current.position.begin: | ||
392 | if main.position.end >= current.position.end: | ||
393 | return TokenRange.new(main.position.begin, main.position.end, RANGE_STATE.INCLUDE) | ||
394 | else: | ||
395 | if main.position.end < current.position.begin: | ||
396 | return TokenRange.new(main.position.begin, main.position.end, RANGE_STATE.EXCLUDE_RIGHT) | ||
397 | else: | ||
398 | return TokenRange.new(main.position.begin, current.position.end, RANGE_STATE.OVERLAY) | ||
399 | else: | ||
400 | if main.position.end == current.position.end: | ||
401 | return TokenRange.new(main.position.begin, main.position.end, RANGE_STATE.EQUAL) | ||
402 | elif main.position.end > current.position.end: | ||
403 | return TokenRange.new(main.position.begin, main.position.end, RANGE_STATE.INCLUDE) | ||
404 | else: | ||
405 | return TokenRange.new(current.position.begin, current.position.end, RANGE_STATE.ENTERS) | ||
406 | |||
407 | func tokenizer() -> TokenResult: | ||
408 | for k in TOKEN: | ||
409 | if !TOKEN[k].is_ignore(): | ||
410 | TOKEN[k].find_all(document.text) | ||
411 | var second_tokens : Array = [] | ||
412 | second_tokens.append(TOKEN[TOKEN_ID.MESSAGE]) | ||
413 | second_tokens.append(TOKEN[TOKEN_ID.SIMPLE_DATA_TYPE]) | ||
414 | second_tokens.append(TOKEN[TOKEN_ID.ENUM]) | ||
415 | second_tokens.append(TOKEN[TOKEN_ID.MAP]) | ||
416 | second_tokens.append(TOKEN[TOKEN_ID.ONEOF]) | ||
417 | second_tokens.append(TOKEN[TOKEN_ID.LITERAL_BOOL]) | ||
418 | second_tokens.append(TOKEN[TOKEN_ID.SYNTAX]) | ||
419 | second_tokens.append(TOKEN[TOKEN_ID.IMPORT]) | ||
420 | second_tokens.append(TOKEN[TOKEN_ID.PACKAGE]) | ||
421 | second_tokens.append(TOKEN[TOKEN_ID.OPTION]) | ||
422 | second_tokens.append(TOKEN[TOKEN_ID.SERVICE]) | ||
423 | second_tokens.append(TOKEN[TOKEN_ID.RESERVED]) | ||
424 | second_tokens.append(TOKEN[TOKEN_ID.IMPORT_QUALIFICATION]) | ||
425 | second_tokens.append(TOKEN[TOKEN_ID.FIELD_QUALIFICATION]) | ||
426 | second_tokens.append(TOKEN[TOKEN_ID.ENUM_OPTION]) | ||
427 | second_tokens.append(TOKEN[TOKEN_ID.QUALIFICATION]) | ||
428 | second_tokens.append(TOKEN[TOKEN_ID.FIELD_OPTION]) | ||
429 | |||
430 | var ident_token : Token = TOKEN[TOKEN_ID.IDENT] | ||
431 | for sec_token in second_tokens: | ||
432 | var remove_indexes : Array = [] | ||
433 | for i in range(ident_token.get_entrances().size()): | ||
434 | var entrance : TokenEntrance = sec_token.find(ident_token.get_entrances()[i].text, 0) | ||
435 | if entrance != null: | ||
436 | entrance.position.begin = ident_token.get_entrances()[i].position.begin | ||
437 | entrance.position.end = ident_token.get_entrances()[i].position.end | ||
438 | sec_token.add_entrance(entrance) | ||
439 | remove_indexes.append(i) | ||
440 | for i in range(remove_indexes.size()): | ||
441 | ident_token.remove_entrance(remove_indexes[i] - i) | ||
442 | for v in TOKEN[TOKEN_ID.STRING_DOUBLE].get_entrances(): | ||
443 | v.id = TOKEN_ID.STRING | ||
444 | TOKEN[TOKEN_ID.STRING].add_entrance(v) | ||
445 | TOKEN[TOKEN_ID.STRING_DOUBLE].clear() | ||
446 | for v in TOKEN[TOKEN_ID.STRING_SINGLE].get_entrances(): | ||
447 | v.id = TOKEN_ID.STRING | ||
448 | TOKEN[TOKEN_ID.STRING].add_entrance(v) | ||
449 | TOKEN[TOKEN_ID.STRING_SINGLE].clear() | ||
450 | var main_token : TokenEntrance | ||
451 | var cur_token : TokenEntrance | ||
452 | var main_index : int = -1 | ||
453 | var token_index_flag : bool = false | ||
454 | var result : TokenResult = TokenResult.new() | ||
455 | var check : TokenRange | ||
456 | var end : bool = false | ||
457 | var all : bool = false | ||
458 | var repeat : bool = false | ||
459 | while true: | ||
460 | all = true | ||
461 | for k in TOKEN: | ||
462 | if main_index == k: | ||
463 | continue | ||
464 | repeat = false | ||
465 | while TOKEN[k].get_entrances().size() > 0: | ||
466 | all = false | ||
467 | if !token_index_flag: | ||
468 | main_index = k | ||
469 | main_token = TOKEN[main_index].get_entrances()[0] | ||
470 | token_index_flag = true | ||
471 | break | ||
472 | else: | ||
473 | cur_token = TOKEN[k].get_entrances()[0] | ||
474 | check = check_range(main_token, cur_token) | ||
475 | if check.state == RANGE_STATE.INCLUDE: | ||
476 | TOKEN[k].remove_entrance(0) | ||
477 | end = true | ||
478 | elif check.state == RANGE_STATE.EXCLUDE_LEFT: | ||
479 | main_token = cur_token | ||
480 | main_index = k | ||
481 | end = false | ||
482 | repeat = true | ||
483 | break | ||
484 | elif check.state == RANGE_STATE.EXCLUDE_RIGHT: | ||
485 | end = true | ||
486 | break | ||
487 | elif check.state == RANGE_STATE.OVERLAY || check.state == RANGE_STATE.EQUAL: | ||
488 | result.errors.append(check) | ||
489 | TOKEN[main_index].remove_entrance(0) | ||
490 | TOKEN[k].remove_entrance(0) | ||
491 | token_index_flag = false | ||
492 | end = false | ||
493 | repeat = true | ||
494 | break | ||
495 | elif check.state == RANGE_STATE.ENTERS: | ||
496 | TOKEN[main_index].remove_entrance(0) | ||
497 | main_token = cur_token | ||
498 | main_index = k | ||
499 | end = false | ||
500 | repeat = true | ||
501 | break | ||
502 | if repeat: | ||
503 | break | ||
504 | if end: | ||
505 | if TOKEN[main_index].get_entrances().size() > 0: | ||
506 | result.tokens.append(main_token) | ||
507 | TOKEN[main_index].remove_entrance(0) | ||
508 | token_index_flag = false | ||
509 | if all: | ||
510 | break | ||
511 | return result | ||
512 | |||
513 | static func check_tokens_integrity(tokens : Array, end : int) -> Array: | ||
514 | var cur_index : int = 0 | ||
515 | var result : Array = [] | ||
516 | for v in tokens: | ||
517 | if v.position.begin > cur_index: | ||
518 | result.append(TokenPosition.new(cur_index, v.position.begin)) | ||
519 | cur_index = v.position.end + 1 | ||
520 | if cur_index < end: | ||
521 | result.append(TokenPosition.new(cur_index, end)) | ||
522 | return result | ||
523 | |||
524 | static func comment_space_processing(tokens : Array) -> void: | ||
525 | var remove_indexes : Array = [] | ||
526 | for i in range(tokens.size()): | ||
527 | if tokens[i].id == TOKEN_ID.COMMENT_SINGLE || tokens[i].id == TOKEN_ID.COMMENT_MULTI: | ||
528 | tokens[i].id = TOKEN_ID.SPACE | ||
529 | var space_index : int = -1 | ||
530 | for i in range(tokens.size()): | ||
531 | if tokens[i].id == TOKEN_ID.SPACE: | ||
532 | if space_index >= 0: | ||
533 | tokens[space_index].position.end = tokens[i].position.end | ||
534 | tokens[space_index].text = tokens[space_index].text + tokens[i].text | ||
535 | remove_indexes.append(i) | ||
536 | else: | ||
537 | space_index = i | ||
538 | else: | ||
539 | space_index = -1 | ||
540 | for i in range(remove_indexes.size()): | ||
541 | tokens.remove_at(remove_indexes[i] - i) | ||
542 | |||
543 | #Analysis rule | ||
544 | enum AR { | ||
545 | MAYBE = 1, | ||
546 | MUST_ONE = 2, | ||
547 | ANY = 3, | ||
548 | OR = 4, | ||
549 | MAYBE_BEGIN = 5, | ||
550 | MAYBE_END = 6, | ||
551 | ANY_BEGIN = 7, | ||
552 | ANY_END = 8 | ||
553 | } | ||
554 | |||
555 | #Space rule (space after token) | ||
556 | enum SP { | ||
557 | MAYBE = 1, | ||
558 | MUST = 2, | ||
559 | NO = 3 | ||
560 | } | ||
561 | |||
562 | #Analysis Syntax Description | ||
563 | class ASD: | ||
564 | func _init(t, s : int = SP.MAYBE, r : int = AR.MUST_ONE, i : bool = false): | ||
565 | token = t | ||
566 | space = s | ||
567 | rule = r | ||
568 | importance = i | ||
569 | var token | ||
570 | var space : int | ||
571 | var rule : int | ||
572 | var importance : bool | ||
573 | |||
574 | var TEMPLATE_SYNTAX : Array = [ | ||
575 | Callable(self, "desc_syntax"), | ||
576 | ASD.new(TOKEN_ID.SYNTAX), | ||
577 | ASD.new(TOKEN_ID.EUQAL), | ||
578 | ASD.new(TOKEN_ID.STRING, SP.MAYBE, AR.MUST_ONE, true), | ||
579 | ASD.new(TOKEN_ID.SEMICOLON) | ||
580 | ] | ||
581 | |||
# --- Syntax templates -------------------------------------------------------
# Each template is an Array whose element 0 is the semantic handler (a
# Callable invoked by description_compare() on a full match, or null for the
# BEGIN/END pseudo-templates), followed by ASD entries describing the token
# sequence: token id(s), space policy (SP.*), arrangement rule (AR.*), and an
# "importance" flag marking tokens forwarded to the handler.

# import ["public"] "path";  (qualification handled in desc_import)
var TEMPLATE_IMPORT : Array = [
	Callable(self, "desc_import"),
	ASD.new(TOKEN_ID.IMPORT, SP.MUST),
	ASD.new(TOKEN_ID.IMPORT_QUALIFICATION, SP.MUST, AR.MAYBE, true),
	ASD.new(TOKEN_ID.STRING, SP.MAYBE, AR.MUST_ONE, true),
	ASD.new(TOKEN_ID.SEMICOLON)
]

# package name;  (handler is currently a stub — see desc_package)
var TEMPLATE_PACKAGE : Array = [
	Callable(self, "desc_package"),
	ASD.new(TOKEN_ID.PACKAGE, SP.MUST),
	ASD.new([TOKEN_ID.IDENT, TOKEN_ID.FULL_IDENT], SP.MAYBE, AR.OR, true),
	ASD.new(TOKEN_ID.SEMICOLON)
]

# option name = value;  (handler is currently a stub — see desc_option)
var TEMPLATE_OPTION : Array = [
	Callable(self, "desc_option"),
	ASD.new(TOKEN_ID.OPTION, SP.MUST),
	ASD.new([TOKEN_ID.IDENT, TOKEN_ID.FULL_IDENT], SP.MAYBE, AR.OR, true),
	ASD.new(TOKEN_ID.EUQAL),
	ASD.new([TOKEN_ID.STRING, TOKEN_ID.INT, TOKEN_ID.FLOAT, TOKEN_ID.LITERAL_BOOL], SP.MAYBE, AR.OR, true),
	ASD.new(TOKEN_ID.SEMICOLON)
]

# [qualificator] type name = tag [option = bool];
var TEMPLATE_FIELD : Array = [
	Callable(self, "desc_field"),
	ASD.new(TOKEN_ID.FIELD_QUALIFICATION, SP.MUST, AR.MAYBE, true),
	ASD.new([TOKEN_ID.SIMPLE_DATA_TYPE, TOKEN_ID.IDENT, TOKEN_ID.FULL_IDENT], SP.MAYBE, AR.OR, true),
	ASD.new(TOKEN_ID.IDENT, SP.MAYBE, AR.MUST_ONE, true),
	ASD.new(TOKEN_ID.EUQAL),
	ASD.new(TOKEN_ID.INT, SP.MAYBE, AR.MUST_ONE, true),
	ASD.new(TOKEN_ID.BRACKET_SQUARE_LEFT, SP.MAYBE, AR.MAYBE_BEGIN),
	ASD.new(TOKEN_ID.FIELD_OPTION, SP.MAYBE, AR.MUST_ONE, true),
	ASD.new(TOKEN_ID.EUQAL),
	ASD.new(TOKEN_ID.LITERAL_BOOL, SP.MAYBE, AR.MUST_ONE, true),
	ASD.new(TOKEN_ID.BRACKET_SQUARE_RIGHT, SP.MAYBE, AR.MAYBE_END),
	ASD.new(TOKEN_ID.SEMICOLON)
]

# Same grammar inside a oneof; NOTE: shares the same Array instance.
var TEMPLATE_FIELD_ONEOF : Array = TEMPLATE_FIELD

# map<key_type, value_type> name = tag [option = bool];
var TEMPLATE_MAP_FIELD : Array = [
	Callable(self, "desc_map_field"),
	ASD.new(TOKEN_ID.MAP),
	ASD.new(TOKEN_ID.BRACKET_ANGLE_LEFT),
	ASD.new(TOKEN_ID.SIMPLE_DATA_TYPE, SP.MAYBE, AR.MUST_ONE, true),
	ASD.new(TOKEN_ID.COMMA),
	ASD.new([TOKEN_ID.SIMPLE_DATA_TYPE, TOKEN_ID.IDENT, TOKEN_ID.FULL_IDENT], SP.MAYBE, AR.OR, true),
	ASD.new(TOKEN_ID.BRACKET_ANGLE_RIGHT, SP.MUST),
	ASD.new(TOKEN_ID.IDENT, SP.MAYBE, AR.MUST_ONE, true),
	ASD.new(TOKEN_ID.EUQAL),
	ASD.new(TOKEN_ID.INT, SP.MAYBE, AR.MUST_ONE, true),
	ASD.new(TOKEN_ID.BRACKET_SQUARE_LEFT, SP.MAYBE, AR.MAYBE_BEGIN),
	ASD.new(TOKEN_ID.FIELD_OPTION, SP.MAYBE, AR.MUST_ONE, true),
	ASD.new(TOKEN_ID.EUQAL),
	ASD.new(TOKEN_ID.LITERAL_BOOL, SP.MAYBE, AR.MUST_ONE, true),
	ASD.new(TOKEN_ID.BRACKET_SQUARE_RIGHT, SP.MAYBE, AR.MAYBE_END),
	ASD.new(TOKEN_ID.SEMICOLON)
]

# Same grammar inside a oneof; NOTE: shares the same Array instance.
var TEMPLATE_MAP_FIELD_ONEOF : Array = TEMPLATE_MAP_FIELD

# enum Name { [option = bool;] (IDENT = INT;)* }
var TEMPLATE_ENUM : Array = [
	Callable(self, "desc_enum"),
	ASD.new(TOKEN_ID.ENUM, SP.MUST),
	ASD.new(TOKEN_ID.IDENT, SP.MAYBE, AR.MUST_ONE, true),
	ASD.new(TOKEN_ID.BRACKET_CURLY_LEFT),
	ASD.new(TOKEN_ID.OPTION, SP.MUST, AR.MAYBE_BEGIN),
	ASD.new(TOKEN_ID.ENUM_OPTION, SP.MAYBE, AR.MUST_ONE, true),
	ASD.new(TOKEN_ID.EUQAL),
	ASD.new(TOKEN_ID.LITERAL_BOOL, SP.MAYBE, AR.MUST_ONE, true),
	ASD.new(TOKEN_ID.SEMICOLON, SP.MAYBE, AR.MAYBE_END),
	ASD.new(TOKEN_ID.IDENT, SP.MAYBE, AR.ANY_BEGIN, true),
	ASD.new(TOKEN_ID.EUQAL),
	ASD.new(TOKEN_ID.INT, SP.MAYBE, AR.MUST_ONE, true),
	ASD.new(TOKEN_ID.SEMICOLON, SP.MAYBE, AR.ANY_END),
	ASD.new(TOKEN_ID.BRACKET_CURLY_RIGHT)
]

# message Name {  (body matched by subsequent constructions)
var TEMPLATE_MESSAGE_HEAD : Array = [
	Callable(self, "desc_message_head"),
	ASD.new(TOKEN_ID.MESSAGE, SP.MUST),
	ASD.new(TOKEN_ID.IDENT, SP.MAYBE, AR.MUST_ONE, true),
	ASD.new(TOKEN_ID.BRACKET_CURLY_LEFT)
]

# Closing brace of a message.
var TEMPLATE_MESSAGE_TAIL : Array = [
	Callable(self, "desc_message_tail"),
	ASD.new(TOKEN_ID.BRACKET_CURLY_RIGHT)
]

# oneof Name {
var TEMPLATE_ONEOF_HEAD : Array = [
	Callable(self, "desc_oneof_head"),
	ASD.new(TOKEN_ID.ONEOF, SP.MUST),
	ASD.new(TOKEN_ID.IDENT, SP.MAYBE, AR.MUST_ONE, true),
	ASD.new(TOKEN_ID.BRACKET_CURLY_LEFT),
]

# Closing brace of a oneof.
var TEMPLATE_ONEOF_TAIL : Array = [
	Callable(self, "desc_oneof_tail"),
	ASD.new(TOKEN_ID.BRACKET_CURLY_RIGHT)
]

# Pseudo-template for the start of the document (optional leading space).
var TEMPLATE_BEGIN : Array = [
	null,
	ASD.new(TOKEN_ID.SPACE, SP.NO, AR.MAYBE)
]

# Pseudo-template for the end of the document.
var TEMPLATE_END : Array = [
	null
]
693 | |||
# Returns the id of the token at `index`, or TOKEN_ID.UNDEFINED when the
# index lies past the end of the token list.
func get_token_id(tokens : Array, index : int) -> int:
	return tokens[index].id if index < tokens.size() else TOKEN_ID.UNDEFINED
698 | |||
# Outcome of matching one template against the token stream
# (see description_compare()).
enum COMPARE_STATE {
	DONE = 0,        # full match; semantic handler (if any) succeeded
	MISMATCH = 1,    # first token already disagreed with the template
	INCOMPLETE = 2,  # matched a prefix, then diverged
	ERROR_VALUE = 3  # tokens matched but the semantic handler rejected them
}
705 | |||
# Result of a template match: a COMPARE_STATE, the token index reached,
# and an optional error description from the semantic handler.
class TokenCompare:
	var state : int
	var index : int
	var description : String

	func _init(s : int, i : int, d : String = ""):
		state = s
		index = i
		description = d
714 | |||
# Validates the whitespace at `index` against the template's space policy.
# Returns: 1 = space present and consumed; 0 = no space needed/none present;
# -1 = space present but forbidden (SP.NO); -2 = space required (SP.MUST)
# but absent.
func check_space(tokens : Array, index : int, space) -> int:
	var has_space : bool = get_token_id(tokens, index) == TOKEN_ID.SPACE
	if has_space:
		if space == SP.MAYBE || space == SP.MUST:
			return 1
		if space == SP.NO:
			return -1
	elif space == SP.MUST:
		return -2
	return 0
727 | |||
# A token paired with its position in the token stream.
class IndexedToken:
	var token : TokenEntrance
	var index : int

	func _init(t : TokenEntrance, i : int):
		token = t
		index = i
734 | |||
# Collects `token` (with its stream index) into `importance` when the
# template entry is flagged as important for the semantic handler.
func token_importance_checkadd(template : ASD, token : TokenEntrance, index : int, importance : Array) -> void:
	if !template.importance:
		return
	importance.append(IndexedToken.new(token, index))
738 | |||
# Mutable context threaded through template matches: the running construction
# counter, the current brace-nesting depth, and the enclosing class (table
# index plus dotted name).
class CompareSettings:
	var construction_index : int
	var nesting : int
	var parent_index : int
	var parent_name : String

	func _init(ci : int, n : int, pi : int, pn : String = ""):
		construction_index = ci
		nesting = n
		parent_index = pi
		parent_name = pn
750 | |||
# Matches the token stream against one syntax template starting at `index`.
# Walks the template's ASD entries, consuming tokens (and optional spaces)
# as dictated by each entry's arrangement rule (AR.*) and space policy
# (SP.*). Supports optional groups (MAYBE_BEGIN..MAYBE_END), repeated
# groups (ANY_BEGIN..ANY_END, via index backtracking), single-token
# repetition (ANY) and alternatives (OR). Tokens marked important are
# collected and, on a full match, passed to the template's semantic handler
# (template[0]). Returns a TokenCompare with the final state and the next
# unconsumed token index.
func description_compare(template : Array, tokens : Array, index : int, settings : CompareSettings) -> TokenCompare:
	var j : int = index                    # cursor into `tokens`
	var space : int
	var rule : int
	var rule_flag : bool                   # current entry matched one token; consume it below
	var cont : bool                        # current entry already advanced `j` itself
	var check : int
	var maybe_group_skip : bool = false    # inside an optional group whose opener didn't match
	var any_group_index : int = -1         # template index of an open ANY_BEGIN group
	var any_end_group_index : int = -1     # template index of its ANY_END (backtrack target)
	var i : int = 0                        # cursor into `template` (entry 0 is the handler)
	var importance : Array = []            # IndexedTokens handed to the semantic handler
	while true:
		i += 1
		if i >= template.size():
			break
		rule_flag = false
		cont = false
		rule = template[i].rule
		space = template[i].space
		# Close (or keep skipping) an optional group whose opener was absent.
		if rule == AR.MAYBE_END && maybe_group_skip:
			maybe_group_skip = false
			continue
		if maybe_group_skip:
			continue
		if rule == AR.MAYBE:
			if template[i].token == get_token_id(tokens, j):
				token_importance_checkadd(template[i], tokens[j], j, importance)
				rule_flag = true
			else:
				continue
		elif rule == AR.MUST_ONE || rule == AR.MAYBE_END || rule == AR.ANY_END:
			if template[i].token == get_token_id(tokens, j):
				token_importance_checkadd(template[i], tokens[j], j, importance)
				rule_flag = true
		elif rule == AR.ANY:
			# Greedily consume zero-or-more occurrences of this one token.
			var find_any : bool = false
			while true:
				if template[i].token == get_token_id(tokens, j):
					token_importance_checkadd(template[i], tokens[j], j, importance)
					find_any = true
					j += 1
					check = check_space(tokens, j, space)
					if check < 0:
						return TokenCompare.new(COMPARE_STATE.INCOMPLETE, j)
					else:
						j += check
				else:
					if find_any:
						cont = true
					break
		elif rule == AR.OR:
			# First alternative that matches wins; it consumes its own token.
			var or_tokens = template[i].token
			for v in or_tokens:
				if v == get_token_id(tokens, j):
					token_importance_checkadd(template[i], tokens[j], j, importance)
					j += 1
					check = check_space(tokens, j, space)
					if check < 0:
						return TokenCompare.new(COMPARE_STATE.INCOMPLETE, j)
					else:
						j += check
					cont = true
					break
		elif rule == AR.MAYBE_BEGIN:
			if template[i].token == get_token_id(tokens, j):
				token_importance_checkadd(template[i], tokens[j], j, importance)
				rule_flag = true
			else:
				# Opener absent: skip entries until the matching MAYBE_END.
				maybe_group_skip = true
				continue
		elif rule == AR.ANY_BEGIN:
			if template[i].token == get_token_id(tokens, j):
				token_importance_checkadd(template[i], tokens[j], j, importance)
				rule_flag = true
				any_group_index = i
			else:
				# No further repetition: jump forward past the group's ANY_END
				# (set on a previous iteration, below).
				if any_end_group_index > 0:
					any_group_index = -1
					i = any_end_group_index
					any_end_group_index = -1
				continue
		if cont:
			continue
		if rule_flag:
			# Consume the matched token plus any whitespace the policy allows.
			j += 1
			check = check_space(tokens, j, space)
			if check < 0:
				return TokenCompare.new(COMPARE_STATE.INCOMPLETE, j)
			else:
				j += check
		else:
			# MISMATCH only when nothing was consumed at all; otherwise the
			# template matched a prefix and the input is INCOMPLETE.
			if j > index:
				return TokenCompare.new(COMPARE_STATE.INCOMPLETE, j)
			else:
				return TokenCompare.new(COMPARE_STATE.MISMATCH, j)
		# End of a repeated group: remember its end and loop back to its begin.
		if any_group_index >= 0 && rule == AR.ANY_END:
			any_end_group_index = i
			i = any_group_index - 1
	if template[0] != null:
		var result : DescriptionResult = template[0].call(importance, settings)
		if !result.success:
			return TokenCompare.new(COMPARE_STATE.ERROR_VALUE, result.error, result.description)
	return TokenCompare.new(COMPARE_STATE.DONE, j)
855 | |||
# All syntax templates, in the order used by TRANSLATION_TABLE indices.
var DESCRIPTION : Array = [
	TEMPLATE_BEGIN, #0
	TEMPLATE_SYNTAX, #1
	TEMPLATE_IMPORT, #2
	TEMPLATE_PACKAGE, #3
	TEMPLATE_OPTION, #4
	TEMPLATE_FIELD, #5
	TEMPLATE_FIELD_ONEOF, #6
	TEMPLATE_MAP_FIELD, #7
	TEMPLATE_MAP_FIELD_ONEOF, #8
	TEMPLATE_ENUM, #9
	TEMPLATE_MESSAGE_HEAD, #10
	TEMPLATE_MESSAGE_TAIL, #11
	TEMPLATE_ONEOF_HEAD, #12
	TEMPLATE_ONEOF_TAIL, #13
	TEMPLATE_END #14
]
873 | |||
# Transition kinds for TRANSLATION_TABLE: whether a construction may follow
# the previous one, and how it affects the brace-nesting counter.
enum JUMP {
	NOTHING = 0, #nothing
	SIMPLE = 1, #simple jump
	NESTED_INCREMENT = 2, #nested increment
	NESTED_DECREMENT = 3, #nested decrement
	MUST_NESTED_SIMPLE = 4, #check: must be nested > 0
	MUST_NESTED_INCREMENT = 5, #check: must be nested > 0, then nested increment
	MUST_NESTED_DECREMENT = 6, #nested decrement, then check: must be nested > 0
}
883 | |||
# Grammar transition table: TRANSLATION_TABLE[from][to] is a JUMP value
# saying whether construction `to` may follow `from` (indices match
# DESCRIPTION), and how the transition affects brace nesting.
var TRANSLATION_TABLE : Array = [
	# BEGIN SYNTAX IMPORT PACKAGE OPTION FIELD FIELD_O MAP_F MAP_F_O ENUM MES_H MES_T ONEOF_H ONEOF_T END
	[ 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], #BEGIN
	[ 0, 0, 1, 1, 1, 0, 0, 0, 0, 1, 2, 0, 0, 0, 1], #SYNTAX
	[ 0, 0, 1, 1, 1, 0, 0, 0, 0, 1, 2, 0, 0, 0, 1], #IMPORT
	[ 0, 0, 1, 1, 1, 0, 0, 0, 0, 1, 2, 0, 0, 0, 1], #PACKAGE
	[ 0, 0, 1, 1, 1, 0, 0, 0, 0, 1, 2, 0, 0, 0, 1], #OPTION
	[ 0, 0, 0, 0, 0, 4, 0, 4, 0, 1, 2, 3, 5, 0, 0], #FIELD
	[ 0, 0, 0, 0, 0, 0, 4, 0, 4, 0, 0, 0, 0, 6, 0], #FIELD_ONEOF
	[ 0, 0, 0, 0, 0, 4, 0, 4, 0, 1, 2, 3, 5, 0, 0], #MAP_F
	[ 0, 0, 0, 0, 0, 0, 4, 0, 4, 0, 0, 0, 0, 6, 0], #MAP_F_ONEOF
	[ 0, 0, 0, 0, 0, 4, 0, 4, 0, 1, 2, 3, 5, 0, 1], #ENUM
	[ 0, 0, 0, 0, 0, 4, 0, 4, 0, 1, 2, 3, 5, 0, 0], #MES_H
	[ 0, 0, 0, 0, 0, 4, 0, 4, 0, 1, 2, 3, 5, 0, 1], #MES_T
	[ 0, 0, 0, 0, 0, 0, 4, 0, 4, 0, 0, 0, 0, 0, 0], #ONEOF_H
	[ 0, 0, 0, 0, 0, 4, 0, 4, 0, 1, 2, 3, 5, 0, 1], #ONEOF_T
	[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] #END
]
902 | |||
# One recognized grammar construction: its token span and the index of the
# matched template in DESCRIPTION.
class Construction:
	var begin_token_index : int
	var end_token_index : int
	var description : int

	func _init(b : int, e : int, d : int):
		begin_token_index = b
		end_token_index = e
		description = d
911 | |||
# Outcome of analyze_tokens(): recognized constructions, a success flag,
# and error bookkeeping (template id, text, token indexes) on failure.
class TranslationResult:
	var constructions : Array = []
	var done : bool = false
	var error_description_id : int = -1    # index into DESCRIPTION, -1 when no error
	var error_description_text : String = ""
	var parse_token_index : int = 0        # token where the failed construction began
	var error_token_index : int = 0        # token where matching actually diverged
919 | |||
# Table-driven recognizer: repeatedly tries every template allowed after the
# current one (per TRANSLATION_TABLE), applying nesting checks from JUMP and
# recording each match as a Construction. Succeeds when all tokens are
# consumed, an END transition is allowed, and nesting is balanced.
func analyze_tokens(tokens : Array) -> TranslationResult:
	var i : int = 0
	var result : TranslationResult = TranslationResult.new()
	var comp : TokenCompare
	var cur_template_id : int = 0
	var error : bool = false
	var template_index : int
	var comp_set : CompareSettings = CompareSettings.new(result.constructions.size(), 0, -1)
	# The stream must start with the BEGIN pseudo-template (index 0).
	comp = description_compare(DESCRIPTION[cur_template_id], tokens, i, comp_set)
	if comp.state == COMPARE_STATE.DONE:
		i = comp.index
		while true:
			var end : bool = true
			var find : bool = false
			for j in range(TRANSLATION_TABLE[cur_template_id].size()):
				template_index = j
				# Reached the END column with tokens left over: hard error.
				if j == DESCRIPTION.size() - 1 && i < tokens.size():
					end = false
					if result.error_description_id < 0:
						error = true
					break
				if TRANSLATION_TABLE[cur_template_id][j] > 0:
					end = false
					comp_set.construction_index = result.constructions.size()
					comp = description_compare(DESCRIPTION[j], tokens, i, comp_set)
					if comp.state == COMPARE_STATE.DONE:
						# Apply the transition's nesting rule before accepting it.
						if TRANSLATION_TABLE[cur_template_id][j] == JUMP.NESTED_INCREMENT:
							comp_set.nesting += 1
						elif TRANSLATION_TABLE[cur_template_id][j] == JUMP.NESTED_DECREMENT:
							comp_set.nesting -= 1
							if comp_set.nesting < 0:
								error = true
								break
						elif TRANSLATION_TABLE[cur_template_id][j] == JUMP.MUST_NESTED_SIMPLE:
							if comp_set.nesting <= 0:
								error = true
								break
						elif TRANSLATION_TABLE[cur_template_id][j] == JUMP.MUST_NESTED_INCREMENT:
							if comp_set.nesting <= 0:
								error = true
								break
							comp_set.nesting += 1
						elif TRANSLATION_TABLE[cur_template_id][j] == JUMP.MUST_NESTED_DECREMENT:
							comp_set.nesting -= 1
							if comp_set.nesting <= 0:
								error = true
								break
						result.constructions.append(Construction.new(i, comp.index, j))
						find = true
						i = comp.index
						cur_template_id = j
						if i == tokens.size():
							# All tokens consumed: accept only if END is reachable
							# and nesting is balanced.
							if TRANSLATION_TABLE[cur_template_id][DESCRIPTION.size() - 1] == JUMP.SIMPLE:
								if comp_set.nesting == 0:
									end = true
								else:
									error = true
							else:
								error = true
						elif i > tokens.size():
							error = true
						break
					elif comp.state == COMPARE_STATE.INCOMPLETE:
						error = true
						break
					elif comp.state == COMPARE_STATE.ERROR_VALUE:
						error = true
						break
			if error:
				result.error_description_text = comp.description
				result.error_description_id = template_index
				result.parse_token_index = i
				if comp.index >= tokens.size():
					result.error_token_index = tokens.size() - 1
				else:
					result.error_token_index = comp.index
			if end:
				result.done = true
				result.error_description_id = -1
				break
			if !find:
				break
	return result
1003 | |||
# Kind of a named type recorded in class_table.
enum CLASS_TYPE {
	ENUM = 0,
	MESSAGE = 1,
	MAP = 2  # synthetic entry class generated for map<...> fields
}
1009 | |||
# Resolved protobuf scalar/composite type of a field
# (UNDEFINED until type resolution runs).
enum FIELD_TYPE {
	UNDEFINED = -1,
	INT32 = 0,
	SINT32 = 1,
	UINT32 = 2,
	INT64 = 3,
	SINT64 = 4,
	UINT64 = 5,
	BOOL = 6,
	ENUM = 7,
	FIXED32 = 8,
	SFIXED32 = 9,
	FLOAT = 10,
	FIXED64 = 11,
	SFIXED64 = 12,
	DOUBLE = 13,
	STRING = 14,
	BYTES = 15,
	MESSAGE = 16,
	MAP = 17
}
1031 | |||
# Field label: optional/required/repeated (proto2) or reserved.
enum FIELD_QUALIFICATOR {
	OPTIONAL = 0,
	REQUIRED = 1,
	REPEATED = 2,
	RESERVED = 3
}
1038 | |||
# Wire packing for repeated scalar fields ([packed=...]).
enum FIELD_OPTION {
	PACKED = 0,
	NOT_PACKED = 1
}
1043 | |||
# AST node for a named type: enum, message, or synthetic map-entry class.
# `values` holds ASTEnumValue entries for CLASS_TYPE.ENUM.
class ASTClass:
	var name : String
	var type : int
	var parent_index : int
	var parent_name : String
	var option : String
	var construction_index
	var values : Array

	func _init(n : String, t : int, p : int, pn : String, o : String, ci : int):
		name = n
		type = t
		parent_index = p
		parent_name = pn
		option = o
		construction_index = ci
		values = []

	# Deep copy; enum values are duplicated via their own copy().
	func copy() -> ASTClass:
		var duplicate : ASTClass = ASTClass.new(name, type, parent_index, parent_name, option, construction_index)
		for v in values:
			duplicate.values.append(v.copy())
		return duplicate
1067 | |||
# One enum member: identifier plus its numeric value kept as text.
class ASTEnumValue:
	var name : String
	var value : String

	func _init(n : String, v : String):
		name = n
		value = v

	func copy() -> ASTEnumValue:
		return ASTEnumValue.new(name, value)
1078 | |||
# AST node for a message field (including the implicit key/value fields of
# a map entry). `field_type` and `type_class_id` stay at their sentinel
# values until a later resolution pass fills them in.
class ASTField:
	var tag
	var name : String
	var type_name : String
	var parent_class_id : int
	var qualificator : int
	var option : int
	var construction_index : int
	var is_map_field : bool
	var field_type : int = FIELD_TYPE.UNDEFINED
	var type_class_id : int = -1

	func _init(t, n : String, tn : String, p : int, q : int, o : int, ci : int, mf : bool):
		tag = t
		name = n
		type_name = tn
		parent_class_id = p
		qualificator = q
		option = o
		construction_index = ci
		is_map_field = mf

	func copy() -> ASTField:
		var duplicate : ASTField = ASTField.new(tag, name, type_name, parent_class_id, qualificator, option, construction_index, is_map_field)
		duplicate.field_type = field_type
		duplicate.type_class_id = type_class_id
		return duplicate
1106 | |||
# How fields in an ASTFieldGroup relate: mutually exclusive (oneof) or all.
enum AST_GROUP_RULE {
	ONEOF = 0,
	ALL = 1
}
1111 | |||
# A group of fields parsed together (currently `oneof` blocks). `opened`
# stays true while fields are still being appended to the group.
class ASTFieldGroup:
	var name : String
	var parent_class_id : int
	var rule : int
	var field_indexes : Array = []
	var opened : bool

	func _init(n : String, pi : int, r : int):
		name = n
		parent_class_id = pi
		rule = r
		opened = true

	func copy() -> ASTFieldGroup:
		var duplicate : ASTFieldGroup = ASTFieldGroup.new(name, parent_class_id, rule)
		duplicate.opened = opened
		for fi in field_indexes:
			duplicate.field_indexes.append(fi)
		return duplicate
1131 | |||
# An imported .proto file: resolved path, `public` flag, and content hash
# used for duplicate detection.
class ASTImport:
	var path : String
	var public : bool
	var sha256 : String

	func _init(a_path : String, a_public : bool, sha : String):
		path = a_path
		public = a_public
		sha256 = sha
1141 | |||
# Parser output tables, populated by the desc_* semantic handlers.
var class_table : Array = []   # ASTClass entries (messages, enums, map types)
var field_table : Array = []   # ASTField entries
var group_table : Array = []   # ASTFieldGroup entries (oneof groups)
var import_table : Array = []  # ASTImport entries
var proto_version : int = 0    # 2 or 3 once desc_syntax has run; 0 before
1147 | |||
# Returned by the desc_* semantic handlers: success flag plus, on failure,
# an error payload (token index) and a human-readable description.
class DescriptionResult:
	var success : bool
	var error
	var description : String

	func _init(s : bool = true, e = null, d : String = ""):
		success = s
		error = e
		description = d
1156 | |||
# Strips the surrounding quote characters from a string token's text.
static func get_text_from_token(string_token : TokenEntrance) -> String:
	var quoted : String = string_token.text
	return quoted.substr(1, quoted.length() - 2)
1159 | |||
# Semantic handler for `syntax = "...";`. Records the protocol version
# (2 or 3) or fails when the string is neither "proto2" nor "proto3".
func desc_syntax(indexed_tokens : Array, settings : CompareSettings) -> DescriptionResult:
	var result : DescriptionResult = DescriptionResult.new()
	var version_text : String = get_text_from_token(indexed_tokens[0].token)
	match version_text:
		"proto2":
			proto_version = 2
		"proto3":
			proto_version = 3
		_:
			result.success = false
			result.error = indexed_tokens[0].index
			result.description = "Unspecified version of the protocol. Use \"proto2\" or \"proto3\" syntax string."
	return result
1172 | |||
# Semantic handler for `import ["public"] "file";`. Resolves the path
# relative to path_dir, rejects duplicates (by path or by content hash),
# and records the import.
func desc_import(indexed_tokens : Array, settings : CompareSettings) -> DescriptionResult:
	var result : DescriptionResult = DescriptionResult.new()
	var offset : int = 0
	var public : bool = false
	if indexed_tokens[offset].token.id == TOKEN_ID.IMPORT_QUALIFICATION:
		if indexed_tokens[offset].token.text == "public":
			public = true
		offset += 1
	var f_name : String = path_dir + get_text_from_token(indexed_tokens[offset].token)
	# NOTE(review): the hash is computed before the existence check —
	# presumably get_sha256 returns an empty string for a missing file;
	# verify, since an empty sha could collide with another failed read.
	var sha : String = FileAccess.get_sha256(f_name)
	if FileAccess.file_exists(f_name):
		for i in import_table:
			if i.path == f_name:
				result.success = false
				result.error = indexed_tokens[offset].index
				result.description = "File '" + f_name + "' already imported."
				return result
			if i.sha256 == sha:
				result.success = false
				result.error = indexed_tokens[offset].index
				result.description = "File '" + f_name + "' with matching SHA256 already imported."
				return result
		import_table.append(ASTImport.new(f_name, public, sha))
	else:
		result.success = false
		result.error = indexed_tokens[offset].index
		result.description = "Import file '" + f_name + "' not found."
	return result
1201 | |||
# Placeholder handler for `package` statements: not implemented yet; logs
# a diagnostic and accepts the statement unconditionally.
func desc_package(indexed_tokens : Array, settings : CompareSettings) -> DescriptionResult:
	printerr("UNRELEASED desc_package: ", indexed_tokens.size(), ", nesting: ", settings.nesting)
	return DescriptionResult.new()
1206 | |||
# Placeholder handler for top-level `option` statements: not implemented
# yet; logs a diagnostic and accepts the statement unconditionally.
func desc_option(indexed_tokens : Array, settings : CompareSettings) -> DescriptionResult:
	printerr("UNRELEASED desc_option: ", indexed_tokens.size(), ", nesting: ", settings.nesting)
	return DescriptionResult.new()
1211 | |||
# Semantic handler for a regular field declaration. Validates the
# qualificator against the protocol version (v3 forbids required/optional
# labels; v2 requires a label outside oneof blocks), parses the optional
# [packed=...] option, attaches the field to an open oneof group if any,
# and appends it to field_table.
func desc_field(indexed_tokens : Array, settings : CompareSettings) -> DescriptionResult:
	var result : DescriptionResult = DescriptionResult.new()
	var qualifcator : int = FIELD_QUALIFICATOR.OPTIONAL
	# NOTE(review): `option` is only assigned when proto_version is 2 or 3;
	# an unparsed version would leave it at GDScript's default 0 (= PACKED).
	var option : int
	var offset : int = 0

	if proto_version == 3:
		option = FIELD_OPTION.PACKED
		if indexed_tokens[offset].token.id == TOKEN_ID.FIELD_QUALIFICATION:
			if indexed_tokens[offset].token.text == "repeated":
				qualifcator = FIELD_QUALIFICATOR.REPEATED
			elif indexed_tokens[offset].token.text == "required" || indexed_tokens[offset].token.text == "optional":
				result.success = false
				result.error = indexed_tokens[offset].index
				result.description = "Using the 'required' or 'optional' qualificator is unacceptable in Protobuf v3."
				return result
			offset += 1
	if proto_version == 2:
		option = FIELD_OPTION.NOT_PACKED
		# Labels are mandatory in v2 — except inside an open oneof group.
		if !(group_table.size() > 0 && group_table[group_table.size() - 1].opened):
			if indexed_tokens[offset].token.id == TOKEN_ID.FIELD_QUALIFICATION:
				if indexed_tokens[offset].token.text == "repeated":
					qualifcator = FIELD_QUALIFICATOR.REPEATED
				elif indexed_tokens[offset].token.text == "required":
					qualifcator = FIELD_QUALIFICATOR.REQUIRED
				elif indexed_tokens[offset].token.text == "optional":
					qualifcator = FIELD_QUALIFICATOR.OPTIONAL
				offset += 1
			else:
				if class_table[settings.parent_index].type == CLASS_TYPE.MESSAGE:
					result.success = false
					result.error = indexed_tokens[offset].index
					result.description = "Using the 'required', 'optional' or 'repeated' qualificator necessarily in Protobuf v2."
					return result
	var type_name : String = indexed_tokens[offset].token.text; offset += 1
	var field_name : String = indexed_tokens[offset].token.text; offset += 1
	var tag : String = indexed_tokens[offset].token.text; offset += 1

	# Two trailing important tokens means a [name = bool] field option.
	if indexed_tokens.size() == offset + 2:
		if indexed_tokens[offset].token.text == "packed":
			offset += 1
			if indexed_tokens[offset].token.text == "true":
				option = FIELD_OPTION.PACKED
			else:
				option = FIELD_OPTION.NOT_PACKED
		else:
			result.success = false
			result.error = indexed_tokens[offset].index
			result.description = "Undefined field option."
			return result

	if group_table.size() > 0:
		if group_table[group_table.size() - 1].opened:
			# Inside a oneof no label is allowed at all.
			if indexed_tokens[0].token.id == TOKEN_ID.FIELD_QUALIFICATION:
				result.success = false
				result.error = indexed_tokens[0].index
				result.description = "Using the 'required', 'optional' or 'repeated' qualificator is unacceptable in 'OneOf' field."
				return result
			group_table[group_table.size() - 1].field_indexes.append(field_table.size())
	field_table.append(ASTField.new(tag, field_name, type_name, settings.parent_index, qualifcator, option, settings.construction_index, false))
	return result
1273 | |||
# Semantic handler for `map<key_type, value_type> name = tag [options];`.
# Registers a synthetic entry class ("map_type_<name>") plus its implicit
# `key`/`value` fields, and the map field itself on the parent message.
func desc_map_field(indexed_tokens : Array, settings : CompareSettings) -> DescriptionResult:
	var result : DescriptionResult = DescriptionResult.new()
	var qualifcator : int = FIELD_QUALIFICATOR.REPEATED
	var option : int
	var offset : int = 0

	if proto_version == 3:
		option = FIELD_OPTION.PACKED
	if proto_version == 2:
		option = FIELD_OPTION.NOT_PACKED

	var key_type_name : String = indexed_tokens[offset].token.text; offset += 1
	if key_type_name == "float" || key_type_name == "double" || key_type_name == "bytes":
		result.success = false
		result.error = indexed_tokens[offset - 1].index
		result.description = "Map 'key_type' can't be floating point types and bytes."
		# Fixed: abort on the invalid key type instead of falling through and
		# still registering the field (mirrors the error handling in desc_field).
		return result
	var type_name : String = indexed_tokens[offset].token.text; offset += 1
	var field_name : String = indexed_tokens[offset].token.text; offset += 1
	var tag : String = indexed_tokens[offset].token.text; offset += 1

	# Two trailing important tokens means a [name = bool] field option.
	if indexed_tokens.size() == offset + 2:
		if indexed_tokens[offset].token.text == "packed":
			offset += 1
			# Fixed: compare the token's text, not the IndexedToken object
			# itself — the old `indexed_tokens[offset] == "true"` was always
			# false, so [packed=true] was silently treated as not packed.
			if indexed_tokens[offset].token.text == "true":
				option = FIELD_OPTION.PACKED
			else:
				option = FIELD_OPTION.NOT_PACKED
		else:
			result.success = false
			result.error = indexed_tokens[offset].index
			result.description = "Undefined field option."
			# Fixed: abort on the unknown option (mirrors desc_field).
			return result

	if group_table.size() > 0:
		if group_table[group_table.size() - 1].opened:
			group_table[group_table.size() - 1].field_indexes.append(field_table.size())

	class_table.append(ASTClass.new("map_type_" + field_name, CLASS_TYPE.MAP, settings.parent_index, settings.parent_name, "", settings.construction_index))
	field_table.append(ASTField.new(tag, field_name, "map_type_" + field_name, settings.parent_index, qualifcator, option, settings.construction_index, false))

	# Implicit entry fields: key is always tag 1, value tag 2.
	field_table.append(ASTField.new(1, "key", key_type_name, class_table.size() - 1, FIELD_QUALIFICATOR.OPTIONAL, option, settings.construction_index, true))
	field_table.append(ASTField.new(2, "value", type_name, class_table.size() - 1, FIELD_QUALIFICATOR.OPTIONAL, option, settings.construction_index, true))

	return result
1317 | |||
# Semantic handler for an `enum Name { ... }` block. Parses the optional
# allow_alias option, enforces that the first member's value is 0, and
# records the enum (with its members) in class_table.
func desc_enum(indexed_tokens : Array, settings : CompareSettings) -> DescriptionResult:
	var result : DescriptionResult = DescriptionResult.new()
	var option : String = ""
	var offset : int = 0
	var type_name : String = indexed_tokens[offset].token.text; offset += 1
	if indexed_tokens[offset].token.id == TOKEN_ID.ENUM_OPTION:
		if indexed_tokens[offset].token.text == "allow_alias" && indexed_tokens[offset + 1].token.text == "true":
			option = "allow_alias"
		# Skip the option name/value pair even when it wasn't allow_alias=true.
		offset += 2
	var value : ASTEnumValue
	var enum_class : ASTClass = ASTClass.new(type_name, CLASS_TYPE.ENUM, settings.parent_index, settings.parent_name, option, settings.construction_index)
	var first_value : bool = true
	# Remaining important tokens come in (name, value) pairs.
	while offset < indexed_tokens.size():
		if first_value:
			if indexed_tokens[offset + 1].token.text != "0":
				result.success = false
				result.error = indexed_tokens[offset + 1].index
				result.description = "For Enums, the default value is the first defined enum value, which must be 0."
				break
			first_value = false
		#if indexed_tokens[offset + 1].token.text[0] == "+" || indexed_tokens[offset + 1].token.text[0] == "-":
		#	result.success = false
		#	result.error = indexed_tokens[offset + 1].index
		#	result.description = "For Enums, signed values are not allowed."
		#	break
		value = ASTEnumValue.new(indexed_tokens[offset].token.text, indexed_tokens[offset + 1].token.text)
		enum_class.values.append(value)
		offset += 2

	# NOTE: the enum is appended even on the first-value error above;
	# `result.success` still reports the failure to the caller.
	class_table.append(enum_class)
	return result
1349 | |||
# Semantic handler for `message Name {`: registers the message class and
# makes it the current parent for all nested declarations.
func desc_message_head(indexed_tokens : Array, settings : CompareSettings) -> DescriptionResult:
	var message_name : String = indexed_tokens[0].token.text
	class_table.append(ASTClass.new(message_name, CLASS_TYPE.MESSAGE, settings.parent_index, settings.parent_name, "", settings.construction_index))
	settings.parent_index = class_table.size() - 1
	settings.parent_name = settings.parent_name + "." + message_name
	return DescriptionResult.new()
1356 | |||
# Semantic handler for a message's closing `}`: restores the enclosing
# class as the current parent. After the pop, `parent_index + 1` is read
# to recover the enclosing dotted name — NOTE(review): this presumably
# relies on the just-closed (or first nested) class sitting immediately
# after its parent in class_table; verify against sibling/nesting order.
func desc_message_tail(indexed_tokens : Array, settings : CompareSettings) -> DescriptionResult:
	settings.parent_index = class_table[settings.parent_index].parent_index
	settings.parent_name = class_table[settings.parent_index + 1].parent_name
	var result : DescriptionResult = DescriptionResult.new()
	return result
1362 | |||
# Semantic handler for `oneof Name {`: rejects a duplicate group name
# within the same message, then opens a new field group.
func desc_oneof_head(indexed_tokens : Array, settings : CompareSettings) -> DescriptionResult:
	var result : DescriptionResult = DescriptionResult.new()
	var oneof_name : String = indexed_tokens[0].token.text
	for existing in group_table:
		if existing.parent_class_id == settings.parent_index && existing.name == oneof_name:
			result.success = false
			result.error = indexed_tokens[0].index
			result.description = "OneOf name must be unique."
			return result
	group_table.append(ASTFieldGroup.new(oneof_name, settings.parent_index, AST_GROUP_RULE.ONEOF))
	return result
1373 | |||
# Semantic handler for a oneof's closing `}`: marks the newest group closed
# so subsequent fields attach to the enclosing message again.
func desc_oneof_tail(indexed_tokens : Array, settings : CompareSettings) -> DescriptionResult:
	group_table.back().opened = false
	return DescriptionResult.new()
1378 | |||
	# Full front-end pass over `document`: tokenize, verify the tokens cover
	# the whole text, strip comments/whitespace, then run the syntax analyzer.
	# Returns an AnalyzeResult whose `state` is true only when every stage
	# succeeded; failures are reported via printerr as they are found.
	func analyze() -> AnalyzeResult:
		var analyze_result : AnalyzeResult = AnalyzeResult.new()
		analyze_result.doc = document
		analyze_result.classes = class_table
		analyze_result.fields = field_table
		analyze_result.groups = group_table
		analyze_result.state = false
		var result : TokenResult = tokenizer()
		if result.errors.size() > 0:
			# Overlapping token matches: report every intersection.
			for v in result.errors:
				var spos : Helper.StringPosition = Helper.str_pos(document.text, v.position)
				var err_text : String = "Unexpected token intersection " + "'" + document.text.substr(v.position.begin, spos.length) + "'"
				printerr(Helper.error_string(document.name, spos.str_num, spos.column, err_text))
		else:
			# Make sure tokenization left no uncovered spans in the text.
			var integrity = check_tokens_integrity(result.tokens, document.text.length() - 1)
			if integrity.size() > 0:
				for v in integrity:
					var spos: Helper.StringPosition = Helper.str_pos(document.text, TokenPosition.new(v.begin, v.end))
					var err_text : String = "Unexpected token " + "'" + document.text.substr(v.begin, spos.length) + "'"
					printerr(Helper.error_string(document.name, spos.str_num, spos.column, err_text))
			else:
				analyze_result.tokens = result.tokens
				comment_space_processing(result.tokens)
				var syntax : TranslationResult = analyze_tokens(result.tokens)
				if !syntax.done:
					# Report both the construction where parsing stopped and
					# the exact offending token inside it.
					var pos_main : TokenPosition = Helper.text_pos(result.tokens, syntax.parse_token_index)
					var pos_inner : TokenPosition = Helper.text_pos(result.tokens, syntax.error_token_index)
					var spos_main : Helper.StringPosition = Helper.str_pos(document.text, pos_main)
					var spos_inner : Helper.StringPosition = Helper.str_pos(document.text, pos_inner)
					var err_text : String = "Syntax error in construction '" + result.tokens[syntax.parse_token_index].text + "'. "
					err_text += "Unacceptable use '" + result.tokens[syntax.error_token_index].text + "' at:" + str(spos_inner.str_num) + ":" + str(spos_inner.column)
					err_text += "\n" + syntax.error_description_text
					printerr(Helper.error_string(document.name, spos_main.str_num, spos_main.column, err_text))
				else:
					analyze_result.version = proto_version
					analyze_result.imports = import_table
					analyze_result.syntax = syntax
					analyze_result.state = true
		return analyze_result
1418 | |||
# Semantic analyzer: validates the AST tables produced by Analysis —
# uniqueness of class names, field names and tag numbers — and resolves each
# field's textual type name either to a scalar FIELD_TYPE or to an index into
# class_table.
class Semantic:

	var class_table : Array
	var field_table : Array
	var group_table : Array
	var syntax : Analysis.TranslationResult
	var tokens : Array
	var document : Document

	func _init(analyze_result : AnalyzeResult):
		class_table = analyze_result.classes
		field_table = analyze_result.fields
		group_table = analyze_result.groups
		syntax = analyze_result.syntax
		tokens = analyze_result.tokens
		document = analyze_result.doc


	# What a CheckResult complains about.
	enum CHECK_SUBJECT {
		CLASS_NAME = 0,
		FIELD_NAME = 1,
		FIELD_TAG_NUMBER = 2,
		FIELD_TYPE = 3
	}

	# Proto scalar type keywords -> FIELD_TYPE.  Any type name not present
	# here must resolve to a user-declared message/enum/map class.
	var STRING_FIELD_TYPE = {
		"int32": Analysis.FIELD_TYPE.INT32,
		"sint32": Analysis.FIELD_TYPE.SINT32,
		"uint32": Analysis.FIELD_TYPE.UINT32,
		"int64": Analysis.FIELD_TYPE.INT64,
		"sint64": Analysis.FIELD_TYPE.SINT64,
		"uint64": Analysis.FIELD_TYPE.UINT64,
		"bool": Analysis.FIELD_TYPE.BOOL,
		"fixed32": Analysis.FIELD_TYPE.FIXED32,
		"sfixed32": Analysis.FIELD_TYPE.SFIXED32,
		"float": Analysis.FIELD_TYPE.FLOAT,
		"fixed64": Analysis.FIELD_TYPE.FIXED64,
		"sfixed64": Analysis.FIELD_TYPE.SFIXED64,
		"double": Analysis.FIELD_TYPE.DOUBLE,
		"string": Analysis.FIELD_TYPE.STRING,
		"bytes": Analysis.FIELD_TYPE.BYTES,
		"map": Analysis.FIELD_TYPE.MAP
	}

	# One detected semantic error: the offending construction, the earlier
	# construction it clashes with, the table row and the CHECK_SUBJECT kind.
	class CheckResult:
		func _init(mci : int, aci : int, ti : int, s : int):
			main_construction_index = mci
			associated_construction_index = aci
			table_index = ti
			subject = s

		var main_construction_index: int = -1
		var associated_construction_index: int = -1
		var table_index: int = -1
		var subject : int

	# Reports classes whose fully-qualified name duplicates an earlier one.
	func check_class_names() -> Array:
		var result : Array = []
		for i in range(class_table.size()):
			var the_class_name : String = class_table[i].parent_name + "." + class_table[i].name
			for j in range(i + 1, class_table.size(), 1):
				var inner_name : String = class_table[j].parent_name + "." + class_table[j].name
				if inner_name == the_class_name:
					var check : CheckResult = CheckResult.new(class_table[j].construction_index, class_table[i].construction_index, j, CHECK_SUBJECT.CLASS_NAME)
					result.append(check)
					break
		return result

	# Reports fields that share a name or a tag number within the same class.
	func check_field_names() -> Array:
		var result : Array = []
		for i in range(field_table.size()):
			var the_class_name : String = class_table[field_table[i].parent_class_id].parent_name + "." + class_table[field_table[i].parent_class_id].name
			for j in range(i + 1, field_table.size(), 1):
				var inner_name : String = class_table[field_table[j].parent_class_id].parent_name + "." + class_table[field_table[j].parent_class_id].name
				if inner_name == the_class_name:
					if field_table[i].name == field_table[j].name:
						var check : CheckResult = CheckResult.new(field_table[j].construction_index, field_table[i].construction_index, j, CHECK_SUBJECT.FIELD_NAME)
						result.append(check)
						break
					if field_table[i].tag == field_table[j].tag:
						var check : CheckResult = CheckResult.new(field_table[j].construction_index, field_table[i].construction_index, j, CHECK_SUBJECT.FIELD_TAG_NUMBER)
						result.append(check)
						break
		return result

	# Index of the class whose fully-qualified path (parent_name + "." + name)
	# equals `the_class_name`, or -1.
	func find_full_class_name(the_class_name : String) -> int:
		for i in range(class_table.size()):
			if the_class_name == class_table[i].parent_name + "." + class_table[i].name:
				return i
		return -1

	# Index of the first class with the given short name, or -1.
	func find_class_name(the_class_name : String) -> int:
		for i in range(class_table.size()):
			if the_class_name == class_table[i].name:
				return i
		return -1

	# Indexes of the classes nested directly inside `class_index`.
	func get_class_childs(class_index : int) -> Array:
		var result : Array = []
		for i in range(class_table.size()):
			if class_table[i].parent_index == class_index:
				result.append(i)
		return result

	# Index within `child_indexes` of the class named `the_class_name`, or -1.
	func find_in_childs(the_class_name : String, child_indexes : Array) -> int:
		for c in child_indexes:
			if the_class_name == class_table[c].name:
				return c
		return -1

	# Resolves every field's type_name: scalar keywords via STRING_FIELD_TYPE,
	# absolute names (leading '.') via the full class path, and relative names
	# by searching the field's own class scope and then each enclosing scope,
	# protobuf-style.  Returns CheckResults for types that stay unresolved.
	func determine_field_types() -> Array:
		var result : Array = []
		for f in field_table:
			if STRING_FIELD_TYPE.has(f.type_name):
				f.field_type = STRING_FIELD_TYPE[f.type_name]
			else:
				if f.type_name[0] == ".":
					f.type_class_id = find_full_class_name(f.type_name)
				else:
					# Reset result from previous assignment, that can be incorrect because of merging of imports
					f.type_class_id = -1
					var splited_name : Array = f.type_name.split(".", false)
					var cur_class_index : int = f.parent_class_id
					var exit : bool = false
					while(true):
						var find : bool = false
						if cur_class_index == -1:
							break
						# Walk the dotted name segment by segment inside the
						# current scope; on a miss before the first match,
						# retry one scope up.
						for n in splited_name:
							var childs_and_parent : Array = get_class_childs(cur_class_index)
							var res_index : int = find_in_childs(n, childs_and_parent)
							if res_index >= 0:
								find = true
								cur_class_index = res_index
							else:
								if find:
									# Matched a prefix but not the rest:
									# resolution failed, stop searching.
									exit = true
								else:
									cur_class_index = class_table[cur_class_index].parent_index
								break
						if exit:
							break
						if find:
							f.type_class_id = cur_class_index
							break
					if f.type_class_id == -1:
						# Last resort: try the name as if it were absolute.
						f.type_class_id = find_full_class_name("." + f.type_name)
		# Classify every still-UNDEFINED field by the kind of class it
		# resolved to, or record a FIELD_TYPE error.
		for i in range(field_table.size()):
			if field_table[i].field_type == Analysis.FIELD_TYPE.UNDEFINED:
				if field_table[i].type_class_id == -1:
					result.append(CheckResult.new(field_table[i].construction_index, field_table[i].construction_index, i, CHECK_SUBJECT.FIELD_TYPE))
				else:
					if class_table[field_table[i].type_class_id].type == Analysis.CLASS_TYPE.ENUM:
						field_table[i].field_type = Analysis.FIELD_TYPE.ENUM
					elif class_table[field_table[i].type_class_id].type == Analysis.CLASS_TYPE.MESSAGE:
						field_table[i].field_type = Analysis.FIELD_TYPE.MESSAGE
					elif class_table[field_table[i].type_class_id].type == Analysis.CLASS_TYPE.MAP:
						field_table[i].field_type = Analysis.FIELD_TYPE.MAP
					else:
						result.append(CheckResult.new(field_table[i].construction_index, field_table[i].construction_index, i, CHECK_SUBJECT.FIELD_TYPE))
		return result

	# Runs every check and concatenates their error lists.
	func check_constructions() -> Array:
		var cl : Array = check_class_names()
		var fl : Array = check_field_names()
		var ft : Array = determine_field_types()
		return cl + fl + ft

	# Entry point: prints every semantic error via printerr and returns true
	# only when there were none.
	func check() -> bool:
		var check_result : Array = check_constructions()
		if check_result.size() == 0:
			return true
		else:
			for v in check_result:
				var main_tok : int = syntax.constructions[v.main_construction_index].begin_token_index
				var assoc_tok : int = syntax.constructions[v.associated_construction_index].begin_token_index
				var main_err_pos : Helper.StringPosition = Helper.str_pos(document.text, Helper.text_pos(tokens, main_tok))
				var assoc_err_pos : Helper.StringPosition = Helper.str_pos(document.text, Helper.text_pos(tokens, assoc_tok))
				var err_text : String
				if v.subject == CHECK_SUBJECT.CLASS_NAME:
					var class_type = "Undefined"
					if class_table[v.table_index].type == Analysis.CLASS_TYPE.ENUM:
						class_type = "Enum"
					elif class_table[v.table_index].type == Analysis.CLASS_TYPE.MESSAGE:
						class_type = "Message"
					elif class_table[v.table_index].type == Analysis.CLASS_TYPE.MAP:
						class_type = "Map"
					err_text = class_type + " name '" + class_table[v.table_index].name + "' is already defined at:" + str(assoc_err_pos.str_num) + ":" + str(assoc_err_pos.column)
				elif v.subject == CHECK_SUBJECT.FIELD_NAME:
					err_text = "Field name '" + field_table[v.table_index].name + "' is already defined at:" + str(assoc_err_pos.str_num) + ":" + str(assoc_err_pos.column)
				elif v.subject == CHECK_SUBJECT.FIELD_TAG_NUMBER:
					# NOTE(review): tag is concatenated without str() here —
					# assumes the tag is stored as a String; confirm upstream.
					err_text = "Tag number '" + field_table[v.table_index].tag + "' is already defined at:" + str(assoc_err_pos.str_num) + ":" + str(assoc_err_pos.column)
				elif v.subject == CHECK_SUBJECT.FIELD_TYPE:
					err_text = "Type '" + field_table[v.table_index].type_name + "' of the '" + field_table[v.table_index].name + "' field undefined"
				else:
					err_text = "Undefined error"
				printerr(Helper.error_string(document.name, main_err_pos.str_num, main_err_pos.column, err_text))
			return false
1617 | |||
1618 | class Translator: | ||
1619 | |||
1620 | var class_table : Array | ||
1621 | var field_table : Array | ||
1622 | var group_table : Array | ||
1623 | var proto_version : int | ||
1624 | |||
	# Captures the validated AST tables and proto syntax version from a
	# completed analysis; consumed by the generate_* methods below.
	func _init(analyzer_result : AnalyzeResult):
		class_table = analyzer_result.classes
		field_table = analyzer_result.fields
		group_table = analyzer_result.groups
		proto_version = analyzer_result.version
1630 | |||
1631 | func tabulate(text : String, nesting : int) -> String: | ||
1632 | var tab : String = "" | ||
1633 | for i in range(nesting): | ||
1634 | tab += "\t" | ||
1635 | return tab + text | ||
1636 | |||
1637 | func default_dict_text() -> String: | ||
1638 | if proto_version == 2: | ||
1639 | return "DEFAULT_VALUES_2" | ||
1640 | elif proto_version == 3: | ||
1641 | return "DEFAULT_VALUES_3" | ||
1642 | return "TRANSLATION_ERROR" | ||
1643 | |||
1644 | func generate_field_type(field : Analysis.ASTField) -> String: | ||
1645 | var text : String = "PB_DATA_TYPE." | ||
1646 | if field.field_type == Analysis.FIELD_TYPE.INT32: | ||
1647 | return text + "INT32" | ||
1648 | elif field.field_type == Analysis.FIELD_TYPE.SINT32: | ||
1649 | return text + "SINT32" | ||
1650 | elif field.field_type == Analysis.FIELD_TYPE.UINT32: | ||
1651 | return text + "UINT32" | ||
1652 | elif field.field_type == Analysis.FIELD_TYPE.INT64: | ||
1653 | return text + "INT64" | ||
1654 | elif field.field_type == Analysis.FIELD_TYPE.SINT64: | ||
1655 | return text + "SINT64" | ||
1656 | elif field.field_type == Analysis.FIELD_TYPE.UINT64: | ||
1657 | return text + "UINT64" | ||
1658 | elif field.field_type == Analysis.FIELD_TYPE.BOOL: | ||
1659 | return text + "BOOL" | ||
1660 | elif field.field_type == Analysis.FIELD_TYPE.ENUM: | ||
1661 | return text + "ENUM" | ||
1662 | elif field.field_type == Analysis.FIELD_TYPE.FIXED32: | ||
1663 | return text + "FIXED32" | ||
1664 | elif field.field_type == Analysis.FIELD_TYPE.SFIXED32: | ||
1665 | return text + "SFIXED32" | ||
1666 | elif field.field_type == Analysis.FIELD_TYPE.FLOAT: | ||
1667 | return text + "FLOAT" | ||
1668 | elif field.field_type == Analysis.FIELD_TYPE.FIXED64: | ||
1669 | return text + "FIXED64" | ||
1670 | elif field.field_type == Analysis.FIELD_TYPE.SFIXED64: | ||
1671 | return text + "SFIXED64" | ||
1672 | elif field.field_type == Analysis.FIELD_TYPE.DOUBLE: | ||
1673 | return text + "DOUBLE" | ||
1674 | elif field.field_type == Analysis.FIELD_TYPE.STRING: | ||
1675 | return text + "STRING" | ||
1676 | elif field.field_type == Analysis.FIELD_TYPE.BYTES: | ||
1677 | return text + "BYTES" | ||
1678 | elif field.field_type == Analysis.FIELD_TYPE.MESSAGE: | ||
1679 | return text + "MESSAGE" | ||
1680 | elif field.field_type == Analysis.FIELD_TYPE.MAP: | ||
1681 | return text + "MAP" | ||
1682 | return text | ||
1683 | |||
1684 | func generate_field_rule(field : Analysis.ASTField) -> String: | ||
1685 | var text : String = "PB_RULE." | ||
1686 | if field.qualificator == Analysis.FIELD_QUALIFICATOR.OPTIONAL: | ||
1687 | return text + "OPTIONAL" | ||
1688 | elif field.qualificator == Analysis.FIELD_QUALIFICATOR.REQUIRED: | ||
1689 | return text + "REQUIRED" | ||
1690 | elif field.qualificator == Analysis.FIELD_QUALIFICATOR.REPEATED: | ||
1691 | return text + "REPEATED" | ||
1692 | elif field.qualificator == Analysis.FIELD_QUALIFICATOR.RESERVED: | ||
1693 | return text + "RESERVED" | ||
1694 | return text | ||
1695 | |||
1696 | func generate_gdscript_type(field : Analysis.ASTField) -> String: | ||
1697 | if field.field_type == Analysis.FIELD_TYPE.MESSAGE: | ||
1698 | var type_name : String = class_table[field.type_class_id].parent_name + "." + class_table[field.type_class_id].name | ||
1699 | return type_name.substr(1, type_name.length() - 1) | ||
1700 | return generate_gdscript_simple_type(field) | ||
1701 | |||
1702 | func generate_gdscript_simple_type(field : Analysis.ASTField) -> String: | ||
1703 | if field.field_type == Analysis.FIELD_TYPE.INT32: | ||
1704 | return "int" | ||
1705 | elif field.field_type == Analysis.FIELD_TYPE.SINT32: | ||
1706 | return "int" | ||
1707 | elif field.field_type == Analysis.FIELD_TYPE.UINT32: | ||
1708 | return "int" | ||
1709 | elif field.field_type == Analysis.FIELD_TYPE.INT64: | ||
1710 | return "int" | ||
1711 | elif field.field_type == Analysis.FIELD_TYPE.SINT64: | ||
1712 | return "int" | ||
1713 | elif field.field_type == Analysis.FIELD_TYPE.UINT64: | ||
1714 | return "int" | ||
1715 | elif field.field_type == Analysis.FIELD_TYPE.BOOL: | ||
1716 | return "bool" | ||
1717 | elif field.field_type == Analysis.FIELD_TYPE.ENUM: | ||
1718 | return "" | ||
1719 | elif field.field_type == Analysis.FIELD_TYPE.FIXED32: | ||
1720 | return "int" | ||
1721 | elif field.field_type == Analysis.FIELD_TYPE.SFIXED32: | ||
1722 | return "int" | ||
1723 | elif field.field_type == Analysis.FIELD_TYPE.FLOAT: | ||
1724 | return "float" | ||
1725 | elif field.field_type == Analysis.FIELD_TYPE.FIXED64: | ||
1726 | return "int" | ||
1727 | elif field.field_type == Analysis.FIELD_TYPE.SFIXED64: | ||
1728 | return "int" | ||
1729 | elif field.field_type == Analysis.FIELD_TYPE.DOUBLE: | ||
1730 | return "float" | ||
1731 | elif field.field_type == Analysis.FIELD_TYPE.STRING: | ||
1732 | return "String" | ||
1733 | elif field.field_type == Analysis.FIELD_TYPE.BYTES: | ||
1734 | return "PackedByteArray" | ||
1735 | return "" | ||
1736 | |||
	# Emits the _init() lines that construct one PBField plus its
	# PBServiceField registration for the generated message class.
	func generate_field_constructor(field_index : int, nesting : int) -> String:
		var text : String = ""
		var f : Analysis.ASTField = field_table[field_index]
		var field_name : String = "__" + f.name
		# Untyped declaration: GDScript initializes a bare String var to "".
		var pbfield_text : String
		var default_var_name := field_name + "_default"
		if f.qualificator == Analysis.FIELD_QUALIFICATOR.REPEATED:
			# Repeated fields get a dedicated (typed, when possible) default
			# Array variable instead of a DEFAULT_VALUES_* lookup.
			var type_name := generate_gdscript_type(f)
			if type_name:
				text = tabulate("var %s: Array[%s] = []\n" % [default_var_name, type_name], nesting)
			else:
				text = tabulate("var %s: Array = []\n" % [default_var_name], nesting)
		# PBField.new(name, data_type, rule, tag, packed, default)
		pbfield_text += field_name + " = PBField.new("
		pbfield_text += "\"" + f.name + "\", "
		pbfield_text += generate_field_type(f) + ", "
		pbfield_text += generate_field_rule(f) + ", "
		pbfield_text += str(f.tag) + ", "
		if f.option == Analysis.FIELD_OPTION.PACKED:
			pbfield_text += "true"
		elif f.option == Analysis.FIELD_OPTION.NOT_PACKED:
			pbfield_text += "false"
		# NOTE(review): if option is neither PACKED nor NOT_PACKED the packed
		# argument is silently omitted — assumes the analyzer always sets one
		# of the two; confirm.
		if f.qualificator == Analysis.FIELD_QUALIFICATOR.REPEATED:
			pbfield_text += ", " + default_var_name
		else:
			pbfield_text += ", " + default_dict_text() + "[" + generate_field_type(f) + "]"
		pbfield_text += ")\n"
		text += tabulate(pbfield_text, nesting)
		if f.is_map_field:
			text += tabulate(field_name + ".is_map_field = true\n", nesting)
		text += tabulate("service = PBServiceField.new()\n", nesting)
		text += tabulate("service.field = " + field_name + "\n", nesting)
		# Message/map fields need a constructor callback so the unpacker can
		# instantiate nested values on demand.
		if f.field_type == Analysis.FIELD_TYPE.MESSAGE:
			if f.qualificator == Analysis.FIELD_QUALIFICATOR.REPEATED:
				text += tabulate("service.func_ref = Callable(self, \"add_" + f.name + "\")\n", nesting)
			else:
				text += tabulate("service.func_ref = Callable(self, \"new_" + f.name + "\")\n", nesting)
		elif f.field_type == Analysis.FIELD_TYPE.MAP:
			text += tabulate("service.func_ref = Callable(self, \"add_empty_" + f.name + "\")\n", nesting)
		text += tabulate("data[" + field_name + ".tag] = service\n", nesting)

		return text
1778 | |||
	# If the field belongs to a `oneof` group, emits the lines that mark that
	# field FILLED and reset/unfill every sibling of the group; returns ""
	# when the field is in no group.  `text` is rebuilt per candidate group
	# and only returned when the field was actually found in it.
	func generate_group_clear(field_index : int, nesting : int) -> String:
		for g in group_table:
			var text : String = ""
			var find : bool = false
			if g.parent_class_id == field_table[field_index].parent_class_id:
				for i in g.field_indexes:
					if field_index == i:
						find = true
						# NOTE(review): tag concatenated without str() —
						# assumes tags are stored as Strings; confirm.
						text += tabulate("data[" + field_table[i].tag + "].state = PB_SERVICE_STATE.FILLED\n", nesting)
					else:
						text += tabulate("__" + field_table[i].name + ".value = " + default_dict_text() + "[" + generate_field_type(field_table[i]) + "]\n", nesting)
						text += tabulate("data[" + field_table[i].tag + "].state = PB_SERVICE_STATE.UNFILLED\n", nesting)
				if find:
					return text
		return ""
1794 | |||
	# If the field belongs to a `oneof` group, emits a has_<field>() accessor
	# that reports whether this member of the group is currently FILLED;
	# returns "" for fields outside any group.
	func generate_has_oneof(field_index : int, nesting : int) -> String:
		for g in group_table:
			var text : String = ""
			if g.parent_class_id == field_table[field_index].parent_class_id:
				for i in g.field_indexes:
					if field_index == i:
						text += tabulate("func has_" + field_table[i].name + "() -> bool:\n", nesting)
						nesting += 1
						text += tabulate("return data[" + field_table[i].tag + "].state == PB_SERVICE_STATE.FILLED\n", nesting)
						return text
		return ""
1806 | |||
	# Emits the PBField declaration plus the accessor methods (get/has/clear
	# and set/add/new depending on kind and qualificator) for one field of the
	# generated message class.  `nesting` is mutated as the emitted code's
	# indentation level moves in and out of method bodies.
	func generate_field(field_index : int, nesting : int) -> String:
		var text : String = ""
		var f : Analysis.ASTField = field_table[field_index]
		var varname : String = "__" + f.name
		text += tabulate("var " + varname + ": PBField\n", nesting)
		if f.field_type == Analysis.FIELD_TYPE.MESSAGE:
			# Qualified class path of the nested message, minus leading dot.
			var the_class_name : String = class_table[f.type_class_id].parent_name + "." + class_table[f.type_class_id].name
			the_class_name = the_class_name.substr(1, the_class_name.length() - 1)
			if f.qualificator != Analysis.FIELD_QUALIFICATOR.OPTIONAL:
				text += generate_has_oneof(field_index, nesting)
			if f.qualificator == Analysis.FIELD_QUALIFICATOR.REPEATED:
				text += tabulate("func get_" + f.name + "() -> Array[" + the_class_name + "]:\n", nesting)
			else:
				if f.qualificator == Analysis.FIELD_QUALIFICATOR.OPTIONAL:
					# Optional message: explicit has_ based on null value.
					text += tabulate("func has_" + f.name + "() -> bool:\n", nesting)
					nesting += 1
					text += tabulate("if " + varname + ".value != null:\n", nesting)
					nesting += 1
					text += tabulate("return true\n", nesting)
					nesting -= 1
					text += tabulate("return false\n", nesting)
					nesting -= 1
				text += tabulate("func get_" + f.name + "() -> " + the_class_name + ":\n", nesting)
			nesting += 1
			text += tabulate("return " + varname + ".value\n", nesting)
			nesting -= 1
			text += tabulate("func clear_" + f.name + "() -> void:\n", nesting)
			nesting += 1
			text += tabulate("data[" + str(f.tag) + "].state = PB_SERVICE_STATE.UNFILLED\n", nesting)
			if f.qualificator == Analysis.FIELD_QUALIFICATOR.REPEATED:
				text += tabulate(varname + ".value.clear()\n", nesting)
				nesting -= 1
				text += tabulate("func add_" + f.name + "() -> " + the_class_name + ":\n", nesting)
				nesting += 1
				text += tabulate("var element = " + the_class_name + ".new()\n", nesting)
				text += tabulate(varname + ".value.append(element)\n", nesting)
				text += tabulate("return element\n", nesting)
			else:
				text += tabulate(varname + ".value = " + default_dict_text() + "[" + generate_field_type(f) + "]\n", nesting)
				nesting -= 1
				text += tabulate("func new_" + f.name + "() -> " + the_class_name + ":\n", nesting)
				nesting += 1
				text += generate_group_clear(field_index, nesting)
				text += tabulate(varname + ".value = " + the_class_name + ".new()\n", nesting)
				text += tabulate("return " + varname + ".value\n", nesting)
		elif f.field_type == Analysis.FIELD_TYPE.MAP:
			# Maps are generated as repeated key/value entry messages; the
			# public get_ converts them to a Dictionary via PBPacker.
			var the_parent_class_name : String = class_table[f.type_class_id].parent_name
			the_parent_class_name = the_parent_class_name.substr(1, the_parent_class_name.length() - 1)
			var the_class_name : String = the_parent_class_name + "." + class_table[f.type_class_id].name

			text += generate_has_oneof(field_index, nesting)
			text += tabulate("func get_raw_" + f.name + "():\n", nesting)
			nesting += 1
			text += tabulate("return " + varname + ".value\n", nesting)
			nesting -= 1
			text += tabulate("func get_" + f.name + "():\n", nesting)
			nesting += 1
			text += tabulate("return PBPacker.construct_map(" + varname + ".value)\n", nesting)
			nesting -= 1
			text += tabulate("func clear_" + f.name + "():\n", nesting)
			nesting += 1
			text += tabulate("data[" + str(f.tag) + "].state = PB_SERVICE_STATE.UNFILLED\n", nesting)
			text += tabulate(varname + ".value = " + default_dict_text() + "[" + generate_field_type(f) + "]\n", nesting)
			nesting -= 1
			# Locate the entry class's "value" field to decide the add_ shape
			# (message values get a keyed factory; scalars a key/value setter).
			for i in range(field_table.size()):
				if field_table[i].parent_class_id == f.type_class_id && field_table[i].name == "value":
					var gd_type : String = generate_gdscript_simple_type(field_table[i])
					var return_type : String = " -> " + the_class_name
					var value_return_type : String = ""
					if gd_type != "":
						value_return_type = return_type
					elif field_table[i].field_type == Analysis.FIELD_TYPE.MESSAGE:
						value_return_type = " -> " + the_parent_class_name + "." + field_table[i].type_name
					text += tabulate("func add_empty_" + f.name + "()" + return_type + ":\n", nesting)
					nesting += 1
					text += generate_group_clear(field_index, nesting)
					text += tabulate("var element = " + the_class_name + ".new()\n", nesting)
					text += tabulate(varname + ".value.append(element)\n", nesting)
					text += tabulate("return element\n", nesting)
					nesting -= 1
					if field_table[i].field_type == Analysis.FIELD_TYPE.MESSAGE:
						# Upsert by key, return the fresh nested value message.
						text += tabulate("func add_" + f.name + "(a_key)" + value_return_type + ":\n", nesting)
						nesting += 1
						text += generate_group_clear(field_index, nesting)
						text += tabulate("var idx = -1\n", nesting)
						text += tabulate("for i in range(" + varname + ".value.size()):\n", nesting)
						nesting += 1
						text += tabulate("if " + varname + ".value[i].get_key() == a_key:\n", nesting)
						nesting += 1
						text += tabulate("idx = i\n", nesting)
						text += tabulate("break\n", nesting)
						nesting -= 2
						text += tabulate("var element = " + the_class_name + ".new()\n", nesting)
						text += tabulate("element.set_key(a_key)\n", nesting)
						text += tabulate("if idx != -1:\n", nesting)
						nesting += 1
						text += tabulate(varname + ".value[idx] = element\n", nesting)
						nesting -= 1
						text += tabulate("else:\n", nesting)
						nesting += 1
						text += tabulate(varname + ".value.append(element)\n", nesting)
						nesting -= 1
						text += tabulate("return element.new_value()\n", nesting)
					else:
						# Upsert by key with an explicit scalar value.
						text += tabulate("func add_" + f.name + "(a_key, a_value) -> void:\n", nesting)
						nesting += 1
						text += generate_group_clear(field_index, nesting)
						text += tabulate("var idx = -1\n", nesting)
						text += tabulate("for i in range(" + varname + ".value.size()):\n", nesting)
						nesting += 1
						text += tabulate("if " + varname + ".value[i].get_key() == a_key:\n", nesting)
						nesting += 1
						text += tabulate("idx = i\n", nesting)
						text += tabulate("break\n", nesting)
						nesting -= 2
						text += tabulate("var element = " + the_class_name + ".new()\n", nesting)
						text += tabulate("element.set_key(a_key)\n", nesting)
						text += tabulate("element.set_value(a_value)\n", nesting)
						text += tabulate("if idx != -1:\n", nesting)
						nesting += 1
						text += tabulate(varname + ".value[idx] = element\n", nesting)
						nesting -= 1
						text += tabulate("else:\n", nesting)
						nesting += 1
						text += tabulate(varname + ".value.append(element)\n", nesting)
						nesting -= 1
					break
		else:
			# Scalar / enum field; gd_type == "" means "leave untyped".
			var gd_type : String = generate_gdscript_simple_type(f)
			var return_type : String = ""
			var argument_type : String = ""
			if gd_type != "":
				return_type = " -> " + gd_type
				argument_type = " : " + gd_type
			if f.qualificator != Analysis.FIELD_QUALIFICATOR.OPTIONAL:
				text += generate_has_oneof(field_index, nesting)
			if f.qualificator == Analysis.FIELD_QUALIFICATOR.REPEATED:
				var array_type := "[" + gd_type + "]" if gd_type else ""
				text += tabulate("func get_" + f.name + "() -> Array" + array_type + ":\n", nesting)
			else:
				if f.qualificator == Analysis.FIELD_QUALIFICATOR.OPTIONAL:
					text += tabulate("func has_" + f.name + "() -> bool:\n", nesting)
					nesting += 1
					text += tabulate("if " + varname + ".value != null:\n", nesting)
					nesting += 1
					text += tabulate("return true\n", nesting)
					nesting -= 1
					text += tabulate("return false\n", nesting)
					nesting -= 1
				text += tabulate("func get_" + f.name + "()" + return_type + ":\n", nesting)
			nesting += 1
			text += tabulate("return " + varname + ".value\n", nesting)
			nesting -= 1
			text += tabulate("func clear_" + f.name + "() -> void:\n", nesting)
			nesting += 1
			text += tabulate("data[" + str(f.tag) + "].state = PB_SERVICE_STATE.UNFILLED\n", nesting)
			if f.qualificator == Analysis.FIELD_QUALIFICATOR.REPEATED:
				text += tabulate(varname + ".value.clear()\n", nesting)
				nesting -= 1
				text += tabulate("func add_" + f.name + "(value" + argument_type + ") -> void:\n", nesting)
				nesting += 1
				text += tabulate(varname + ".value.append(value)\n", nesting)
			else:
				text += tabulate(varname + ".value = " + default_dict_text() + "[" + generate_field_type(f) + "]\n", nesting)
				nesting -= 1
				text += tabulate("func set_" + f.name + "(value" + argument_type + ") -> void:\n", nesting)
				nesting += 1
				text += generate_group_clear(field_index, nesting)
				text += tabulate(varname + ".value = value\n", nesting)
		return text
1977 | |||
# Emits GDScript source for one AST class at the given tab depth and
# recurses into its nested classes.
# class_index: index into class_table; nesting: current indentation level.
# Messages/maps become inner classes; enums become GDScript enums.
# Services for the top-level class are appended by the caller (translate()).
func generate_class(class_index : int, nesting : int) -> String:
	var text : String = ""
	if class_table[class_index].type == Analysis.CLASS_TYPE.MESSAGE || class_table[class_index].type == Analysis.CLASS_TYPE.MAP:
		var cls_pref : String = ""
		cls_pref += tabulate("class " + class_table[class_index].name + ":\n", nesting)
		nesting += 1
		cls_pref += tabulate("func _init():\n", nesting)
		text += cls_pref
		nesting += 1
		text += tabulate("var service\n", nesting)
		text += tabulate("\n", nesting)
		var field_text : String = ""
		# Each field contributes two parts: registration inside _init()
		# (emitted now) and accessor methods one level up (collected in
		# field_text, appended after the data dictionary).
		for i in range(field_table.size()):
			if field_table[i].parent_class_id == class_index:
				text += generate_field_constructor(i, nesting)
				text += tabulate("\n", nesting)
				field_text += generate_field(i, nesting - 1)
				field_text += tabulate("\n", nesting - 1)
		nesting -= 1
		text += tabulate("var data = {}\n", nesting)
		text += tabulate("\n", nesting)
		text += field_text
		# Recurse into classes declared inside this one; nested messages
		# and maps also get the common service methods.
		for j in range(class_table.size()):
			if class_table[j].parent_index == class_index:
				var cl_text = generate_class(j, nesting)
				text += cl_text
				if class_table[j].type == Analysis.CLASS_TYPE.MESSAGE || class_table[j].type == Analysis.CLASS_TYPE.MAP:
					text += generate_class_services(nesting + 1)
					text += tabulate("\n", nesting + 1)
	elif class_table[class_index].type == Analysis.CLASS_TYPE.ENUM:
		text += tabulate("enum " + class_table[class_index].name + " {\n", nesting)
		nesting += 1

		# If every value name carries the enum-name prefix (e.g. COLOR_RED
		# inside enum Color), strip it so generated code reads Color.RED.
		var expected_prefix = class_table[class_index].name.to_snake_case().to_upper() + "_"
		var all_have_prefix = true
		for en in range(class_table[class_index].values.size()):
			var value_name = class_table[class_index].values[en].name
			all_have_prefix = all_have_prefix and value_name.begins_with(expected_prefix) and value_name != expected_prefix

		for en in range(class_table[class_index].values.size()):
			var value_name = class_table[class_index].values[en].name
			if all_have_prefix:
				value_name = value_name.substr(expected_prefix.length())
			var enum_val = value_name + " = " + class_table[class_index].values[en].value
			# No trailing comma on the last entry.
			if en == class_table[class_index].values.size() - 1:
				text += tabulate(enum_val + "\n", nesting)
			else:
				text += tabulate(enum_val + ",\n", nesting)
		nesting -= 1
		text += tabulate("}\n", nesting)
		text += tabulate("\n", nesting)

	return text
2031 | |||
# Emits the service methods shared by every generated message class:
# _to_string(), to_bytes() and from_bytes(). 'nesting' is the indentation
# level of the generated method definitions.
func generate_class_services(nesting : int) -> String:
	var text : String = ""
	text += tabulate("func _to_string() -> String:\n", nesting)
	nesting += 1
	text += tabulate("return PBPacker.message_to_string(data)\n", nesting)
	text += tabulate("\n", nesting)
	nesting -= 1
	text += tabulate("func to_bytes() -> PackedByteArray:\n", nesting)
	nesting += 1
	text += tabulate("return PBPacker.pack_message(data)\n", nesting)
	text += tabulate("\n", nesting)
	nesting -= 1
	# from_bytes() returns PB_ERR.NO_ERRORS on success, a PB_ERR code on
	# failure, or the parse offset when a 'limit' was given (embedded
	# message parsing).
	text += tabulate("func from_bytes(bytes : PackedByteArray, offset : int = 0, limit : int = -1) -> int:\n", nesting)
	nesting += 1
	text += tabulate("var cur_limit = bytes.size()\n", nesting)
	text += tabulate("if limit != -1:\n", nesting)
	nesting += 1
	text += tabulate("cur_limit = limit\n", nesting)
	nesting -= 1
	text += tabulate("var result = PBPacker.unpack_message(data, bytes, offset, cur_limit)\n", nesting)
	text += tabulate("if result == cur_limit:\n", nesting)
	nesting += 1
	text += tabulate("if PBPacker.check_required(data):\n", nesting)
	nesting += 1
	text += tabulate("if limit == -1:\n", nesting)
	nesting += 1
	text += tabulate("return PB_ERR.NO_ERRORS\n", nesting)
	nesting -= 2
	text += tabulate("else:\n", nesting)
	nesting += 1
	text += tabulate("return PB_ERR.REQUIRED_FIELDS\n", nesting)
	nesting -= 2
	text += tabulate("elif limit == -1 && result > 0:\n", nesting)
	nesting += 1
	text += tabulate("return PB_ERR.PARSE_INCOMPLETE\n", nesting)
	nesting -= 1
	text += tabulate("return result\n", nesting)
	return text
2070 | |||
# Writes the generated output file: the protobuf core runtime followed by
# the user's message classes. Returns true on success.
# file_name: output .gd path; core_file_name: path to protobuf_core.gd.
func translate(file_name : String, core_file_name : String) -> bool:

	# Read the core runtime BEFORE opening the output: FileAccess.WRITE
	# truncates an existing file, so the original order destroyed the old
	# output even when the core file was missing (and leaked the open
	# handle on the early-return paths).
	if !FileAccess.file_exists(core_file_name):
		printerr("File: '", core_file_name, "' not found.")
		return false

	var core_file : FileAccess = FileAccess.open(core_file_name, FileAccess.READ)
	if core_file == null:
		printerr("File: '", core_file_name, "' read error.")
		return false
	var core_text : String = core_file.get_as_text()
	core_file.close()

	var file : FileAccess = FileAccess.open(file_name, FileAccess.WRITE)
	if file == null:
		printerr("File: '", file_name, "' save error.")
		return false

	var text : String = ""
	var nesting : int = 0
	# Patch the core's version placeholder with the parsed proto version.
	core_text = core_text.replace(PROTO_VERSION_DEFAULT, PROTO_VERSION_CONST + str(proto_version))
	text += core_text + "\n\n\n"
	text += "############### USER DATA BEGIN ################\n"
	var cls_user : String = ""
	for i in range(class_table.size()):
		if class_table[i].parent_index == -1:
			var cls_text = generate_class(i, nesting)
			cls_user += cls_text
			if class_table[i].type == Analysis.CLASS_TYPE.MESSAGE:
				nesting += 1
				cls_user += generate_class_services(nesting)
				cls_user += tabulate("\n", nesting)
				nesting -= 1
	text += "\n\n"
	text += cls_user
	text += "################ USER DATA END #################\n"
	file.store_string(text)
	file.close()
	if !FileAccess.file_exists(file_name):
		printerr("File: '", file_name, "' save error.")
		return false
	return true
2113 | |||
2114 | |||
class ImportFile:
	# One node of the import tree: the file's content hash, its path, and
	# the index of the importing file (-1 for the root file).
	var sha256 : String
	var path : String
	var parent_index : int

	func _init(sha : String, a_path : String, a_parent : int):
		sha256 = sha
		path = a_path
		parent_index = a_parent
2124 | |||
# Recursively parses a .proto file and everything it imports.
# Parsed results are cached in 'analyzes' keyed by content hash; 'imports'
# records the import tree. Returns false on the first unreadable or
# unparsable file.
func parse_all(analyzes : Dictionary, imports : Array, path : String, full_name : String, parent_index : int) -> bool:

	if !FileAccess.file_exists(full_name):
		printerr(full_name, ": not found.")
		return false

	var file : FileAccess = FileAccess.open(full_name, FileAccess.READ)
	if file == null:
		printerr(full_name, ": read error.")
		return false
	var doc : Document = Document.new(full_name, file.get_as_text())
	var sha : String = file.get_sha256(full_name)
	file.close()

	# A file already seen (same content hash) is only recorded in the
	# import tree; its cached analysis is reused.
	if analyzes.has(sha):
		print(full_name, ": retrieving data from cache.")
		imports.append(ImportFile.new(sha, doc.name, parent_index))
		return true

	print(full_name, ": parsing.")
	var analysis : Analysis = Analysis.new(path, doc)
	var result : AnalyzeResult = analysis.analyze()
	if !result.state:
		printerr(doc.name + ": parsing error.")
		return false
	analyzes[sha] = result
	var parent : int = imports.size()
	imports.append(ImportFile.new(sha, doc.name, parent_index))
	# Recurse into this file's own imports, rooted at its tree position.
	for im in result.imports:
		if !parse_all(analyzes, imports, path, im.path, parent):
			return false
	return true
2157 | |||
# Merges the classes (and, unless only_classes, the fields and groups) of
# a2 into a1, shifting every parent/field index by a1's current table
# sizes so cross-references stay valid in the combined result.
func union_analyses(a1 : AnalyzeResult, a2 : AnalyzeResult, only_classes : bool = true) -> void:
	var class_offset : int = a1.classes.size()
	var field_offset = a1.fields.size()
	for source_class in a2.classes:
		var merged_class : Analysis.ASTClass = source_class.copy()
		if merged_class.parent_index != -1:
			merged_class.parent_index += class_offset
		a1.classes.append(merged_class)
	if only_classes:
		return
	for source_field in a2.fields:
		var merged_field : Analysis.ASTField = source_field.copy()
		merged_field.parent_class_id += class_offset
		# Reset to unresolved in the merged context.
		merged_field.type_class_id = -1
		a1.fields.append(merged_field)
	for source_group in a2.groups:
		var merged_group : Analysis.ASTFieldGroup = source_group.copy()
		merged_group.parent_class_id += class_offset
		var shifted_indexes : Array = []
		for idx in merged_group.field_indexes:
			shifted_indexes.append(idx + field_offset)
		merged_group.field_indexes = shifted_indexes
		a1.groups.append(merged_group)
2181 | |||
# Recursively merges the analyses of every file imported by 'key' into
# 'result'. 'keys' holds the hashes already merged, preventing duplicates
# and cycles. With use_public, imports beyond the first level are only
# followed when declared 'import public'.
func union_imports(analyzes : Dictionary, key : String, result : AnalyzeResult, keys : Array, nesting : int, use_public : bool = true, only_classes : bool = true) -> void:
	nesting += 1
	for im in analyzes[key].imports:
		var already_merged : bool = false
		for merged_key in keys:
			if im.sha256 == merged_key:
				already_merged = true
				break
		if already_merged:
			continue
		# Merge when visibility filtering is off, when the import is
		# public (below the first level), or when still at the first level.
		if !use_public || (im.public && nesting > 1) || nesting < 2:
			keys.append(im.sha256)
			union_analyses(result, analyzes[im.sha256], only_classes)
			union_imports(analyzes, im.sha256, result, keys, nesting, use_public, only_classes)
2196 | |||
# Runs semantic checking on every parsed file, each combined with the
# classes of its (public) imports. Returns false on the first error.
func semantic_all(analyzes : Dictionary, imports : Array)-> bool:
	for sha in analyzes.keys():
		print(analyzes[sha].doc.name, ": analysis.")
		var merged_keys : Array = [sha]
		var combined : AnalyzeResult = analyzes[sha].soft_copy()
		# Rebuild the class list from copies so union_imports (which
		# appends to and mutates combined.classes) cannot touch the
		# cached originals.
		combined.classes = []
		for cl in analyzes[sha].classes:
			combined.classes.append(cl.copy())
		union_imports(analyzes, sha, combined, merged_keys, 0)
		var semantic : Semantic = Semantic.new(combined)
		if !semantic.check():
			printerr(analyzes[sha].doc.name, ": analysis error.")
			return false
	return true
2212 | |||
# Merges every parsed file into one analysis, re-checks semantics on the
# union, and generates the output file. Returns false on any failure.
# file_name: output .gd path; core_file_name: path to protobuf_core.gd.
func translate_all(analyzes : Dictionary, file_name : String, core_file_name : String) -> bool:
	var first_key : String = analyzes.keys()[0]
	var analyze : AnalyzeResult = analyzes[first_key]
	var keys : Array = []
	keys.append(first_key)
	# Merge everything (classes, fields, groups) regardless of 'public'.
	union_imports(analyzes, first_key, analyze, keys, 0, false, false)
	print("Performing full semantic analysis.")
	var semantic : Semantic = Semantic.new(analyze)
	if !semantic.check():
		return false
	print("Performing translation.")
	var translator : Translator = Translator.new(analyze)
	if !translator.translate(file_name, core_file_name):
		return false
	# (Removed unused local 'first' — it was dead code.)
	return true
2229 | |||
# Drives the full compile pipeline for one .proto file:
# parse (with imports) -> semantic analysis -> code generation.
# path: directory of the input; in_file: .proto filename; out_file:
# generated .gd path; core_file: path to protobuf_core.gd.
# Returns true when the output file was produced successfully.
func work(path : String, in_file : String, out_file : String, core_file : String) -> bool:
	var in_full_name : String = path + in_file
	var imports : Array = []
	var analyzes : Dictionary = {}

	print("Compiling source: '", in_full_name, "', output: '", out_file, "'.")
	print("\n1. Parsing:")
	if parse_all(analyzes, imports, path, in_full_name, -1):
		print("* Parsing completed successfully. *")
	else:
		return false
	# Fixed typo in progress message ("Perfoming" -> "Performing").
	print("\n2. Performing semantic analysis:")
	if semantic_all(analyzes, imports):
		print("* Semantic analysis completed successfully. *")
	else:
		return false
	print("\n3. Output file creating:")
	if translate_all(analyzes, out_file, core_file):
		print("* Output file was created successfully. *")
	else:
		return false
	return true
2252 | |||
# Nothing to do on scene entry; all work is driven through work().
func _ready():
	pass
diff --git a/vendor/godobuf/addons/protobuf/plugin.cfg b/vendor/godobuf/addons/protobuf/plugin.cfg new file mode 100644 index 0000000..6456a11 --- /dev/null +++ b/vendor/godobuf/addons/protobuf/plugin.cfg | |||
@@ -0,0 +1,7 @@ | |||
1 | [plugin] | ||
2 | |||
3 | name="Godobuf" | ||
4 | description="Google Protobuf implementation for Godot/GDScript" | ||
5 | author="oniksan" | ||
6 | version="0.6.1 for Godot 4.x.y" | ||
7 | script="protobuf_ui.gd" | ||
diff --git a/vendor/godobuf/addons/protobuf/protobuf_cmdln.gd b/vendor/godobuf/addons/protobuf/protobuf_cmdln.gd new file mode 100644 index 0000000..97d7ba4 --- /dev/null +++ b/vendor/godobuf/addons/protobuf/protobuf_cmdln.gd | |||
@@ -0,0 +1,66 @@ | |||
1 | # | ||
2 | # BSD 3-Clause License | ||
3 | # | ||
4 | # Copyright (c) 2018, Oleg Malyavkin | ||
5 | # All rights reserved. | ||
6 | # | ||
7 | # Redistribution and use in source and binary forms, with or without | ||
8 | # modification, are permitted provided that the following conditions are met: | ||
9 | # | ||
10 | # * Redistributions of source code must retain the above copyright notice, this | ||
11 | # list of conditions and the following disclaimer. | ||
12 | # | ||
13 | # * Redistributions in binary form must reproduce the above copyright notice, | ||
14 | # this list of conditions and the following disclaimer in the documentation | ||
15 | # and/or other materials provided with the distribution. | ||
16 | # | ||
17 | # * Neither the name of the copyright holder nor the names of its | ||
18 | # contributors may be used to endorse or promote products derived from | ||
19 | # this software without specific prior written permission. | ||
20 | # | ||
21 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" | ||
22 | # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE | ||
23 | # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE | ||
24 | # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE | ||
25 | # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL | ||
26 | # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR | ||
27 | # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER | ||
28 | # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, | ||
29 | # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | ||
30 | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||
31 | |||
32 | extends SceneTree | ||
33 | |||
34 | var Parser = preload("res://addons/protobuf/parser.gd") | ||
35 | var Util = preload("res://addons/protobuf/protobuf_util.gd") | ||
36 | |||
# Reports a fatal command-line error and requests the SceneTree to quit.
# NOTE(review): quit() does not abort the current call stack — statements
# after a call to error() still execute; confirm callers return afterwards.
func error(msg : String):
	push_error(msg)
	quit()
40 | |||
# Command-line entry point: reads --input=<proto> and --output=<gd> from
# the command line and compiles the .proto file to a GDScript module.
func _init():
	var arguments = {}
	for argument in OS.get_cmdline_args():
		# Split on the FIRST '=' only, so values containing '=' are kept
		# intact (the old split("=") silently dropped everything after a
		# second '=').
		var eq_pos = argument.find("=")
		if eq_pos > -1:
			arguments[argument.substr(0, eq_pos).lstrip("--")] = argument.substr(eq_pos + 1)

	if !arguments.has("input") || !arguments.has("output"):
		error("Expected 2 Parameters: input and output")
		# quit() is deferred, so return explicitly — previously execution
		# fell through and crashed on the missing dictionary keys.
		return

	var input_file_name = arguments["input"]
	var output_file_name = arguments["output"]

	var file = FileAccess.open(input_file_name, FileAccess.READ)
	if file == null:
		error("File: '" + input_file_name + "' not found.")
		return

	var parser = Parser.new()

	if parser.work(Util.extract_dir(input_file_name), Util.extract_filename(input_file_name), \
			output_file_name, "res://addons/protobuf/protobuf_core.gd"):
		print("Compiled '", input_file_name, "' to '", output_file_name, "'.")
	else:
		error("Compilation failed.")

	quit()
diff --git a/vendor/godobuf/addons/protobuf/protobuf_core.gd b/vendor/godobuf/addons/protobuf/protobuf_core.gd new file mode 100644 index 0000000..7098413 --- /dev/null +++ b/vendor/godobuf/addons/protobuf/protobuf_core.gd | |||
@@ -0,0 +1,668 @@ | |||
1 | # | ||
2 | # BSD 3-Clause License | ||
3 | # | ||
4 | # Copyright (c) 2018 - 2023, Oleg Malyavkin | ||
5 | # All rights reserved. | ||
6 | # | ||
7 | # Redistribution and use in source and binary forms, with or without | ||
8 | # modification, are permitted provided that the following conditions are met: | ||
9 | # | ||
10 | # * Redistributions of source code must retain the above copyright notice, this | ||
11 | # list of conditions and the following disclaimer. | ||
12 | # | ||
13 | # * Redistributions in binary form must reproduce the above copyright notice, | ||
14 | # this list of conditions and the following disclaimer in the documentation | ||
15 | # and/or other materials provided with the distribution. | ||
16 | # | ||
17 | # * Neither the name of the copyright holder nor the names of its | ||
18 | # contributors may be used to endorse or promote products derived from | ||
19 | # this software without specific prior written permission. | ||
20 | # | ||
21 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" | ||
22 | # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE | ||
23 | # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE | ||
24 | # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE | ||
25 | # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL | ||
26 | # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR | ||
27 | # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER | ||
28 | # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, | ||
29 | # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | ||
30 | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||
31 | |||
# DEBUG_TAB redefine this " " if you need, example: const DEBUG_TAB = "\t"

# Proto syntax version placeholder; the generator rewrites this constant
# to the parsed .proto file's syntax version (2 or 3).
const PROTO_VERSION = 0

# Indentation unit used when pretty-printing messages.
const DEBUG_TAB : String = " "
37 | |||
# Error codes returned by the unpack machinery. NO_ERRORS is 0 and all
# failures are negative, so they can share an int return with byte offsets.
enum PB_ERR {
	NO_ERRORS = 0,
	VARINT_NOT_FOUND = -1,
	REPEATED_COUNT_NOT_FOUND = -2,
	REPEATED_COUNT_MISMATCH = -3,
	LENGTHDEL_SIZE_NOT_FOUND = -4,
	LENGTHDEL_SIZE_MISMATCH = -5,
	PACKAGE_SIZE_MISMATCH = -6,
	UNDEFINED_STATE = -7,
	PARSE_INCOMPLETE = -8,
	REQUIRED_FIELDS = -9
}
50 | |||
# Declared protobuf field data types (the .proto-level types), as opposed
# to the wire types in PB_TYPE.
enum PB_DATA_TYPE {
	INT32 = 0,
	SINT32 = 1,
	UINT32 = 2,
	INT64 = 3,
	SINT64 = 4,
	UINT64 = 5,
	BOOL = 6,
	ENUM = 7,
	FIXED32 = 8,
	SFIXED32 = 9,
	FLOAT = 10,
	FIXED64 = 11,
	SFIXED64 = 12,
	DOUBLE = 13,
	STRING = 14,
	BYTES = 15,
	MESSAGE = 16,
	MAP = 17
}
71 | |||
# proto2 defaults: every type starts as null (no value set).
const DEFAULT_VALUES_2 = {
	PB_DATA_TYPE.INT32: null,
	PB_DATA_TYPE.SINT32: null,
	PB_DATA_TYPE.UINT32: null,
	PB_DATA_TYPE.INT64: null,
	PB_DATA_TYPE.SINT64: null,
	PB_DATA_TYPE.UINT64: null,
	PB_DATA_TYPE.BOOL: null,
	PB_DATA_TYPE.ENUM: null,
	PB_DATA_TYPE.FIXED32: null,
	PB_DATA_TYPE.SFIXED32: null,
	PB_DATA_TYPE.FLOAT: null,
	PB_DATA_TYPE.FIXED64: null,
	PB_DATA_TYPE.SFIXED64: null,
	PB_DATA_TYPE.DOUBLE: null,
	PB_DATA_TYPE.STRING: null,
	PB_DATA_TYPE.BYTES: null,
	PB_DATA_TYPE.MESSAGE: null,
	PB_DATA_TYPE.MAP: null
}
92 | |||
# proto3 defaults: zero value per type (0 / false / "" / empty array;
# messages default to null).
const DEFAULT_VALUES_3 = {
	PB_DATA_TYPE.INT32: 0,
	PB_DATA_TYPE.SINT32: 0,
	PB_DATA_TYPE.UINT32: 0,
	PB_DATA_TYPE.INT64: 0,
	PB_DATA_TYPE.SINT64: 0,
	PB_DATA_TYPE.UINT64: 0,
	PB_DATA_TYPE.BOOL: false,
	PB_DATA_TYPE.ENUM: 0,
	PB_DATA_TYPE.FIXED32: 0,
	PB_DATA_TYPE.SFIXED32: 0,
	PB_DATA_TYPE.FLOAT: 0.0,
	PB_DATA_TYPE.FIXED64: 0,
	PB_DATA_TYPE.SFIXED64: 0,
	PB_DATA_TYPE.DOUBLE: 0.0,
	PB_DATA_TYPE.STRING: "",
	PB_DATA_TYPE.BYTES: [],
	PB_DATA_TYPE.MESSAGE: null,
	PB_DATA_TYPE.MAP: []
}
113 | |||
# Protobuf wire types (values 0-5 match the wire format); UNDEFINED (8)
# is a local sentinel for unmapped data types.
enum PB_TYPE {
	VARINT = 0,
	FIX64 = 1,
	LENGTHDEL = 2,
	STARTGROUP = 3,
	ENDGROUP = 4,
	FIX32 = 5,
	UNDEFINED = 8
}
123 | |||
# Field label (cardinality rule) of a protobuf field declaration.
enum PB_RULE {
	OPTIONAL = 0,
	REQUIRED = 1,
	REPEATED = 2,
	RESERVED = 3
}
130 | |||
# Whether a field currently holds a set value (used by generated clear_*
# methods and the required-field check).
enum PB_SERVICE_STATE {
	FILLED = 0,
	UNFILLED = 1
}
135 | |||
class PBField:
	# Runtime description of one message field: declared name, data type
	# (PB_DATA_TYPE), label (PB_RULE), wire tag, [packed] option and the
	# current value.
	var name : String
	var type : int
	var rule : int
	var tag : int
	var option_packed : bool
	var value
	# Set by generated code for map entries.
	var is_map_field : bool = false
	# True when the .proto declared an explicit default for this field.
	var option_default : bool = false

	func _init(a_name : String, a_type : int, a_rule : int, a_tag : int, packed : bool, a_value = null):
		name = a_name
		type = a_type
		rule = a_rule
		tag = a_tag
		option_packed = packed
		value = a_value
153 | |||
# Result of unpack_type_tag(): wire type, field tag, and the byte length
# ('offset') of the decoded key varint. 'ok' stays false on malformed input.
class PBTypeTag:
	var ok : bool = false
	var type : int
	var tag : int
	var offset : int
159 | |||
# Entry of a generated message's 'data' dictionary: the field itself, an
# optional constructor func_ref (for nested messages), and its fill state.
class PBServiceField:
	var field : PBField
	var func_ref = null
	var state : int = PB_SERVICE_STATE.UNFILLED
164 | |||
165 | class PBPacker: | ||
166 | static func convert_signed(n : int) -> int: | ||
167 | if n < -2147483648: | ||
168 | return (n << 1) ^ (n >> 63) | ||
169 | else: | ||
170 | return (n << 1) ^ (n >> 31) | ||
171 | |||
172 | static func deconvert_signed(n : int) -> int: | ||
173 | if n & 0x01: | ||
174 | return ~(n >> 1) | ||
175 | else: | ||
176 | return (n >> 1) | ||
177 | |||
178 | static func pack_varint(value) -> PackedByteArray: | ||
179 | var varint : PackedByteArray = PackedByteArray() | ||
180 | if typeof(value) == TYPE_BOOL: | ||
181 | if value: | ||
182 | value = 1 | ||
183 | else: | ||
184 | value = 0 | ||
185 | for _i in range(9): | ||
186 | var b = value & 0x7F | ||
187 | value >>= 7 | ||
188 | if value: | ||
189 | varint.append(b | 0x80) | ||
190 | else: | ||
191 | varint.append(b) | ||
192 | break | ||
193 | if varint.size() == 9 && (varint[8] & 0x80 != 0): | ||
194 | varint.append(0x01) | ||
195 | return varint | ||
196 | |||
197 | static func pack_bytes(value, count : int, data_type : int) -> PackedByteArray: | ||
198 | var bytes : PackedByteArray = PackedByteArray() | ||
199 | if data_type == PB_DATA_TYPE.FLOAT: | ||
200 | var spb : StreamPeerBuffer = StreamPeerBuffer.new() | ||
201 | spb.put_float(value) | ||
202 | bytes = spb.get_data_array() | ||
203 | elif data_type == PB_DATA_TYPE.DOUBLE: | ||
204 | var spb : StreamPeerBuffer = StreamPeerBuffer.new() | ||
205 | spb.put_double(value) | ||
206 | bytes = spb.get_data_array() | ||
207 | else: | ||
208 | for _i in range(count): | ||
209 | bytes.append(value & 0xFF) | ||
210 | value >>= 8 | ||
211 | return bytes | ||
212 | |||
	# Reads 'count' bytes at 'index'. FLOAT/DOUBLE are decoded to a number;
	# other (integer) types return the reversed byte slice.
	# NOTE(review): the integer path returns bytes, not an int — callers
	# appear to consume the slice directly; confirm integer decoding
	# happens downstream.
	static func unpack_bytes(bytes : PackedByteArray, index : int, count : int, data_type : int):
		if data_type == PB_DATA_TYPE.FLOAT:
			return bytes.decode_float(index)
		elif data_type == PB_DATA_TYPE.DOUBLE:
			return bytes.decode_double(index)
		else:
			# Convert to big endian
			var slice: PackedByteArray = bytes.slice(index, index + count)
			slice.reverse()
			return slice
223 | |||
224 | static func unpack_varint(varint_bytes) -> int: | ||
225 | var value : int = 0 | ||
226 | var i: int = varint_bytes.size() - 1 | ||
227 | while i > -1: | ||
228 | value = (value << 7) | (varint_bytes[i] & 0x7F) | ||
229 | i -= 1 | ||
230 | return value | ||
231 | |||
	# Encodes a field key as a varint: (tag << 3) | wire_type.
	static func pack_type_tag(type : int, tag : int) -> PackedByteArray:
		return pack_varint((tag << 3) | type)
234 | |||
235 | static func isolate_varint(bytes : PackedByteArray, index : int) -> PackedByteArray: | ||
236 | var i: int = index | ||
237 | while i <= index + 10: # Protobuf varint max size is 10 bytes | ||
238 | if !(bytes[i] & 0x80): | ||
239 | return bytes.slice(index, i + 1) | ||
240 | i += 1 | ||
241 | return [] # Unreachable | ||
242 | |||
243 | static func unpack_type_tag(bytes : PackedByteArray, index : int) -> PBTypeTag: | ||
244 | var varint_bytes : PackedByteArray = isolate_varint(bytes, index) | ||
245 | var result : PBTypeTag = PBTypeTag.new() | ||
246 | if varint_bytes.size() != 0: | ||
247 | result.ok = true | ||
248 | result.offset = varint_bytes.size() | ||
249 | var unpacked : int = unpack_varint(varint_bytes) | ||
250 | result.type = unpacked & 0x07 | ||
251 | result.tag = unpacked >> 3 | ||
252 | return result | ||
253 | |||
	# Emits a length-delimited record: field key, payload-size varint,
	# then the payload bytes. (Name keeps the upstream spelling.)
	static func pack_length_delimeted(type : int, tag : int, bytes : PackedByteArray) -> PackedByteArray:
		var result : PackedByteArray = pack_type_tag(type, tag)
		result.append_array(pack_varint(bytes.size()))
		result.append_array(bytes)
		return result
259 | |||
260 | static func pb_type_from_data_type(data_type : int) -> int: | ||
261 | if data_type == PB_DATA_TYPE.INT32 || data_type == PB_DATA_TYPE.SINT32 || data_type == PB_DATA_TYPE.UINT32 || data_type == PB_DATA_TYPE.INT64 || data_type == PB_DATA_TYPE.SINT64 || data_type == PB_DATA_TYPE.UINT64 || data_type == PB_DATA_TYPE.BOOL || data_type == PB_DATA_TYPE.ENUM: | ||
262 | return PB_TYPE.VARINT | ||
263 | elif data_type == PB_DATA_TYPE.FIXED32 || data_type == PB_DATA_TYPE.SFIXED32 || data_type == PB_DATA_TYPE.FLOAT: | ||
264 | return PB_TYPE.FIX32 | ||
265 | elif data_type == PB_DATA_TYPE.FIXED64 || data_type == PB_DATA_TYPE.SFIXED64 || data_type == PB_DATA_TYPE.DOUBLE: | ||
266 | return PB_TYPE.FIX64 | ||
267 | elif data_type == PB_DATA_TYPE.STRING || data_type == PB_DATA_TYPE.BYTES || data_type == PB_DATA_TYPE.MESSAGE || data_type == PB_DATA_TYPE.MAP: | ||
268 | return PB_TYPE.LENGTHDEL | ||
269 | else: | ||
270 | return PB_TYPE.UNDEFINED | ||
271 | |||
	# Serializes one PBField to wire bytes: key(s) plus encoded payload.
	# Repeated unpacked fields repeat the key per element; repeated packed
	# scalar fields are emitted as a single length-delimited record.
	static func pack_field(field : PBField) -> PackedByteArray:
		var type : int = pb_type_from_data_type(field.type)
		var type_copy : int = type
		# Packed repeated scalars are wrapped in a length-delimited record;
		# type_copy keeps the element wire type for the inner encoding.
		if field.rule == PB_RULE.REPEATED && field.option_packed:
			type = PB_TYPE.LENGTHDEL
		var head : PackedByteArray = pack_type_tag(type, field.tag)
		var data : PackedByteArray = PackedByteArray()
		if type == PB_TYPE.VARINT:
			var value
			if field.rule == PB_RULE.REPEATED:
				# Unpacked repeated varints: key + varint per element.
				for v in field.value:
					data.append_array(head)
					if field.type == PB_DATA_TYPE.SINT32 || field.type == PB_DATA_TYPE.SINT64:
						value = convert_signed(v)
					else:
						value = v
					data.append_array(pack_varint(value))
				return data
			else:
				if field.type == PB_DATA_TYPE.SINT32 || field.type == PB_DATA_TYPE.SINT64:
					value = convert_signed(field.value)
				else:
					value = field.value
				data = pack_varint(value)
		elif type == PB_TYPE.FIX32:
			if field.rule == PB_RULE.REPEATED:
				for v in field.value:
					data.append_array(head)
					data.append_array(pack_bytes(v, 4, field.type))
				return data
			else:
				data.append_array(pack_bytes(field.value, 4, field.type))
		elif type == PB_TYPE.FIX64:
			if field.rule == PB_RULE.REPEATED:
				for v in field.value:
					data.append_array(head)
					data.append_array(pack_bytes(v, 8, field.type))
				return data
			else:
				data.append_array(pack_bytes(field.value, 8, field.type))
		elif type == PB_TYPE.LENGTHDEL:
			if field.rule == PB_RULE.REPEATED:
				# Packed scalars (type_copy holds the element wire type):
				# concatenate raw encodings, then wrap once.
				if type_copy == PB_TYPE.VARINT:
					if field.type == PB_DATA_TYPE.SINT32 || field.type == PB_DATA_TYPE.SINT64:
						var signed_value : int
						for v in field.value:
							signed_value = convert_signed(v)
							data.append_array(pack_varint(signed_value))
					else:
						for v in field.value:
							data.append_array(pack_varint(v))
					return pack_length_delimeted(type, field.tag, data)
				elif type_copy == PB_TYPE.FIX32:
					for v in field.value:
						data.append_array(pack_bytes(v, 4, field.type))
					return pack_length_delimeted(type, field.tag, data)
				elif type_copy == PB_TYPE.FIX64:
					for v in field.value:
						data.append_array(pack_bytes(v, 8, field.type))
					return pack_length_delimeted(type, field.tag, data)
				# Repeated strings/bytes/messages: one length-delimited
				# record per element.
				elif field.type == PB_DATA_TYPE.STRING:
					for v in field.value:
						var obj = v.to_utf8_buffer()
						data.append_array(pack_length_delimeted(type, field.tag, obj))
					return data
				elif field.type == PB_DATA_TYPE.BYTES:
					for v in field.value:
						data.append_array(pack_length_delimeted(type, field.tag, v))
					return data
				elif typeof(field.value[0]) == TYPE_OBJECT:
					for v in field.value:
						var obj : PackedByteArray = v.to_bytes()
						data.append_array(pack_length_delimeted(type, field.tag, obj))
					return data
			else:
				# Singular length-delimited: proto3 omits empty
				# strings/bytes entirely (falls through to return below).
				if field.type == PB_DATA_TYPE.STRING:
					var str_bytes : PackedByteArray = field.value.to_utf8_buffer()
					if PROTO_VERSION == 2 || (PROTO_VERSION == 3 && str_bytes.size() > 0):
						data.append_array(str_bytes)
						return pack_length_delimeted(type, field.tag, data)
				if field.type == PB_DATA_TYPE.BYTES:
					if PROTO_VERSION == 2 || (PROTO_VERSION == 3 && field.value.size() > 0):
						data.append_array(field.value)
						return pack_length_delimeted(type, field.tag, data)
				elif typeof(field.value) == TYPE_OBJECT:
					var obj : PackedByteArray = field.value.to_bytes()
					if obj.size() > 0:
						data.append_array(obj)
					return pack_length_delimeted(type, field.tag, data)
				else:
					pass
		# Non-repeated scalar path: key + payload, or empty when nothing
		# was encoded.
		if data.size() > 0:
			head.append_array(data)
			return head
		else:
			return data
368 | |||
369 | static func skip_unknown_field(bytes : PackedByteArray, offset : int, type : int) -> int: | ||
370 | if type == PB_TYPE.VARINT: | ||
371 | return offset + isolate_varint(bytes, offset).size() | ||
372 | if type == PB_TYPE.FIX64: | ||
373 | return offset + 8 | ||
374 | if type == PB_TYPE.LENGTHDEL: | ||
375 | var length_bytes : PackedByteArray = isolate_varint(bytes, offset) | ||
376 | var length : int = unpack_varint(length_bytes) | ||
377 | return offset + length_bytes.size() + length | ||
378 | if type == PB_TYPE.FIX32: | ||
379 | return offset + 4 | ||
380 | return PB_ERR.UNDEFINED_STATE | ||
381 | |||
# Decode one occurrence of a field from `bytes` at `offset` into `field`.
# Returns the new read offset on success, or a negative PB_ERR code.
# `type` is the expected wire type derived from the field's data type;
# `message_func_ref` constructs a nested message instance (non-null only
# for MESSAGE / map-entry fields) — presumably a Callable; confirm at call site.
static func unpack_field(bytes : PackedByteArray, offset : int, field : PBField, type : int, message_func_ref) -> int:
	# Packed repeated scalars arrive as a single LENGTHDEL blob. `type` here is
	# the *element* wire type, so type != LENGTHDEL signals the packed encoding.
	if field.rule == PB_RULE.REPEATED && type != PB_TYPE.LENGTHDEL && field.option_packed:
		var count = isolate_varint(bytes, offset)
		if count.size() > 0:
			offset += count.size()
			# NOTE: `count` is the payload length in BYTES, not an element count.
			count = unpack_varint(count)
			if type == PB_TYPE.VARINT:
				var val
				var counter = offset + count
				while offset < counter:
					val = isolate_varint(bytes, offset)
					if val.size() > 0:
						offset += val.size()
						val = unpack_varint(val)
						# sint32/sint64 are zigzag-encoded; undo that here.
						if field.type == PB_DATA_TYPE.SINT32 || field.type == PB_DATA_TYPE.SINT64:
							val = deconvert_signed(val)
						elif field.type == PB_DATA_TYPE.BOOL:
							# Normalize any non-zero varint to a real bool.
							if val:
								val = true
							else:
								val = false
						field.value.append(val)
					else:
						# Payload ended mid-varint: declared length was wrong.
						return PB_ERR.REPEATED_COUNT_MISMATCH
				return offset
			elif type == PB_TYPE.FIX32 || type == PB_TYPE.FIX64:
				var type_size
				if type == PB_TYPE.FIX32:
					type_size = 4
				else:
					type_size = 8
				var val
				var counter = offset + count
				while offset < counter:
					# Guard against reading past the end of the buffer.
					if (offset + type_size) > bytes.size():
						return PB_ERR.REPEATED_COUNT_MISMATCH
					val = unpack_bytes(bytes, offset, type_size, field.type)
					offset += type_size
					field.value.append(val)
				return offset
		else:
			# Missing/empty length prefix for the packed blob.
			return PB_ERR.REPEATED_COUNT_NOT_FOUND
	else:
		# Single value, or one element of an unpacked repeated field.
		if type == PB_TYPE.VARINT:
			var val = isolate_varint(bytes, offset)
			if val.size() > 0:
				offset += val.size()
				val = unpack_varint(val)
				# Same sint zigzag / bool normalization as the packed path above.
				if field.type == PB_DATA_TYPE.SINT32 || field.type == PB_DATA_TYPE.SINT64:
					val = deconvert_signed(val)
				elif field.type == PB_DATA_TYPE.BOOL:
					if val:
						val = true
					else:
						val = false
				if field.rule == PB_RULE.REPEATED:
					field.value.append(val)
				else:
					field.value = val
			else:
				return PB_ERR.VARINT_NOT_FOUND
			return offset
		elif type == PB_TYPE.FIX32 || type == PB_TYPE.FIX64:
			var type_size
			if type == PB_TYPE.FIX32:
				type_size = 4
			else:
				type_size = 8
			var val
			if (offset + type_size) > bytes.size():
				return PB_ERR.REPEATED_COUNT_MISMATCH
			val = unpack_bytes(bytes, offset, type_size, field.type)
			offset += type_size
			if field.rule == PB_RULE.REPEATED:
				field.value.append(val)
			else:
				field.value = val
			return offset
		elif type == PB_TYPE.LENGTHDEL:
			var inner_size = isolate_varint(bytes, offset)
			if inner_size.size() > 0:
				offset += inner_size.size()
				inner_size = unpack_varint(inner_size)
				if inner_size >= 0:
					if inner_size + offset > bytes.size():
						return PB_ERR.LENGTHDEL_SIZE_MISMATCH
					if message_func_ref != null:
						# Nested message: build a fresh instance and let it
						# parse its own slice [offset, offset + inner_size).
						var message = message_func_ref.call()
						if inner_size > 0:
							var sub_offset = message.from_bytes(bytes, offset, inner_size + offset)
							if sub_offset > 0:
								# Submessage must consume at least its declared size.
								if sub_offset - offset >= inner_size:
									offset = sub_offset
									return offset
								else:
									return PB_ERR.LENGTHDEL_SIZE_MISMATCH
							# Negative sub_offset is an error code: propagate it.
							return sub_offset
						else:
							# Zero-length submessage: valid, nothing to parse.
							return offset
					elif field.type == PB_DATA_TYPE.STRING:
						var str_bytes : PackedByteArray = bytes.slice(offset, inner_size + offset)
						if field.rule == PB_RULE.REPEATED:
							field.value.append(str_bytes.get_string_from_utf8())
						else:
							field.value = str_bytes.get_string_from_utf8()
						return offset + inner_size
					elif field.type == PB_DATA_TYPE.BYTES:
						var val_bytes : PackedByteArray = bytes.slice(offset, inner_size + offset)
						if field.rule == PB_RULE.REPEATED:
							field.value.append(val_bytes)
						else:
							field.value = val_bytes
						return offset + inner_size
				else:
					return PB_ERR.LENGTHDEL_SIZE_NOT_FOUND
			else:
				return PB_ERR.LENGTHDEL_SIZE_NOT_FOUND
	# Fell through every branch: wire type / data type combination not handled.
	return PB_ERR.UNDEFINED_STATE
500 | |||
# Parse wire-format `bytes` into the message's `data` service table,
# starting at `offset` and expecting the message to end exactly at `limit`.
# Returns the final offset, a negative PB_ERR code, or
# PB_ERR.UNDEFINED_STATE when parsing stalls.
static func unpack_message(data, bytes : PackedByteArray, offset : int, limit : int) -> int:
	while true:
		var tt : PBTypeTag = unpack_type_tag(bytes, offset)
		if !tt.ok:
			# Could not read a tag header: stop where we are.
			return offset
		offset += tt.offset
		var res : int
		if data.has(tt.tag):
			var service : PBServiceField = data[tt.tag]
			var type : int = pb_type_from_data_type(service.field.type)
			# Accept either the expected wire type or the packed (LENGTHDEL)
			# encoding of a packed repeated field; anything else is malformed.
			var packed_ok : bool = tt.type == PB_TYPE.LENGTHDEL && service.field.rule == PB_RULE.REPEATED && service.field.option_packed
			if type != tt.type && !packed_ok:
				break
			res = unpack_field(bytes, offset, service.field, type, service.func_ref)
			if res > 0:
				service.state = PB_SERVICE_STATE.FILLED
		else:
			# Unknown tag: skip its payload and keep going.
			res = skip_unknown_field(bytes, offset, tt.type)
		if res > 0:
			offset = res
			if offset == limit:
				return offset
			elif offset > limit:
				return PB_ERR.PACKAGE_SIZE_MISMATCH
		elif res < 0:
			# Negative result is an error code from the field decoder.
			return res
		else:
			# res == 0: no progress possible.
			break
	return PB_ERR.UNDEFINED_STATE
537 | |||
# Serialize every filled field in the `data` service table, in ascending
# tag order. Returns the encoded message, or an empty PackedByteArray when
# a REQUIRED field was left unset (proto2 contract violation).
static func pack_message(data) -> PackedByteArray:
	var defaults
	if PROTO_VERSION == 2:
		defaults = DEFAULT_VALUES_2
	elif PROTO_VERSION == 3:
		defaults = DEFAULT_VALUES_3
	var result : PackedByteArray = PackedByteArray()
	var sorted_tags : Array = data.keys()
	sorted_tags.sort()
	for tag in sorted_tags:
		var entry = data[tag]
		var field = entry.field
		if field.value == null:
			if field.rule == PB_RULE.REQUIRED:
				print("Error: required field is not filled: Tag:", field.tag)
				return PackedByteArray()
			continue
		# Skip fields still holding their default value (proto3 omits them);
		# map fields are always emitted regardless of state.
		if entry.state == PB_SERVICE_STATE.UNFILLED \
				&& !field.is_map_field \
				&& typeof(field.value) == typeof(defaults[field.type]) \
				&& field.value == defaults[field.type]:
			continue
		# An empty repeated field contributes no bytes.
		if field.rule == PB_RULE.REPEATED && field.value.size() == 0:
			continue
		result.append_array(pack_field(field))
	return result
561 | |||
# True when every REQUIRED field in the service table has been filled.
static func check_required(data) -> bool:
	for tag in data:
		var entry = data[tag]
		if entry.field.rule == PB_RULE.REQUIRED && entry.state == PB_SERVICE_STATE.UNFILLED:
			return false
	return true
568 | |||
# Collapse a list of map-entry messages into a plain Dictionary.
# Later entries overwrite earlier ones with the same key, matching
# protobuf map semantics.
static func construct_map(key_values):
	var mapping = {}
	for pair in key_values:
		mapping[pair.get_key()] = pair.get_value()
	return mapping
574 | |||
# Prefix `text` with `nesting` levels of the debug indent string.
static func tabulate(text : String, nesting : int) -> String:
	return DEBUG_TAB.repeat(nesting) + text
580 | |||
# Render a single field value as debug text. MESSAGE values recurse via
# message_to_string; BYTES become "<b0, b1, …>"; STRING is quoted.
static func value_to_string(value, field : PBField, nesting : int) -> String:
	if field.type == PB_DATA_TYPE.MESSAGE:
		var inner : String = message_to_string(value.data, nesting + 1)
		if inner == "":
			return "{}"
		# Closing brace sits at the caller's indent level.
		return "{\n" + inner + tabulate("}", nesting)
	if field.type == PB_DATA_TYPE.BYTES:
		var parts : PackedStringArray = PackedStringArray()
		for b in value:
			parts.append(str(b))
		return "<" + ", ".join(parts) + ">"
	if field.type == PB_DATA_TYPE.STRING:
		return "\"" + value + "\""
	if field.type == PB_DATA_TYPE.ENUM:
		return "ENUM::" + str(value)
	return str(value)
609 | |||
# Render one field (name plus value) as debug text, terminated by ";\n".
# Maps print as "( key: value, … )", repeated fields as "[ 0: v, … ]".
static func field_to_string(field : PBField, nesting : int) -> String:
	var result : String = tabulate(field.name + ": ", nesting)
	if field.type == PB_DATA_TYPE.MAP:
		if field.value.size() > 0:
			result += "(\n"
			nesting += 1
			for i in range(field.value.size()):
				# Map entry messages keep their key at tag 1 and value at tag 2.
				var local_key_value = field.value[i].data[1].field
				result += tabulate(value_to_string(local_key_value.value, local_key_value, nesting), nesting) + ": "
				local_key_value = field.value[i].data[2].field
				result += value_to_string(local_key_value.value, local_key_value, nesting)
				# Comma between entries, none after the last.
				if i != (field.value.size() - 1):
					result += ","
				result += "\n"
			nesting -= 1
			result += tabulate(")", nesting)
		else:
			result += "()"
	elif field.rule == PB_RULE.REPEATED:
		if field.value.size() > 0:
			result += "[\n"
			nesting += 1
			for i in range(field.value.size()):
				# Elements are labeled by their index.
				result += tabulate(str(i) + ": ", nesting)
				result += value_to_string(field.value[i], field, nesting)
				if i != (field.value.size() - 1):
					result += ","
				result += "\n"
			nesting -= 1
			result += tabulate("]", nesting)
		else:
			result += "[]"
	else:
		result += value_to_string(field.value, field, nesting)
	result += ";\n"
	return result
646 | |||
# Render an entire message's service table as human-readable debug text.
# Mirrors pack_message's skip rules: default-valued and empty repeated
# fields are omitted; an unset REQUIRED field prints "<name>: error".
static func message_to_string(data, nesting : int = 0) -> String:
	var defaults
	if PROTO_VERSION == 2:
		defaults = DEFAULT_VALUES_2
	elif PROTO_VERSION == 3:
		defaults = DEFAULT_VALUES_3
	var out : String = ""
	var sorted_tags : Array = data.keys()
	sorted_tags.sort()
	for tag in sorted_tags:
		var entry = data[tag]
		var field = entry.field
		if field.value == null:
			if field.rule == PB_RULE.REQUIRED:
				out += field.name + ": " + "error"
			continue
		# Unfilled field still holding its default value: omit (maps excepted).
		if entry.state == PB_SERVICE_STATE.UNFILLED \
				&& !field.is_map_field \
				&& typeof(field.value) == typeof(defaults[field.type]) \
				&& field.value == defaults[field.type]:
			continue
		if field.rule == PB_RULE.REPEATED && field.value.size() == 0:
			continue
		out += field_to_string(field, nesting)
	return out
diff --git a/vendor/godobuf/addons/protobuf/protobuf_util.gd b/vendor/godobuf/addons/protobuf/protobuf_util.gd new file mode 100644 index 0000000..5941cb8 --- /dev/null +++ b/vendor/godobuf/addons/protobuf/protobuf_util.gd | |||
@@ -0,0 +1,46 @@ | |||
1 | # | ||
2 | # BSD 3-Clause License | ||
3 | # | ||
4 | # Copyright (c) 2018, Oleg Malyavkin | ||
5 | # All rights reserved. | ||
6 | # | ||
7 | # Redistribution and use in source and binary forms, with or without | ||
8 | # modification, are permitted provided that the following conditions are met: | ||
9 | # | ||
10 | # * Redistributions of source code must retain the above copyright notice, this | ||
11 | # list of conditions and the following disclaimer. | ||
12 | # | ||
13 | # * Redistributions in binary form must reproduce the above copyright notice, | ||
14 | # this list of conditions and the following disclaimer in the documentation | ||
15 | # and/or other materials provided with the distribution. | ||
16 | # | ||
17 | # * Neither the name of the copyright holder nor the names of its | ||
18 | # contributors may be used to endorse or promote products derived from | ||
19 | # this software without specific prior written permission. | ||
20 | # | ||
21 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" | ||
22 | # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE | ||
23 | # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE | ||
24 | # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE | ||
25 | # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL | ||
26 | # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR | ||
27 | # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER | ||
28 | # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, | ||
29 | # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | ||
30 | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||
31 | |||
# Directory portion of a slash-separated path, with a trailing "/".
# Empty path segments are discarded, so "a//b/c" yields "a/b/".
# A leading "/" on the input is preserved on the result.
static func extract_dir(file_path):
	var segments = file_path.split("/", false)
	# Drop the final segment (the file name itself).
	segments.remove_at(segments.size() - 1)
	var dir = "/" if file_path.begins_with("/") else ""
	for seg in segments:
		dir += seg + "/"
	return dir
43 | |||
# Final segment of a slash-separated path (empty segments discarded,
# so a trailing "/" does not produce an empty result).
static func extract_filename(file_path):
	var segments = file_path.split("/", false)
	return segments[segments.size() - 1]
diff --git a/vendor/godobuf/default_env.tres b/vendor/godobuf/default_env.tres new file mode 100644 index 0000000..20207a4 --- /dev/null +++ b/vendor/godobuf/default_env.tres | |||
@@ -0,0 +1,7 @@ | |||
1 | [gd_resource type="Environment" load_steps=2 format=2] | ||
2 | |||
3 | [sub_resource type="ProceduralSky" id=1] | ||
4 | |||
5 | [resource] | ||
6 | background_mode = 2 | ||
7 | background_sky = SubResource( 1 ) | ||
diff --git a/vendor/godobuf/logo.png b/vendor/godobuf/logo.png new file mode 100644 index 0000000..4ff9029 --- /dev/null +++ b/vendor/godobuf/logo.png | |||
Binary files differ | |||
diff --git a/vendor/godobuf/logo.png.import b/vendor/godobuf/logo.png.import new file mode 100644 index 0000000..43df7a6 --- /dev/null +++ b/vendor/godobuf/logo.png.import | |||
@@ -0,0 +1,35 @@ | |||
1 | [remap] | ||
2 | |||
3 | importer="texture" | ||
4 | type="StreamTexture" | ||
5 | path="res://.import/logo.png-cca8726399059c8d4f806e28e356b14d.stex" | ||
6 | metadata={ | ||
7 | "vram_texture": false | ||
8 | } | ||
9 | |||
10 | [deps] | ||
11 | |||
12 | source_file="res://logo.png" | ||
13 | dest_files=[ "res://.import/logo.png-cca8726399059c8d4f806e28e356b14d.stex" ] | ||
14 | |||
15 | [params] | ||
16 | |||
17 | compress/mode=0 | ||
18 | compress/lossy_quality=0.7 | ||
19 | compress/hdr_mode=0 | ||
20 | compress/bptc_ldr=0 | ||
21 | compress/normal_map=0 | ||
22 | flags/repeat=0 | ||
23 | flags/filter=true | ||
24 | flags/mipmaps=false | ||
25 | flags/anisotropic=false | ||
26 | flags/srgb=2 | ||
27 | process/fix_alpha_border=true | ||
28 | process/premult_alpha=false | ||
29 | process/HDR_as_SRGB=false | ||
30 | process/invert_color=false | ||
31 | process/normal_map_invert_y=false | ||
32 | stream=false | ||
33 | size_limit=0 | ||
34 | detect_3d=true | ||
35 | svg/scale=1.0 | ||
diff --git a/vendor/godobuf/project.godot b/vendor/godobuf/project.godot new file mode 100644 index 0000000..8cef0a4 --- /dev/null +++ b/vendor/godobuf/project.godot | |||
@@ -0,0 +1,26 @@ | |||
1 | ; Engine configuration file. | ||
2 | ; It's best edited using the editor UI and not directly, | ||
3 | ; since the parameters that go here are not all obvious. | ||
4 | ; | ||
5 | ; Format: | ||
6 | ; [section] ; section goes between [] | ||
7 | ; param=value ; assign values to parameters | ||
8 | |||
9 | config_version=4 | ||
10 | |||
11 | _global_script_classes=[ ] | ||
12 | _global_script_class_icons={ | ||
13 | } | ||
14 | |||
15 | [application] | ||
16 | |||
17 | config/name="Protobuf Plugin" | ||
18 | config/icon="res://logo.png" | ||
19 | |||
20 | [editor_plugins] | ||
21 | |||
22 | enabled=PoolStringArray( "res://addons/protobuf/plugin.cfg" ) | ||
23 | |||
24 | [rendering] | ||
25 | |||
26 | environment/default_environment="res://default_env.tres" | ||