-rw-r--r--  README.md | 79
-rw-r--r--  apworld/README.md | 48
-rw-r--r--  apworld/__init__.py | 16
-rw-r--r--  apworld/docs/en_Lingo_2.md | 4
-rw-r--r--  apworld/items.py | 24
-rw-r--r--  apworld/options.py | 37
-rw-r--r--  apworld/player_logic.py | 155
-rw-r--r--  apworld/regions.py | 28
-rw-r--r--  apworld/requirements.txt | 2
-rw-r--r--  apworld/rules.py | 35
-rw-r--r--  apworld/static_logic.py | 19
-rw-r--r--  apworld/version.py | 1
-rw-r--r--  client/Archipelago/gamedata.gd | 47
-rw-r--r--  client/Archipelago/manager.gd | 20
-rw-r--r--  client/Archipelago/panel.gd | 101
-rw-r--r--  client/Archipelago/player.gd | 25
-rw-r--r--  client/Archipelago/settings_screen.gd | 44
-rw-r--r--  client/Archipelago/teleport.gd | 38
-rw-r--r--  client/Archipelago/teleportListener.gd | 11
-rw-r--r--  client/Archipelago/visibilityListener.gd | 38
-rw-r--r--  client/Archipelago/worldportListener.gd | 4
-rw-r--r--  client/README.md | 90
-rw-r--r--  client/archipelago.tscn | 4
-rw-r--r--  data/README.md | 13
-rw-r--r--  data/connections.txtpb | 58
-rw-r--r--  data/ids.yaml | 21
-rw-r--r--  data/maps/daedalus/connections.txtpb | 11
-rw-r--r--  data/maps/daedalus/doors.txtpb | 74
-rw-r--r--  data/maps/daedalus/rooms/Wonderland.txtpb | 1
-rw-r--r--  data/maps/the_bearer/connections.txtpb | 5
-rw-r--r--  data/maps/the_bearer/rooms/Back Area.txtpb | 6
-rw-r--r--  data/maps/the_bearer/rooms/Tree Entrance.txtpb | 6
-rw-r--r--  data/maps/the_darkroom/connections.txtpb | 15
-rw-r--r--  data/maps/the_darkroom/rooms/Congruent Entrance.txtpb | 7
-rw-r--r--  data/maps/the_darkroom/rooms/Cyan Hallway.txtpb | 7
-rw-r--r--  data/maps/the_darkroom/rooms/Double Sided Entrance.txtpb | 7
-rw-r--r--  data/maps/the_darkroom/rooms/First Room.txtpb | 12
-rw-r--r--  data/maps/the_darkroom/rooms/Second Room.txtpb | 6
-rw-r--r--  data/maps/the_entry/connections.txtpb | 15
-rw-r--r--  data/maps/the_entry/doors.txtpb | 4
-rw-r--r--  data/maps/the_entry/rooms/Flipped Second Room.txtpb | 7
-rw-r--r--  data/maps/the_entry/rooms/Four Rooms Entrance.txtpb | 7
-rw-r--r--  data/maps/the_entry/rooms/Liberated Entrance.txtpb | 6
-rw-r--r--  data/maps/the_entry/rooms/Link Area.txtpb | 12
-rw-r--r--  data/maps/the_entry/rooms/Literate Entrance.txtpb | 6
-rw-r--r--  data/maps/the_entry/rooms/Starting Room.txtpb | 4
-rw-r--r--  data/maps/the_gallery/doors.txtpb | 34
-rw-r--r--  data/maps/the_great/doors.txtpb | 5
-rw-r--r--  data/maps/the_great/rooms/West Side.txtpb | 1
-rw-r--r--  data/maps/the_owl/doors.txtpb | 2
-rw-r--r--  data/metadata.txtpb | 23
-rw-r--r--  proto/data.proto | 5
-rw-r--r--  proto/human.proto | 9
-rw-r--r--  tools/assign_ids/main.cpp | 18
-rw-r--r--  tools/datapacker/main.cpp | 10
-rw-r--r--  tools/validator/human_processor.cpp | 26
-rw-r--r--  tools/validator/structs.h | 3
-rw-r--r--  tools/validator/validator.cpp | 51
-rw-r--r--  vendor/godobuf/LICENSE | 29
-rw-r--r--  vendor/godobuf/README | 4
-rw-r--r--  vendor/godobuf/addons/protobuf/parser.gd | 2254
-rw-r--r--  vendor/godobuf/addons/protobuf/plugin.cfg | 7
-rw-r--r--  vendor/godobuf/addons/protobuf/protobuf_cmdln.gd | 66
-rw-r--r--  vendor/godobuf/addons/protobuf/protobuf_core.gd | 668
-rw-r--r--  vendor/godobuf/addons/protobuf/protobuf_util.gd | 46
-rw-r--r--  vendor/godobuf/default_env.tres | 7
-rw-r--r--  vendor/godobuf/logo.png | bin 0 -> 19026 bytes
-rw-r--r--  vendor/godobuf/logo.png.import | 35
-rw-r--r--  vendor/godobuf/project.godot | 26
69 files changed, 4345 insertions, 164 deletions
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..481061b
--- /dev/null
+++ b/README.md
@@ -0,0 +1,79 @@
# lingo2-archipelago

[Archipelago](https://archipelago.gg/) is an open-source project that supports
randomizing a number of different games and combining them into one cooperative
experience. Items from each game are hidden in other games. For more information
about Archipelago, you can look at their website.

This is a project that modifies the game
[Lingo 2](https://www.lingothegame.com/lingo2.html) so that it can be played as
part of an Archipelago multiworld game.

There are multiple parts to this project:

- [Client](https://code.fourisland.com/lingo2-archipelago/about/client/README.md)
- [Apworld](https://code.fourisland.com/lingo2-archipelago/about/apworld/README.md)
- [Data](https://code.fourisland.com/lingo2-archipelago/about/data/README.md)

## Frequently Asked Questions

### Why aren't the starting room letters shuffled?

The letter requirements for solving puzzles are very restrictive, especially in
the early game. It is possible for the generator to find some subset of letters
and doors to place in the starting room such that you are not trapped, but this
places a lot of strain on generation and leads to significantly more generation
failures.

As a result, the starting room letters (H1, I1, N1, and T1) are always present
in the starting room, even when remote letter shuffle is enabled. These letters
will _also_ count as clearing a check, so you will send out another item at the
same time as collecting the letter.

### What about wall snipes?

"Wall sniping" refers to the fact that you are able to solve puzzles on the
other side of opaque walls. The player is never expected or required to do this
in normal gameplay. This randomizer does not change how wall snipes work, but it
will likewise never require the use of them.

### How do cyan doors work?

In the base game, there are a number of cyan-colored doors that ordinarily open
once you collect H2 in The Repetitive. There are also a handful of panels that
only appear upon getting H2, which the apworld treats the same as the cyan
doors.

There is an option that lets you choose how these doors and panels behave. By
default, they act the same as in the base game: they only open or appear after
collecting H2. Note that this means collecting the actual H2 collectable in The
Repetitive; receiving H2 via remote letter shuffle does not count for this
requirement. However, you can also make cyan doors activate upon collecting or
receiving your first double letter, regardless of what it is or whether it's
remote. Finally, you can lock cyan doors behind an item called "Cyan Doors".

It is important to note, however, that the Cyan Door Behavior option only
applies to cyan doors that are not already affected by another type of
shuffling. When door shuffle is on, the following cyan doors are activated by
individual items and are not impacted by your choice of Cyan Door Behavior:

- The entrance to The Tower from The Great (The Great - Tower Entrance)
- The entrance to The Butterfly from The Bearer (The Bearer - Butterfly
  Entrance)
- The entrance to The Repetitive from The Entry (The Entry - Repetitive
  Entrance)
- The eye painting near the yellow color hallway in Daedalus (Daedalus - Eye
  Painting)

Additionally, when control center color shuffle is enabled, the orange door in
The Unkempt (which ordinarily doubles as a cyan door) opens upon receiving the
Control Center Orange Doors item, instead of following the Cyan Door Behavior
option.

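For those curious about the implementation, here is a minimal sketch of what each
behavior translates to in the randomizer logic. It uses an illustrative stand-in
class rather than the apworld's real `AccessRequirements`, and is simplified from
the cyan-door handling in `apworld/player_logic.py` further down in this diff:

```python
# Illustrative sketch of how each Cyan Door Behavior setting maps to logic
# requirements; not the actual apworld code.
from dataclasses import dataclass, field


@dataclass
class Requirements:
    rooms: set[str] = field(default_factory=set)   # regions the player must reach
    items: set[str] = field(default_factory=set)   # items the player must receive
    cyans: bool = False                             # "any double letter" flag


def apply_cyan_door_behavior(reqs: Requirements, behavior: str) -> None:
    if behavior == "collect_h2":
        # Default: reaching the actual H2 collectable in The Repetitive.
        reqs.rooms.add("The Repetitive - Main Room")
    elif behavior == "any_double_letter":
        # Any level 2 letter, whether collected locally or received remotely.
        reqs.cyans = True
    elif behavior == "item":
        # Cyan doors open only once the "Cyan Doors" item has been received.
        reqs.items.add("Cyan Doors")
```
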
### Help! I lost C/G in The Congruent!

If you place C or G into the relevant keyholder in The Congruent, the keyholder
disappears. You can retrieve your letter immediately by pressing C or G again
while standing in the same position, as the keyholder is still there, just
invisible. If you have already left the room, though, there is an easier way to
get your letters back: just use the Key Return in The Entry.
diff --git a/apworld/README.md b/apworld/README.md
new file mode 100644
index 0000000..13374b2
--- /dev/null
+++ b/apworld/README.md
@@ -0,0 +1,48 @@
# Lingo 2 Apworld

The Lingo 2 Apworld allows you to generate Archipelago multiworlds containing
Lingo 2.

## Installation

1. Download the Lingo 2 Apworld from
   [the releases page](https://code.fourisland.com/lingo2-archipelago/about/apworld/CHANGELOG.md).
2. If you do not already have it, download and install the
   [Archipelago software](https://github.com/ArchipelagoMW/Archipelago/releases/).
3. Double click on `lingo2.apworld` to install it, or copy it manually to the
   `custom_worlds` folder of your Archipelago installation.

## Running from source

The apworld is mostly written in Python, which does not need to be compiled.
However, there are two files that need to be generated before the apworld can be
used.

The first file is `data.binpb`, the datafile containing the randomizer logic.
You can read about how to generate it on
[its own README page](https://code.fourisland.com/lingo2-archipelago/about/data/README.md).
Once you have it, put it in a subfolder of `apworld` called `generated`.

The second generated file is `data_pb2.py`. This file allows Archipelago to read
the datafile. We use `protoc`, the Protocol Buffer compiler, to generate it. As
of 0.6.3, Archipelago ships with protobuf 3.20.3, which means we need to compile
our proto file with a compatible version.

If you followed the steps to generate `data.binpb` and compiled the `datapacker`
tool yourself, you will already have protobuf version 3.21.12 installed through
vcpkg. You can then run a command similar to this in order to generate the
Python file:

```shell
.\out\build\x64-Debug\vcpkg_installed\x64-windows\tools\protobuf\protoc.exe -Iproto\ ^
  --python_out=apworld\generated\ .\proto\data.proto
```

The exact path to `protoc.exe` is going to depend on where vcpkg installed its
packages. The above location is where Visual Studio will probably put it.

After generating those two files, the apworld should be functional. You can copy
it into an Archipelago source tree (rename the folder `apworld` to `lingo2` if
you do so) if you want to edit/debug the code. Otherwise, you can zip up the
folder and rename it to `lingo2.apworld` in order to package it for
distribution.
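
As a quick sanity check, here is a minimal sketch (an assumption of this guide,
not part of the apworld itself) that confirms the generated bindings can parse
the datafile. It assumes you run it from the repository root with the `protobuf`
package installed:

```python
# Hypothetical sanity check: parse data.binpb with the protoc-generated module.
import sys

sys.path.insert(0, "apworld/generated")  # make data_pb2.py importable
import data_pb2

objects = data_pb2.AllObjects()
with open("apworld/generated/data.binpb", "rb") as f:
    objects.ParseFromString(f.read())

# The apworld reports this data version in slot data alongside APWORLD_VERSION.
print("data version:", objects.version)
```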
diff --git a/apworld/__init__.py b/apworld/__init__.py
index c45e8b3..54f870f 100644
--- a/apworld/__init__.py
+++ b/apworld/__init__.py
@@ -1,18 +1,27 @@
1""" 1"""
2Archipelago init file for Lingo 2 2Archipelago init file for Lingo 2
3""" 3"""
4from BaseClasses import ItemClassification, Item 4from BaseClasses import ItemClassification, Item, Tutorial
5from worlds.AutoWorld import WebWorld, World 5from worlds.AutoWorld import WebWorld, World
6from .items import Lingo2Item 6from .items import Lingo2Item
7from .options import Lingo2Options 7from .options import Lingo2Options
8from .player_logic import Lingo2PlayerLogic 8from .player_logic import Lingo2PlayerLogic
9from .regions import create_regions 9from .regions import create_regions
10from .static_logic import Lingo2StaticLogic 10from .static_logic import Lingo2StaticLogic
11from .version import APWORLD_VERSION
11 12
12 13
13class Lingo2WebWorld(WebWorld): 14class Lingo2WebWorld(WebWorld):
14 rich_text_options_doc = True 15 rich_text_options_doc = True
15 theme = "grass" 16 theme = "grass"
17 tutorials = [Tutorial(
18 "Multiworld Setup Guide",
19 "A guide to playing Lingo 2 with Archipelago.",
20 "English",
21 "en_Lingo_2.md",
22 "setup/en",
23 ["hatkirby"]
24 )]
16 25
17 26
18class Lingo2World(World): 27class Lingo2World(World):
@@ -32,6 +41,8 @@ class Lingo2World(World):
32 static_logic = Lingo2StaticLogic() 41 static_logic = Lingo2StaticLogic()
33 item_name_to_id = static_logic.item_name_to_id 42 item_name_to_id = static_logic.item_name_to_id
34 location_name_to_id = static_logic.location_name_to_id 43 location_name_to_id = static_logic.location_name_to_id
44 item_name_groups = static_logic.item_name_groups
45 location_name_groups = static_logic.location_name_groups
35 46
36 player_logic: Lingo2PlayerLogic 47 player_logic: Lingo2PlayerLogic
37 48
@@ -71,12 +82,15 @@ class Lingo2World(World):
71 "keyholder_sanity", 82 "keyholder_sanity",
72 "shuffle_control_center_colors", 83 "shuffle_control_center_colors",
73 "shuffle_doors", 84 "shuffle_doors",
85 "shuffle_gallery_paintings",
74 "shuffle_letters", 86 "shuffle_letters",
87 "shuffle_symbols",
75 "victory_condition", 88 "victory_condition",
76 ] 89 ]
77 90
78 slot_data = { 91 slot_data = {
79 **self.options.as_dict(*slot_options), 92 **self.options.as_dict(*slot_options),
93 "version": [self.static_logic.get_data_version(), APWORLD_VERSION],
80 } 94 }
81 95
82 return slot_data 96 return slot_data
diff --git a/apworld/docs/en_Lingo_2.md b/apworld/docs/en_Lingo_2.md
new file mode 100644
index 0000000..977795a
--- /dev/null
+++ b/apworld/docs/en_Lingo_2.md
@@ -0,0 +1,4 @@
# Lingo 2

See [the project README](https://code.fourisland.com/lingo2-archipelago/about/)
for installation instructions and frequently asked questions.
\ No newline at end of file
diff --git a/apworld/items.py b/apworld/items.py
index 971a709..32568a3 100644
--- a/apworld/items.py
+++ b/apworld/items.py
@@ -1,5 +1,29 @@
1from .generated import data_pb2 as data_pb2
1from BaseClasses import Item 2from BaseClasses import Item
2 3
3 4
4class Lingo2Item(Item): 5class Lingo2Item(Item):
5 game: str = "Lingo 2" 6 game: str = "Lingo 2"
7
8
9SYMBOL_ITEMS: dict[data_pb2.PuzzleSymbol, str] = {
10 data_pb2.PuzzleSymbol.SUN: "Sun Symbol",
11 data_pb2.PuzzleSymbol.SPARKLES: "Sparkles Symbol",
12 data_pb2.PuzzleSymbol.ZERO: "Zero Symbol",
13 data_pb2.PuzzleSymbol.EXAMPLE: "Example Symbol",
14 data_pb2.PuzzleSymbol.BOXES: "Boxes Symbol",
15 data_pb2.PuzzleSymbol.PLANET: "Planet Symbol",
16 data_pb2.PuzzleSymbol.PYRAMID: "Pyramid Symbol",
17 data_pb2.PuzzleSymbol.CROSS: "Cross Symbol",
18 data_pb2.PuzzleSymbol.SWEET: "Sweet Symbol",
19 data_pb2.PuzzleSymbol.GENDER: "Gender Symbol",
20 data_pb2.PuzzleSymbol.AGE: "Age Symbol",
21 data_pb2.PuzzleSymbol.SOUND: "Sound Symbol",
22 data_pb2.PuzzleSymbol.ANAGRAM: "Anagram Symbol",
23 data_pb2.PuzzleSymbol.JOB: "Job Symbol",
24 data_pb2.PuzzleSymbol.STARS: "Stars Symbol",
25 data_pb2.PuzzleSymbol.NULL: "Null Symbol",
26 data_pb2.PuzzleSymbol.EVAL: "Eval Symbol",
27 data_pb2.PuzzleSymbol.LINGO: "Lingo Symbol",
28 data_pb2.PuzzleSymbol.QUESTION: "Question Symbol",
29}
diff --git a/apworld/options.py b/apworld/options.py
index 2197b0f..4f0b32a 100644
--- a/apworld/options.py
+++ b/apworld/options.py
@@ -1,9 +1,9 @@
1from dataclasses import dataclass 1from dataclasses import dataclass
2 2
3from Options import PerGameCommonOptions, Toggle, Choice 3from Options import PerGameCommonOptions, Toggle, Choice, DefaultOnToggle
4 4
5 5
6class ShuffleDoors(Toggle): 6class ShuffleDoors(DefaultOnToggle):
7 """If enabled, most doors will open from receiving an item rather than fulfilling the in-game requirements.""" 7 """If enabled, most doors will open from receiving an item rather than fulfilling the in-game requirements."""
8 display_name = "Shuffle Doors" 8 display_name = "Shuffle Doors"
9 9
@@ -16,6 +16,11 @@ class ShuffleControlCenterColors(Toggle):
16 display_name = "Shuffle Control Center Colors" 16 display_name = "Shuffle Control Center Colors"
17 17
18 18
19class ShuffleGalleryPaintings(Toggle):
20 """If enabled, gallery paintings will appear from receiving an item rather than by triggering them normally."""
21 display_name = "Shuffle Gallery Paintings"
22
23
19class ShuffleLetters(Choice): 24class ShuffleLetters(Choice):
20 """ 25 """
21 Controls how letter unlocks are handled. Note that H1, I1, N1, and T1 will always be present at their vanilla 26 Controls how letter unlocks are handled. Note that H1, I1, N1, and T1 will always be present at their vanilla
@@ -39,6 +44,14 @@ class ShuffleLetters(Choice):
39 option_item_cyan = 4 44 option_item_cyan = 4
40 45
41 46
47class ShuffleSymbols(Toggle):
48 """
49 If enabled, 19 items will be added to the pool, representing the different symbols that can appear on a panel.
50 Players will be prevented from solving puzzles with symbols on them until all of the required symbols are unlocked.
51 """
52 display_name = "Shuffle Symbols"
53
54
42class KeyholderSanity(Toggle): 55class KeyholderSanity(Toggle):
43 """ 56 """
44 If enabled, 26 locations will be created for placing each key into its respective Green Ending keyholder. 57 If enabled, 26 locations will be created for placing each key into its respective Green Ending keyholder.
@@ -80,7 +93,23 @@ class DaedalusRoofAccess(Toggle):
80 93
81 94
82class VictoryCondition(Choice): 95class VictoryCondition(Choice):
83 """Victory condition.""" 96 """
97 This option determines what your goal is.
98
99 - **Gray Ending** (The Colorful)
100 - **Purple Ending** (The Sun Temple). This ordinarily requires all level 1 (purple) letters.
101 - **Mint Ending** (typing EXIT into the keyholders in Control Center)
102 - **Black Ending** (The Graveyard)
103 - **Blue Ending** (The Words)
104 - **Cyan Ending** (The Parthenon). This ordinarily requires almost all level 2 (cyan) letters.
105 - **Red Ending** (The Tower)
106 - **Plum Ending** (The Wondrous / The Door)
107 - **Orange Ending** (the castle in Daedalus)
108 - **Gold Ending** (The Gold). This involves going through the color rooms in Daedalus.
109 - **Yellow Ending** (The Gallery). This requires unlocking all gallery paintings.
110 - **Green Ending** (The Ancient). This requires filling all keyholders with specific letters.
111 - **White Ending** (Control Center). This combines every other ending.
112 """
84 display_name = "Victory Condition" 113 display_name = "Victory Condition"
85 option_gray_ending = 0 114 option_gray_ending = 0
86 option_purple_ending = 1 115 option_purple_ending = 1
@@ -101,7 +130,9 @@ class VictoryCondition(Choice):
101class Lingo2Options(PerGameCommonOptions): 130class Lingo2Options(PerGameCommonOptions):
102 shuffle_doors: ShuffleDoors 131 shuffle_doors: ShuffleDoors
103 shuffle_control_center_colors: ShuffleControlCenterColors 132 shuffle_control_center_colors: ShuffleControlCenterColors
133 shuffle_gallery_paintings: ShuffleGalleryPaintings
104 shuffle_letters: ShuffleLetters 134 shuffle_letters: ShuffleLetters
135 shuffle_symbols: ShuffleSymbols
105 keyholder_sanity: KeyholderSanity 136 keyholder_sanity: KeyholderSanity
106 cyan_door_behavior: CyanDoorBehavior 137 cyan_door_behavior: CyanDoorBehavior
107 daedalus_roof_access: DaedalusRoofAccess 138 daedalus_roof_access: DaedalusRoofAccess
diff --git a/apworld/player_logic.py b/apworld/player_logic.py
index c94b809..8e2a523 100644
--- a/apworld/player_logic.py
+++ b/apworld/player_logic.py
@@ -1,6 +1,7 @@
1from enum import IntEnum, auto 1from enum import IntEnum, auto
2 2
3from .generated import data_pb2 as data_pb2 3from .generated import data_pb2 as data_pb2
4from .items import SYMBOL_ITEMS
4from typing import TYPE_CHECKING, NamedTuple 5from typing import TYPE_CHECKING, NamedTuple
5 6
6from .options import VictoryCondition, ShuffleLetters, CyanDoorBehavior 7from .options import VictoryCondition, ShuffleLetters, CyanDoorBehavior
@@ -23,21 +24,38 @@ class AccessRequirements:
23 items: set[str] 24 items: set[str]
24 progressives: dict[str, int] 25 progressives: dict[str, int]
25 rooms: set[str] 26 rooms: set[str]
26 symbols: set[str]
27 letters: dict[str, int] 27 letters: dict[str, int]
28 cyans: bool 28 cyans: bool
29 29
30 # This is an AND of ORs. 30 # This is an AND of ORs.
31 or_logic: list[list["AccessRequirements"]] 31 or_logic: list[list["AccessRequirements"]]
32 32
33 # When complete_at is set, at least that many of the requirements in possibilities must be accessible. This should
34 # only be used for doors with complete_at > 1, as or_logic is more efficient for complete_at == 1.
35 complete_at: int | None
36 possibilities: list["AccessRequirements"]
37
33 def __init__(self): 38 def __init__(self):
34 self.items = set() 39 self.items = set()
35 self.progressives = dict() 40 self.progressives = dict()
36 self.rooms = set() 41 self.rooms = set()
37 self.symbols = set()
38 self.letters = dict() 42 self.letters = dict()
39 self.cyans = False 43 self.cyans = False
40 self.or_logic = list() 44 self.or_logic = list()
45 self.complete_at = None
46 self.possibilities = list()
47
48 def copy(self) -> "AccessRequirements":
49 reqs = AccessRequirements()
50 reqs.items = self.items.copy()
51 reqs.progressives = self.progressives.copy()
52 reqs.rooms = self.rooms.copy()
53 reqs.letters = self.letters.copy()
54 reqs.cyans = self.cyans
55 reqs.or_logic = [[other_req.copy() for other_req in disjunction] for disjunction in self.or_logic]
56 reqs.complete_at = self.complete_at
57 reqs.possibilities = self.possibilities.copy()
58 return reqs
41 59
42 def merge(self, other: "AccessRequirements"): 60 def merge(self, other: "AccessRequirements"):
43 for item in other.items: 61 for item in other.items:
@@ -49,9 +67,6 @@ class AccessRequirements:
49 for room in other.rooms: 67 for room in other.rooms:
50 self.rooms.add(room) 68 self.rooms.add(room)
51 69
52 for symbol in other.symbols:
53 self.symbols.add(symbol)
54
55 for letter, level in other.letters.items(): 70 for letter, level in other.letters.items():
56 self.letters[letter] = max(self.letters.get(letter, 0), level) 71 self.letters[letter] = max(self.letters.get(letter, 0), level)
57 72
@@ -60,6 +75,70 @@ class AccessRequirements:
60 for disjunction in other.or_logic: 75 for disjunction in other.or_logic:
61 self.or_logic.append(disjunction) 76 self.or_logic.append(disjunction)
62 77
78 if other.complete_at is not None:
79 # Merging multiple requirements that use complete_at sucks, and is part of why we want to minimize use of
80 # it. If both requirements use complete_at, we will cheat by using the or_logic field, which supports
81 # conjunctions of requirements.
82 if self.complete_at is not None:
83 print("Merging requirements with complete_at > 1. This is messy and should be avoided!")
84
85 left_req = AccessRequirements()
86 left_req.complete_at = self.complete_at
87 left_req.possibilities = self.possibilities
88 self.or_logic.append([left_req])
89
90 self.complete_at = None
91 self.possibilities = list()
92
93 right_req = AccessRequirements()
94 right_req.complete_at = other.complete_at
95 right_req.possibilities = other.possibilities
96 self.or_logic.append([right_req])
97 else:
98 self.complete_at = other.complete_at
99 self.possibilities = other.possibilities
100
101 def is_empty(self) -> bool:
102 return (len(self.items) == 0 and len(self.progressives) == 0 and len(self.rooms) == 0 and len(self.letters) == 0
103 and not self.cyans and len(self.or_logic) == 0 and self.complete_at is None)
104
105 def __eq__(self, other: "AccessRequirements"):
106 return (self.items == other.items and self.progressives == other.progressives and self.rooms == other.rooms and
107 self.letters == other.letters and self.cyans == other.cyans and self.or_logic == other.or_logic and
108 self.complete_at == other.complete_at and self.possibilities == other.possibilities)
109
110 def simplify(self):
111 resimplify = False
112
113 if len(self.or_logic) > 0:
114 old_or_logic = self.or_logic
115
116 def remove_redundant(sub_reqs: "AccessRequirements"):
117 sub_reqs.letters = {l: v for l, v in sub_reqs.letters.items() if self.letters.get(l, 0) < v}
118
119 self.or_logic = []
120 for disjunction in old_or_logic:
121 new_disjunction = []
122 for ssr in disjunction:
123 remove_redundant(ssr)
124 if not ssr.is_empty():
125 new_disjunction.append(ssr)
126 else:
127 new_disjunction.clear()
128 break
129 if len(new_disjunction) == 1:
130 self.merge(new_disjunction[0])
131 resimplify = True
132 elif len(new_disjunction) > 1:
133 if all(cjr == new_disjunction[0] for cjr in new_disjunction):
134 self.merge(new_disjunction[0])
135 resimplify = True
136 else:
137 self.or_logic.append(new_disjunction)
138
139 if resimplify:
140 self.simplify()
141
63 def __repr__(self): 142 def __repr__(self):
64 parts = [] 143 parts = []
65 if len(self.items) > 0: 144 if len(self.items) > 0:
@@ -68,14 +147,16 @@ class AccessRequirements:
68 parts.append(f"progressives={self.progressives}") 147 parts.append(f"progressives={self.progressives}")
69 if len(self.rooms) > 0: 148 if len(self.rooms) > 0:
70 parts.append(f"rooms={self.rooms}") 149 parts.append(f"rooms={self.rooms}")
71 if len(self.symbols) > 0:
72 parts.append(f"symbols={self.symbols}")
73 if len(self.letters) > 0: 150 if len(self.letters) > 0:
74 parts.append(f"letters={self.letters}") 151 parts.append(f"letters={self.letters}")
75 if self.cyans: 152 if self.cyans:
76 parts.append(f"cyans=True") 153 parts.append(f"cyans=True")
77 if len(self.or_logic) > 0: 154 if len(self.or_logic) > 0:
78 parts.append(f"or_logic={self.or_logic}") 155 parts.append(f"or_logic={self.or_logic}")
156 if self.complete_at is not None:
157 parts.append(f"complete_at={self.complete_at}")
158 if len(self.possibilities) > 0:
159 parts.append(f"possibilities={self.possibilities}")
79 return f"AccessRequirements({", ".join(parts)})" 160 return f"AccessRequirements({", ".join(parts)})"
80 161
81 162
@@ -158,6 +239,9 @@ class Lingo2PlayerLogic:
158 not self.world.options.shuffle_control_center_colors): 239 not self.world.options.shuffle_control_center_colors):
159 continue 240 continue
160 241
242 if door.type == data_pb2.DoorType.GALLERY_PAINTING and not self.world.options.shuffle_gallery_paintings:
243 continue
244
161 door_item_name = self.world.static_logic.get_door_item_name(door) 245 door_item_name = self.world.static_logic.get_door_item_name(door)
162 self.item_by_door[door.id] = (door_item_name, 1) 246 self.item_by_door[door.id] = (door_item_name, 1)
163 self.real_items.append(door_item_name) 247 self.real_items.append(door_item_name)
@@ -231,6 +315,10 @@ class Lingo2PlayerLogic:
231 self.locations_by_room.setdefault(keyholder.room_id, []).append(PlayerLocation(keyholder.ap_id, 315 self.locations_by_room.setdefault(keyholder.room_id, []).append(PlayerLocation(keyholder.ap_id,
232 reqs)) 316 reqs))
233 317
318 if self.world.options.shuffle_symbols:
319 for symbol_name in SYMBOL_ITEMS.values():
320 self.real_items.append(symbol_name)
321
234 def get_panel_reqs(self, panel_id: int, answer: str | None) -> AccessRequirements: 322 def get_panel_reqs(self, panel_id: int, answer: str | None) -> AccessRequirements:
235 if answer is None: 323 if answer is None:
236 if panel_id not in self.panel_reqs: 324 if panel_id not in self.panel_reqs:
@@ -253,25 +341,35 @@ class Lingo2PlayerLogic:
253 self.add_solution_reqs(reqs, answer) 341 self.add_solution_reqs(reqs, answer)
254 elif len(panel.proxies) > 0: 342 elif len(panel.proxies) > 0:
255 possibilities = [] 343 possibilities = []
344 already_filled = False
256 345
257 for proxy in panel.proxies: 346 for proxy in panel.proxies:
258 proxy_reqs = AccessRequirements() 347 proxy_reqs = AccessRequirements()
259 self.add_solution_reqs(proxy_reqs, proxy.answer) 348 self.add_solution_reqs(proxy_reqs, proxy.answer)
260 349
261 possibilities.append(proxy_reqs) 350 if not proxy_reqs.is_empty():
351 possibilities.append(proxy_reqs)
352 else:
353 already_filled = True
354 break
262 355
263 if not any(proxy.answer == panel.answer for proxy in panel.proxies): 356 if not already_filled and not any(proxy.answer == panel.answer for proxy in panel.proxies):
264 proxy_reqs = AccessRequirements() 357 proxy_reqs = AccessRequirements()
265 self.add_solution_reqs(proxy_reqs, panel.answer) 358 self.add_solution_reqs(proxy_reqs, panel.answer)
266 359
267 possibilities.append(proxy_reqs) 360 if not proxy_reqs.is_empty():
361 possibilities.append(proxy_reqs)
362 else:
363 already_filled = True
268 364
269 reqs.or_logic.append(possibilities) 365 if not already_filled:
366 reqs.or_logic.append(possibilities)
270 else: 367 else:
271 self.add_solution_reqs(reqs, panel.answer) 368 self.add_solution_reqs(reqs, panel.answer)
272 369
273 for symbol in panel.symbols: 370 if self.world.options.shuffle_symbols:
274 reqs.symbols.add(symbol) 371 for symbol in panel.symbols:
372 reqs.items.add(SYMBOL_ITEMS.get(symbol))
275 373
276 if panel.HasField("required_door"): 374 if panel.HasField("required_door"):
277 door_reqs = self.get_door_open_reqs(panel.required_door) 375 door_reqs = self.get_door_open_reqs(panel.required_door)
@@ -294,18 +392,26 @@ class Lingo2PlayerLogic:
294 door = self.world.static_logic.objects.doors[door_id] 392 door = self.world.static_logic.objects.doors[door_id]
295 reqs = AccessRequirements() 393 reqs = AccessRequirements()
296 394
297 # TODO: lavender_cubes, endings
298 if not door.HasField("complete_at") or door.complete_at == 0: 395 if not door.HasField("complete_at") or door.complete_at == 0:
299 for proxy in door.panels: 396 for proxy in door.panels:
300 panel_reqs = self.get_panel_reqs(proxy.panel, proxy.answer if proxy.HasField("answer") else None) 397 panel_reqs = self.get_panel_reqs(proxy.panel, proxy.answer if proxy.HasField("answer") else None)
301 reqs.merge(panel_reqs) 398 reqs.merge(panel_reqs)
302 elif door.complete_at == 1: 399 elif door.complete_at == 1:
303 reqs.or_logic.append([self.get_panel_reqs(proxy.panel, 400 disjunction = []
304 proxy.answer if proxy.HasField("answer") else None) 401 for proxy in door.panels:
305 for proxy in door.panels]) 402 proxy_reqs = self.get_panel_reqs(proxy.panel, proxy.answer if proxy.HasField("answer") else None)
403 if proxy_reqs.is_empty():
404 disjunction.clear()
405 break
406 else:
407 disjunction.append(proxy_reqs)
408 if len(disjunction) > 0:
409 reqs.or_logic.append(disjunction)
306 else: 410 else:
307 # TODO: Handle complete_at > 1 411 reqs.complete_at = door.complete_at
308 pass 412 for proxy in door.panels:
413 panel_reqs = self.get_panel_reqs(proxy.panel, proxy.answer if proxy.HasField("answer") else None)
414 reqs.possibilities.append(panel_reqs)
309 415
310 if door.HasField("control_center_color"): 416 if door.HasField("control_center_color"):
311 # TODO: Logic for ensuring two CC states aren't needed at once. 417 # TODO: Logic for ensuring two CC states aren't needed at once.
@@ -316,7 +422,8 @@ class Lingo2PlayerLogic:
316 if self.world.options.cyan_door_behavior == CyanDoorBehavior.option_collect_h2: 422 if self.world.options.cyan_door_behavior == CyanDoorBehavior.option_collect_h2:
317 reqs.rooms.add("The Repetitive - Main Room") 423 reqs.rooms.add("The Repetitive - Main Room")
318 elif self.world.options.cyan_door_behavior == CyanDoorBehavior.option_any_double_letter: 424 elif self.world.options.cyan_door_behavior == CyanDoorBehavior.option_any_double_letter:
319 reqs.cyans = True 425 if self.world.options.shuffle_letters != ShuffleLetters.option_unlocked:
426 reqs.cyans = True
320 elif self.world.options.cyan_door_behavior == CyanDoorBehavior.option_item: 427 elif self.world.options.cyan_door_behavior == CyanDoorBehavior.option_item:
321 # There shouldn't be any locations that are cyan doors. 428 # There shouldn't be any locations that are cyan doors.
322 pass 429 pass
@@ -335,12 +442,18 @@ class Lingo2PlayerLogic:
335 442
336 for ending_id in door.endings: 443 for ending_id in door.endings:
337 ending = self.world.static_logic.objects.endings[ending_id] 444 ending = self.world.static_logic.objects.endings[ending_id]
338 reqs.items.add(f"{ending.name.capitalize()} Ending (Achieved)") 445
446 if self.world.options.victory_condition.current_key.removesuffix("_ending").upper() == ending.name:
447 reqs.items.add("Victory")
448 else:
449 reqs.items.add(f"{ending.name.capitalize()} Ending (Achieved)")
339 450
340 for sub_door_id in door.doors: 451 for sub_door_id in door.doors:
341 sub_reqs = self.get_door_open_reqs(sub_door_id) 452 sub_reqs = self.get_door_open_reqs(sub_door_id)
342 reqs.merge(sub_reqs) 453 reqs.merge(sub_reqs)
343 454
455 reqs.simplify()
456
344 return reqs 457 return reqs
345 458
346 # This gets the requirements to open a door within the world. When a door is shuffled, this means having the item 459 # This gets the requirements to open a door within the world. When a door is shuffled, this means having the item
diff --git a/apworld/regions.py b/apworld/regions.py
index e30493c..4f1dd55 100644
--- a/apworld/regions.py
+++ b/apworld/regions.py
@@ -11,12 +11,18 @@ if TYPE_CHECKING:
11 11
12 12
13def create_region(room, world: "Lingo2World") -> Region: 13def create_region(room, world: "Lingo2World") -> Region:
14 new_region = Region(world.static_logic.get_room_region_name(room.id), world.player, world.multiworld) 14 return Region(world.static_logic.get_room_region_name(room.id), world.player, world.multiworld)
15 15
16
17def create_locations(room, new_region: Region, world: "Lingo2World", regions: dict[str, Region]):
16 for location in world.player_logic.locations_by_room.get(room.id, {}): 18 for location in world.player_logic.locations_by_room.get(room.id, {}):
19 reqs = location.reqs.copy()
20 if new_region.name in reqs.rooms:
21 reqs.rooms.remove(new_region.name)
22
17 new_location = Lingo2Location(world.player, world.static_logic.location_id_to_name[location.code], 23 new_location = Lingo2Location(world.player, world.static_logic.location_id_to_name[location.code],
18 location.code, new_region) 24 location.code, new_region)
19 new_location.access_rule = make_location_lambda(location.reqs, world) 25 new_location.access_rule = make_location_lambda(reqs, world, regions)
20 new_region.locations.append(new_location) 26 new_region.locations.append(new_location)
21 27
22 for event_name, item_name in world.player_logic.event_loc_item_by_room.get(room.id, {}).items(): 28 for event_name, item_name in world.player_logic.event_loc_item_by_room.get(room.id, {}).items():
@@ -25,17 +31,23 @@ def create_region(room, world: "Lingo2World") -> Region:
25 new_location.place_locked_item(event_item) 31 new_location.place_locked_item(event_item)
26 new_region.locations.append(new_location) 32 new_region.locations.append(new_location)
27 33
28 return new_region
29
30
31def create_regions(world: "Lingo2World"): 34def create_regions(world: "Lingo2World"):
32 regions = { 35 regions = {
33 "Menu": Region("Menu", world.player, world.multiworld) 36 "Menu": Region("Menu", world.player, world.multiworld)
34 } 37 }
35 38
39 region_and_room = []
40
41 # Create the regions in two stages. First, make the actual region objects and memoize them. Then, add all of the
42 # locations. This allows us to reference the actual region objects in the access rules for the locations, which is
43 # faster than having to look them up during access checking.
36 for room in world.static_logic.objects.rooms: 44 for room in world.static_logic.objects.rooms:
37 region = create_region(room, world) 45 region = create_region(room, world)
38 regions[region.name] = region 46 regions[region.name] = region
47 region_and_room.append((region, room))
48
49 for (region, room) in region_and_room:
50 create_locations(room, region, world, regions)
39 51
40 regions["Menu"].connect(regions["The Entry - Starting Room"], "Start Game") 52 regions["Menu"].connect(regions["The Entry - Starting Room"], "Start Game")
41 53
@@ -82,14 +94,18 @@ def create_regions(world: "Lingo2World"):
82 else: 94 else:
83 connection_name = f"{connection_name} (via panel {panel.name})" 95 connection_name = f"{connection_name} (via panel {panel.name})"
84 96
97 reqs.simplify()
98
85 if from_region in regions and to_region in regions: 99 if from_region in regions and to_region in regions:
86 connection = Entrance(world.player, connection_name, regions[from_region]) 100 connection = Entrance(world.player, connection_name, regions[from_region])
87 connection.access_rule = make_location_lambda(reqs, world) 101 connection.access_rule = make_location_lambda(reqs, world, regions)
88 102
89 regions[from_region].exits.append(connection) 103 regions[from_region].exits.append(connection)
90 connection.connect(regions[to_region]) 104 connection.connect(regions[to_region])
91 105
92 for region in reqs.rooms: 106 for region in reqs.rooms:
107 if region == from_region:
108 continue
93 world.multiworld.register_indirect_condition(regions[region], connection) 109 world.multiworld.register_indirect_condition(regions[region], connection)
94 110
95 world.multiworld.regions += regions.values() 111 world.multiworld.regions += regions.values()
diff --git a/apworld/requirements.txt b/apworld/requirements.txt
index 49ca0a7..dbc395b 100644
--- a/apworld/requirements.txt
+++ b/apworld/requirements.txt
@@ -1 +1 @@
-protobuf==3.20.3
\ No newline at end of file
+protobuf==3.20.3
diff --git a/apworld/rules.py b/apworld/rules.py
index 56486fa..c077858 100644
--- a/apworld/rules.py
+++ b/apworld/rules.py
@@ -1,14 +1,15 @@
1from collections.abc import Callable 1from collections.abc import Callable
2from typing import TYPE_CHECKING 2from typing import TYPE_CHECKING
3 3
4from BaseClasses import CollectionState 4from BaseClasses import CollectionState, Region
5from .player_logic import AccessRequirements 5from .player_logic import AccessRequirements
6 6
7if TYPE_CHECKING: 7if TYPE_CHECKING:
8 from . import Lingo2World 8 from . import Lingo2World
9 9
10 10
11def lingo2_can_satisfy_requirements(state: CollectionState, reqs: AccessRequirements, world: "Lingo2World") -> bool: 11def lingo2_can_satisfy_requirements(state: CollectionState, reqs: AccessRequirements, regions: list[Region],
12 world: "Lingo2World") -> bool:
12 if not all(state.has(item, world.player) for item in reqs.items): 13 if not all(state.has(item, world.player) for item in reqs.items):
13 return False 14 return False
14 15
@@ -18,7 +19,8 @@ def lingo2_can_satisfy_requirements(state: CollectionState, reqs: AccessRequirem
18 if not all(state.can_reach_region(region_name, world.player) for region_name in reqs.rooms): 19 if not all(state.can_reach_region(region_name, world.player) for region_name in reqs.rooms):
19 return False 20 return False
20 21
21 # TODO: symbols 22 if not all(state.can_reach(region) for region in regions):
23 return False
22 24
23 for letter_key, letter_level in reqs.letters.items(): 25 for letter_key, letter_level in reqs.letters.items():
24 if not state.has(letter_key, world.player, letter_level): 26 if not state.has(letter_key, world.player, letter_level):
@@ -30,11 +32,32 @@ def lingo2_can_satisfy_requirements(state: CollectionState, reqs: AccessRequirem
30 return False 32 return False
31 33
32 if len(reqs.or_logic) > 0: 34 if len(reqs.or_logic) > 0:
33 if not all(any(lingo2_can_satisfy_requirements(state, sub_reqs, world) for sub_reqs in subjunction) 35 if not all(any(lingo2_can_satisfy_requirements(state, sub_reqs, [], world) for sub_reqs in subjunction)
34 for subjunction in reqs.or_logic): 36 for subjunction in reqs.or_logic):
35 return False 37 return False
36 38
39 if reqs.complete_at is not None:
40 completed = 0
41 checked = 0
42 for possibility in reqs.possibilities:
43 checked += 1
44 if lingo2_can_satisfy_requirements(state, possibility, [], world):
45 completed += 1
46 if completed >= reqs.complete_at:
47 break
48 elif len(reqs.possibilities) - checked + completed < reqs.complete_at:
49 # There aren't enough remaining possibilities for the check to pass.
50 return False
51 if completed < reqs.complete_at:
52 return False
53
37 return True 54 return True
38 55
39def make_location_lambda(reqs: AccessRequirements, world: "Lingo2World") -> Callable[[CollectionState], bool]: 56def make_location_lambda(reqs: AccessRequirements, world: "Lingo2World",
40 return lambda state: lingo2_can_satisfy_requirements(state, reqs, world) 57 regions: dict[str, Region]) -> Callable[[CollectionState], bool]:
58 # Replace required rooms with regions for the top level requirement, which saves looking up the regions during rule
59 # checking.
60 required_regions = [regions[room_name] for room_name in reqs.rooms]
61 new_reqs = reqs.copy()
62 new_reqs.rooms.clear()
63 return lambda state: lingo2_can_satisfy_requirements(state, new_reqs, required_regions, world)
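
The `or_logic` and `complete_at` fields added in `player_logic.py` encode,
respectively, an AND-of-ORs and an "at least N of these possibilities"
threshold, and the rules above walk them recursively. Below is a self-contained
sketch of those semantics, using simplified stand-ins rather than the real
`AccessRequirements` and `CollectionState` classes:

```python
# Simplified model of the requirement semantics evaluated in rules.py (illustrative only).
from dataclasses import dataclass, field


@dataclass
class Reqs:
    items: set[str] = field(default_factory=set)
    # AND of ORs: every inner list must contain at least one satisfiable requirement.
    or_logic: list[list["Reqs"]] = field(default_factory=list)
    # Threshold: at least complete_at of the possibilities must be satisfiable.
    complete_at: int | None = None
    possibilities: list["Reqs"] = field(default_factory=list)


def satisfied(reqs: Reqs, inventory: set[str]) -> bool:
    if not reqs.items.issubset(inventory):
        return False
    if not all(any(satisfied(sub, inventory) for sub in disjunction)
               for disjunction in reqs.or_logic):
        return False
    if reqs.complete_at is not None:
        met = sum(satisfied(p, inventory) for p in reqs.possibilities)
        if met < reqs.complete_at:
            return False
    return True


# Example: a door that needs the Sun Symbol and at least two of three panels.
door = Reqs(items={"Sun Symbol"}, complete_at=2,
            possibilities=[Reqs(items={"A"}), Reqs(items={"B"}), Reqs(items={"C"})])
print(satisfied(door, {"Sun Symbol", "A", "C"}))  # True
print(satisfied(door, {"Sun Symbol", "A"}))       # False
```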
diff --git a/apworld/static_logic.py b/apworld/static_logic.py
index 3f6cdea..1ace1e7 100644
--- a/apworld/static_logic.py
+++ b/apworld/static_logic.py
@@ -1,4 +1,5 @@
1from .generated import data_pb2 as data_pb2 1from .generated import data_pb2 as data_pb2
2from .items import SYMBOL_ITEMS
2import pkgutil 3import pkgutil
3 4
4class Lingo2StaticLogic: 5class Lingo2StaticLogic:
@@ -8,9 +9,14 @@ class Lingo2StaticLogic:
8 item_name_to_id: dict[str, int] 9 item_name_to_id: dict[str, int]
9 location_name_to_id: dict[str, int] 10 location_name_to_id: dict[str, int]
10 11
12 item_name_groups: dict[str, list[str]]
13 location_name_groups: dict[str, list[str]]
14
11 def __init__(self): 15 def __init__(self):
12 self.item_id_to_name = {} 16 self.item_id_to_name = {}
13 self.location_id_to_name = {} 17 self.location_id_to_name = {}
18 self.item_name_groups = {}
19 self.location_name_groups = {}
14 20
15 file = pkgutil.get_data(__name__, "generated/data.binpb") 21 file = pkgutil.get_data(__name__, "generated/data.binpb")
16 self.objects = data_pb2.AllObjects() 22 self.objects = data_pb2.AllObjects()
@@ -29,17 +35,21 @@ class Lingo2StaticLogic:
29 letter_name = f"{letter.key.upper()}{'2' if letter.level2 else '1'}" 35 letter_name = f"{letter.key.upper()}{'2' if letter.level2 else '1'}"
30 location_name = f"{self.get_room_object_map_name(letter)} - {letter_name}" 36 location_name = f"{self.get_room_object_map_name(letter)} - {letter_name}"
31 self.location_id_to_name[letter.ap_id] = location_name 37 self.location_id_to_name[letter.ap_id] = location_name
38 self.location_name_groups.setdefault("Letters", []).append(location_name)
32 39
33 if not letter.level2: 40 if not letter.level2:
34 self.item_id_to_name[letter.ap_id] = letter.key.upper() 41 self.item_id_to_name[letter.ap_id] = letter.key.upper()
42 self.item_name_groups.setdefault("Letters", []).append(letter.key.upper())
35 43
36 for mastery in self.objects.masteries: 44 for mastery in self.objects.masteries:
37 location_name = f"{self.get_room_object_map_name(mastery)} - Mastery" 45 location_name = f"{self.get_room_object_map_name(mastery)} - Mastery"
38 self.location_id_to_name[mastery.ap_id] = location_name 46 self.location_id_to_name[mastery.ap_id] = location_name
47 self.location_name_groups.setdefault("Masteries", []).append(location_name)
39 48
40 for ending in self.objects.endings: 49 for ending in self.objects.endings:
41 location_name = f"{self.get_room_object_map_name(ending)} - {ending.name.title()} Ending" 50 location_name = f"{self.get_room_object_map_name(ending)} - {ending.name.title()} Ending"
42 self.location_id_to_name[ending.ap_id] = location_name 51 self.location_id_to_name[ending.ap_id] = location_name
52 self.location_name_groups.setdefault("Endings", []).append(location_name)
43 53
44 for progressive in self.objects.progressives: 54 for progressive in self.objects.progressives:
45 self.item_id_to_name[progressive.ap_id] = progressive.name 55 self.item_id_to_name[progressive.ap_id] = progressive.name
@@ -51,9 +61,13 @@ class Lingo2StaticLogic:
51 if keyholder.HasField("key"): 61 if keyholder.HasField("key"):
52 location_name = f"{self.get_room_object_location_prefix(keyholder)} - {keyholder.key.upper()} Keyholder" 62 location_name = f"{self.get_room_object_location_prefix(keyholder)} - {keyholder.key.upper()} Keyholder"
53 self.location_id_to_name[keyholder.ap_id] = location_name 63 self.location_id_to_name[keyholder.ap_id] = location_name
64 self.location_name_groups.setdefault("Keyholders", []).append(location_name)
54 65
55 self.item_id_to_name[self.objects.special_ids["A Job Well Done"]] = "A Job Well Done" 66 self.item_id_to_name[self.objects.special_ids["A Job Well Done"]] = "A Job Well Done"
56 67
68 for symbol_name in SYMBOL_ITEMS.values():
69 self.item_id_to_name[self.objects.special_ids[symbol_name]] = symbol_name
70
57 self.item_name_to_id = {name: ap_id for ap_id, name in self.item_id_to_name.items()} 71 self.item_name_to_id = {name: ap_id for ap_id, name in self.item_id_to_name.items()}
58 self.location_name_to_id = {name: ap_id for ap_id, name in self.location_id_to_name.items()} 72 self.location_name_to_id = {name: ap_id for ap_id, name in self.location_id_to_name.items()}
59 73
@@ -80,7 +94,7 @@ class Lingo2StaticLogic:
80 if door.type != data_pb2.DoorType.STANDARD: 94 if door.type != data_pb2.DoorType.STANDARD:
81 return None 95 return None
82 96
83 if len(door.keyholders) > 0 or len(door.endings) > 0: 97 if len(door.keyholders) > 0 or len(door.endings) > 0 or not door.HasField("complete_at"):
84 return None 98 return None
85 99
86 if len(door.panels) > 4: 100 if len(door.panels) > 4:
@@ -140,3 +154,6 @@ class Lingo2StaticLogic:
140 return f"{game_map.display_name} ({room.panel_display_name})" 154 return f"{game_map.display_name} ({room.panel_display_name})"
141 else: 155 else:
142 return game_map.display_name 156 return game_map.display_name
157
158 def get_data_version(self) -> int:
159 return self.objects.version
diff --git a/apworld/version.py b/apworld/version.py
new file mode 100644
index 0000000..87f8797
--- /dev/null
+++ b/apworld/version.py
@@ -0,0 +1 @@
APWORLD_VERSION = 2
diff --git a/client/Archipelago/gamedata.gd b/client/Archipelago/gamedata.gd
index f7a5d90..d8d16ed 100644
--- a/client/Archipelago/gamedata.gd
+++ b/client/Archipelago/gamedata.gd
@@ -5,15 +5,41 @@ var SCRIPT_proto
5var objects 5var objects
6var door_id_by_map_node_path = {} 6var door_id_by_map_node_path = {}
7var painting_id_by_map_node_path = {} 7var painting_id_by_map_node_path = {}
8var panel_id_by_map_node_path = {}
8var door_id_by_ap_id = {} 9var door_id_by_ap_id = {}
9var map_id_by_name = {} 10var map_id_by_name = {}
10var progressive_id_by_ap_id = {} 11var progressive_id_by_ap_id = {}
11var letter_id_by_ap_id = {} 12var letter_id_by_ap_id = {}
13var symbol_item_ids = []
14
15var kSYMBOL_ITEMS
12 16
13 17
14func _init(proto_script): 18func _init(proto_script):
15 SCRIPT_proto = proto_script 19 SCRIPT_proto = proto_script
16 20
21 kSYMBOL_ITEMS = {
22 SCRIPT_proto.PuzzleSymbol.SUN: "Sun Symbol",
23 SCRIPT_proto.PuzzleSymbol.SPARKLES: "Sparkles Symbol",
24 SCRIPT_proto.PuzzleSymbol.ZERO: "Zero Symbol",
25 SCRIPT_proto.PuzzleSymbol.EXAMPLE: "Example Symbol",
26 SCRIPT_proto.PuzzleSymbol.BOXES: "Boxes Symbol",
27 SCRIPT_proto.PuzzleSymbol.PLANET: "Planet Symbol",
28 SCRIPT_proto.PuzzleSymbol.PYRAMID: "Pyramid Symbol",
29 SCRIPT_proto.PuzzleSymbol.CROSS: "Cross Symbol",
30 SCRIPT_proto.PuzzleSymbol.SWEET: "Sweet Symbol",
31 SCRIPT_proto.PuzzleSymbol.GENDER: "Gender Symbol",
32 SCRIPT_proto.PuzzleSymbol.AGE: "Age Symbol",
33 SCRIPT_proto.PuzzleSymbol.SOUND: "Sound Symbol",
34 SCRIPT_proto.PuzzleSymbol.ANAGRAM: "Anagram Symbol",
35 SCRIPT_proto.PuzzleSymbol.JOB: "Job Symbol",
36 SCRIPT_proto.PuzzleSymbol.STARS: "Stars Symbol",
37 SCRIPT_proto.PuzzleSymbol.NULL: "Null Symbol",
38 SCRIPT_proto.PuzzleSymbol.EVAL: "Eval Symbol",
39 SCRIPT_proto.PuzzleSymbol.LINGO: "Lingo Symbol",
40 SCRIPT_proto.PuzzleSymbol.QUESTION: "Question Symbol",
41 }
42
17 43
18func load(data_bytes): 44func load(data_bytes):
19 objects = SCRIPT_proto.AllObjects.new() 45 objects = SCRIPT_proto.AllObjects.new()
@@ -58,6 +84,19 @@ func load(data_bytes):
58 for letter in objects.get_letters(): 84 for letter in objects.get_letters():
59 letter_id_by_ap_id[letter.get_ap_id()] = letter.get_id() 85 letter_id_by_ap_id[letter.get_ap_id()] = letter.get_id()
60 86
87 for panel in objects.get_panels():
88 var room = objects.get_rooms()[panel.get_room_id()]
89 var map = objects.get_maps()[room.get_map_id()]
90
91 if not map.get_name() in panel_id_by_map_node_path:
92 panel_id_by_map_node_path[map.get_name()] = {}
93
94 var map_data = panel_id_by_map_node_path[map.get_name()]
95 map_data[panel.get_path()] = panel.get_id()
96
97 for symbol_name in kSYMBOL_ITEMS.values():
98 symbol_item_ids.append(objects.get_special_ids()[symbol_name])
99
61 100
62func get_door_for_map_node_path(map_name, node_path): 101func get_door_for_map_node_path(map_name, node_path):
63 if not door_id_by_map_node_path.has(map_name): 102 if not door_id_by_map_node_path.has(map_name):
@@ -67,6 +106,14 @@ func get_door_for_map_node_path(map_name, node_path):
67 return map_data.get(node_path, null) 106 return map_data.get(node_path, null)
68 107
69 108
109func get_panel_for_map_node_path(map_name, node_path):
110 if not panel_id_by_map_node_path.has(map_name):
111 return null
112
113 var map_data = panel_id_by_map_node_path[map_name]
114 return map_data.get(node_path, null)
115
116
70func get_door_ap_id(door_id): 117func get_door_ap_id(door_id):
71 var door = objects.get_doors()[door_id] 118 var door = objects.get_doors()[door_id]
72 if door.has_ap_id(): 119 if door.has_ap_id():
diff --git a/client/Archipelago/manager.gd b/client/Archipelago/manager.gd
index cd0654f..8a15728 100644
--- a/client/Archipelago/manager.gd
+++ b/client/Archipelago/manager.gd
@@ -1,6 +1,6 @@
1extends Node 1extends Node
2 2
3const my_version = "0.1.0" 3const MOD_VERSION = 2
4 4
5var SCRIPT_client 5var SCRIPT_client
6var SCRIPT_keyboard 6var SCRIPT_keyboard
@@ -41,12 +41,15 @@ const kCYAN_DOOR_BEHAVIOR_H2 = 0
41const kCYAN_DOOR_BEHAVIOR_DOUBLE_LETTER = 1 41const kCYAN_DOOR_BEHAVIOR_DOUBLE_LETTER = 1
42const kCYAN_DOOR_BEHAVIOR_ITEM = 2 42const kCYAN_DOOR_BEHAVIOR_ITEM = 2
43 43
44var apworld_version = [0, 0]
44var cyan_door_behavior = kCYAN_DOOR_BEHAVIOR_H2 45var cyan_door_behavior = kCYAN_DOOR_BEHAVIOR_H2
45var daedalus_roof_access = false 46var daedalus_roof_access = false
46var keyholder_sanity = false 47var keyholder_sanity = false
47var shuffle_control_center_colors = false 48var shuffle_control_center_colors = false
48var shuffle_doors = false 49var shuffle_doors = false
50var shuffle_gallery_paintings = false
49var shuffle_letters = kSHUFFLE_LETTERS_VANILLA 51var shuffle_letters = kSHUFFLE_LETTERS_VANILLA
52var shuffle_symbols = false
50var victory_condition = -1 53var victory_condition = -1
51 54
52signal could_not_connect 55signal could_not_connect
@@ -183,6 +186,11 @@ func _process_item(item, index, from, flags, amount):
183 if not letter.has_level2() or not letter.get_level2(): 186 if not letter.has_level2() or not letter.get_level2():
184 _process_key_item(letter.get_key(), amount) 187 _process_key_item(letter.get_key(), amount)
185 188
189 if gamedata.symbol_item_ids.has(item):
190 var player = get_tree().get_root().get_node_or_null("scene/player")
191 if player != null:
192 player.emit_signal("evaluate_solvability")
193
186 # Show a message about the item if it's new. 194 # Show a message about the item if it's new.
187 if index != null and index > _last_new_item: 195 if index != null and index > _last_new_item:
188 _last_new_item = index 196 _last_new_item = index
@@ -355,9 +363,14 @@ func _client_connected(slot_data):
355 keyholder_sanity = bool(slot_data.get("keyholder_sanity", false)) 363 keyholder_sanity = bool(slot_data.get("keyholder_sanity", false))
356 shuffle_control_center_colors = bool(slot_data.get("shuffle_control_center_colors", false)) 364 shuffle_control_center_colors = bool(slot_data.get("shuffle_control_center_colors", false))
357 shuffle_doors = bool(slot_data.get("shuffle_doors", false)) 365 shuffle_doors = bool(slot_data.get("shuffle_doors", false))
366 shuffle_gallery_paintings = bool(slot_data.get("shuffle_gallery_paintings", false))
358 shuffle_letters = int(slot_data.get("shuffle_letters", 0)) 367 shuffle_letters = int(slot_data.get("shuffle_letters", 0))
368 shuffle_symbols = bool(slot_data.get("shuffle_symbols", false))
359 victory_condition = int(slot_data.get("victory_condition", 0)) 369 victory_condition = int(slot_data.get("victory_condition", 0))
360 370
371 if slot_data.has("version"):
372 apworld_version = [int(slot_data["version"][0]), int(slot_data["version"][1])]
373
361 # Set up item locks. 374 # Set up item locks.
362 _item_locks = {} 375 _item_locks = {}
363 376
@@ -392,6 +405,11 @@ func _client_connected(slot_data):
392 for door in door_group.get_doors(): 405 for door in door_group.get_doors():
393 _item_locks[door] = [door_group.get_ap_id(), 1] 406 _item_locks[door] = [door_group.get_ap_id(), 1]
394 407
408 if shuffle_gallery_paintings:
409 for door in gamedata.objects.get_doors():
410 if door.get_type() == gamedata.SCRIPT_proto.DoorType.GALLERY_PAINTING:
411 _item_locks[door.get_id()] = [door.get_ap_id(), 1]
412
395 if cyan_door_behavior == kCYAN_DOOR_BEHAVIOR_ITEM: 413 if cyan_door_behavior == kCYAN_DOOR_BEHAVIOR_ITEM:
396 for door_group in gamedata.objects.get_door_groups(): 414 for door_group in gamedata.objects.get_door_groups():
397 if door_group.get_type() == gamedata.SCRIPT_proto.DoorGroupType.CYAN_DOORS: 415 if door_group.get_type() == gamedata.SCRIPT_proto.DoorGroupType.CYAN_DOORS:
diff --git a/client/Archipelago/panel.gd b/client/Archipelago/panel.gd
new file mode 100644
index 0000000..fdaaf0e
--- /dev/null
+++ b/client/Archipelago/panel.gd
@@ -0,0 +1,101 @@
1extends "res://scripts/nodes/panel.gd"
2
3var panel_logic = null
4var symbol_solvable = true
5
6var black = load("res://assets/materials/black.material")
7
8
9func _ready():
10 super._ready()
11
12 var node_path = String(
13 get_tree().get_root().get_node("scene").get_path_to(self).get_concatenated_names()
14 )
15
16 var gamedata = global.get_node("Gamedata")
17 var panel_id = gamedata.get_panel_for_map_node_path(global.map, node_path)
18 if panel_id != null:
19 var ap = global.get_node("Archipelago")
20 if ap.shuffle_symbols:
21 if global.map == "the_entry" and node_path == "Panels/Entry/front_1":
22 clue = "i"
23 symbol = ""
24
25 setField("clue", clue)
26 setField("symbol", symbol)
27
28 panel_logic = gamedata.objects.get_panels()[panel_id]
29 checkSymbolSolvable()
30
31 if not symbol_solvable:
32 get_tree().get_root().get_node("scene/player").connect(
33 "evaluate_solvability", evaluateSolvability
34 )
35
36
37func checkSymbolSolvable():
38 var old_solvable = symbol_solvable
39 symbol_solvable = true
40
41 if panel_logic == null:
42 # There's no logic for this panel.
43 return
44
45 var ap = global.get_node("Archipelago")
46 if not ap.shuffle_symbols:
47 # Symbols aren't item-locked.
48 return
49
50 var gamedata = global.get_node("Gamedata")
51 for symbol in panel_logic.get_symbols():
52 var item_name = gamedata.kSYMBOL_ITEMS.get(symbol)
53 var item_id = gamedata.objects.get_special_ids()[item_name]
54 if ap.client.getItemAmount(item_id) < 1:
55 symbol_solvable = false
56 break
57
58 if symbol_solvable != old_solvable:
59 if symbol_solvable:
60 setField("clue", clue)
61 setField("symbol", symbol)
62 setField("answer", answer)
63 else:
64 quad_mesh.surface_set_material(0, black)
65 get_node("Hinge/clue").text = "missing"
66 get_node("Hinge/answer").text = "symbols"
67
68
69func checkSolvable(key):
70 checkSymbolSolvable()
71 if not symbol_solvable:
72 return false
73
74 return super.checkSolvable(key)
75
76
77func evaluateSolvability():
78 checkSolvable("")
79
80
81func passedInput(key, skip_focus_check = false):
82 if not symbol_solvable:
83 return
84
85 super.passedInput(key, skip_focus_check)
86
87
88func focus():
89 if not symbol_solvable:
90 has_focus = false
91 return
92
93 super.focus()
94
95
96func unfocus():
97 if not symbol_solvable:
98 has_focus = false
99 return
100
101 super.unfocus()
diff --git a/client/Archipelago/player.gd b/client/Archipelago/player.gd
index dd6aa2b..9de3e07 100644
--- a/client/Archipelago/player.gd
+++ b/client/Archipelago/player.gd
@@ -16,6 +16,8 @@ const kEndingNameByVictoryValue = {
16 12: "WHITE", 16 12: "WHITE",
17} 17}
18 18
19signal evaluate_solvability
20
19 21
20func _ready(): 22func _ready():
21 var khl_script = load("res://scripts/nodes/keyHolderListener.gd") 23 var khl_script = load("res://scripts/nodes/keyHolderListener.gd")
@@ -188,6 +190,29 @@ func _ready():
188 warp_enter.rotation_degrees.y = 90 190 warp_enter.rotation_degrees.y = 90
189 get_parent().add_child.call_deferred(warp_enter) 191 get_parent().add_child.call_deferred(warp_enter)
190 192
193 if global.map == "the_entry":
194 # Remove door behind X1.
195 var door_node = get_tree().get_root().get_node("/root/scene/Components/Doors/exit_1")
196 door_node.handleTriggered()
197
198 # Display win condition.
199 var sign_prefab = preload("res://objects/nodes/sign.tscn")
200 var sign1 = sign_prefab.instantiate()
201 sign1.position = Vector3(-7, 5, -15.01)
202 sign1.text = "victory"
203 get_parent().add_child.call_deferred(sign1)
204
205 var sign2 = sign_prefab.instantiate()
206 sign2.position = Vector3(-7, 4, -15.01)
207 sign2.text = "%s ending" % kEndingNameByVictoryValue.get(ap.victory_condition, "?")
208
209 var sign2_color = kEndingNameByVictoryValue.get(ap.victory_condition, "coral").to_lower()
210 if sign2_color == "white":
211 sign2_color = "silver"
212
213 sign2.material = load("res://assets/materials/%s.material" % sign2_color)
214 get_parent().add_child.call_deferred(sign2)
215
191 super._ready() 216 super._ready()
192 217
193 await get_tree().process_frame 218 await get_tree().process_frame
diff --git a/client/Archipelago/settings_screen.gd b/client/Archipelago/settings_screen.gd
index aaaf72a..14975e5 100644
--- a/client/Archipelago/settings_screen.gd
+++ b/client/Archipelago/settings_screen.gd
@@ -40,10 +40,13 @@ func _ready():
40 ResourceLoader.load("user://maps/Archipelago/keyHolderResetterListener.gd") 40 ResourceLoader.load("user://maps/Archipelago/keyHolderResetterListener.gd")
41 ) 41 )
42 installScriptExtension(ResourceLoader.load("user://maps/Archipelago/painting.gd")) 42 installScriptExtension(ResourceLoader.load("user://maps/Archipelago/painting.gd"))
43 installScriptExtension(ResourceLoader.load("user://maps/Archipelago/panel.gd"))
43 installScriptExtension(ResourceLoader.load("user://maps/Archipelago/pauseMenu.gd")) 44 installScriptExtension(ResourceLoader.load("user://maps/Archipelago/pauseMenu.gd"))
44 installScriptExtension(ResourceLoader.load("user://maps/Archipelago/player.gd")) 45 installScriptExtension(ResourceLoader.load("user://maps/Archipelago/player.gd"))
45 installScriptExtension(ResourceLoader.load("user://maps/Archipelago/saver.gd")) 46 installScriptExtension(ResourceLoader.load("user://maps/Archipelago/saver.gd"))
47 installScriptExtension(ResourceLoader.load("user://maps/Archipelago/teleport.gd"))
46 installScriptExtension(ResourceLoader.load("user://maps/Archipelago/teleportListener.gd")) 48 installScriptExtension(ResourceLoader.load("user://maps/Archipelago/teleportListener.gd"))
49 installScriptExtension(ResourceLoader.load("user://maps/Archipelago/visibilityListener.gd"))
47 installScriptExtension(ResourceLoader.load("user://maps/Archipelago/worldportListener.gd")) 50 installScriptExtension(ResourceLoader.load("user://maps/Archipelago/worldportListener.gd"))
48 51
49 var proto_script = load("user://maps/Archipelago/generated/proto.gd") 52 var proto_script = load("user://maps/Archipelago/generated/proto.gd")
@@ -66,6 +69,7 @@ func _ready():
66 global.add_child(textclient_instance) 69 global.add_child(textclient_instance)
67 70
68 var ap = global.get_node("Archipelago") 71 var ap = global.get_node("Archipelago")
72 var gamedata = global.get_node("Gamedata")
69 ap.connect("ap_connected", connectionSuccessful) 73 ap.connect("ap_connected", connectionSuccessful)
70 ap.connect("could_not_connect", connectionUnsuccessful) 74 ap.connect("could_not_connect", connectionUnsuccessful)
71 ap.connect("connect_status", connectionStatus) 75 ap.connect("connect_status", connectionStatus)
@@ -89,13 +93,17 @@ func _ready():
89 history_box.get_popup().connect("id_pressed", historySelected) 93 history_box.get_popup().connect("id_pressed", historySelected)
90 94
91 # Show client version. 95 # Show client version.
92 $Panel/title.text = "ARCHIPELAGO (%s)" % ap.my_version 96 $Panel/title.text = "ARCHIPELAGO (%d.%d)" % [gamedata.objects.get_version(), ap.MOD_VERSION]
93 97
94 # Increase font size in text boxes. 98 # Increase font size in text boxes.
95 $Panel/server_box.add_theme_font_size_override("font_size", 36) 99 $Panel/server_box.add_theme_font_size_override("font_size", 36)
96 $Panel/player_box.add_theme_font_size_override("font_size", 36) 100 $Panel/player_box.add_theme_font_size_override("font_size", 36)
97 $Panel/password_box.add_theme_font_size_override("font_size", 36) 101 $Panel/password_box.add_theme_font_size_override("font_size", 36)
98 102
103 # Set up version mismatch dialog.
104 $Panel/VersionMismatch.connect("confirmed", startGame)
105 $Panel/VersionMismatch.get_cancel_button().pressed.connect(versionMismatchDeclined)
106
99 107
100# Adapted from https://gitlab.com/Delta-V-Modding/Mods/-/blob/main/game/ModLoader.gd 108# Adapted from https://gitlab.com/Delta-V-Modding/Mods/-/blob/main/game/ModLoader.gd
101func installScriptExtension(childScript: Resource): 109func installScriptExtension(childScript: Resource):
@@ -125,6 +133,33 @@ func connectionStatus(message):
125 133
126func connectionSuccessful(): 134func connectionSuccessful():
127 var ap = global.get_node("Archipelago") 135 var ap = global.get_node("Archipelago")
136 var gamedata = global.get_node("Gamedata")
137
138 # Check for major version mismatch.
139 if ap.apworld_version[0] != gamedata.objects.get_version():
140 $Panel/AcceptDialog.exclusive = false
141
142 var popup = self.get_node("Panel/VersionMismatch")
143 popup.title = "Version Mismatch!"
144 popup.dialog_text = (
145 "This slot was generated using v%d.%d of the Lingo 2 apworld,\nwhich has a different major version than this client (v%d.%d).\nIt is highly recommended to play using the correct version of the client.\nYou may experience bugs or logic issues if you continue."
146 % [
147 ap.apworld_version[0],
148 ap.apworld_version[1],
149 gamedata.objects.get_version(),
150 ap.MOD_VERSION
151 ]
152 )
153 popup.exclusive = true
154 popup.popup_centered()
155
156 return
157
158 startGame()
159
160
161func startGame():
162 var ap = global.get_node("Archipelago")
128 163
129 # Save connection details 164 # Save connection details
130 var connection_details = [ap.ap_server, ap.ap_user, ap.ap_pass] 165 var connection_details = [ap.ap_server, ap.ap_user, ap.ap_pass]
@@ -158,9 +193,12 @@ func connectionSuccessful():
158 clearResourceCache("res://objects/nodes/listeners/keyHolderChecker.tscn") 193 clearResourceCache("res://objects/nodes/listeners/keyHolderChecker.tscn")
159 clearResourceCache("res://objects/nodes/listeners/keyHolderResetterListener.tscn") 194 clearResourceCache("res://objects/nodes/listeners/keyHolderResetterListener.tscn")
160 clearResourceCache("res://objects/nodes/listeners/teleportListener.tscn") 195 clearResourceCache("res://objects/nodes/listeners/teleportListener.tscn")
196 clearResourceCache("res://objects/nodes/listeners/visibilityListener.tscn")
161 clearResourceCache("res://objects/nodes/listeners/worldportListener.tscn") 197 clearResourceCache("res://objects/nodes/listeners/worldportListener.tscn")
198 clearResourceCache("res://objects/nodes/panel.tscn")
162 clearResourceCache("res://objects/nodes/player.tscn") 199 clearResourceCache("res://objects/nodes/player.tscn")
163 clearResourceCache("res://objects/nodes/saver.tscn") 200 clearResourceCache("res://objects/nodes/saver.tscn")
201 clearResourceCache("res://objects/nodes/teleport.tscn")
164 clearResourceCache("res://objects/scenes/menus/pause_menu.tscn") 202 clearResourceCache("res://objects/scenes/menus/pause_menu.tscn")
165 203
166 var paintings_dir = DirAccess.open("res://objects/meshes/paintings") 204 var paintings_dir = DirAccess.open("res://objects/meshes/paintings")
@@ -186,6 +224,10 @@ func connectionUnsuccessful(error_message):
186 popup.popup_centered() 224 popup.popup_centered()
187 225
188 226
227func versionMismatchDeclined():
228 $Panel/AcceptDialog.hide()
229
230
189func historySelected(index): 231func historySelected(index):
190 var ap = global.get_node("Archipelago") 232 var ap = global.get_node("Archipelago")
191 var details = ap.connection_history[index] 233 var details = ap.connection_history[index]
diff --git a/client/Archipelago/teleport.gd b/client/Archipelago/teleport.gd new file mode 100644 index 0000000..428d50b --- /dev/null +++ b/client/Archipelago/teleport.gd
@@ -0,0 +1,38 @@
1extends "res://scripts/nodes/teleport.gd"
2
3var item_id
4var item_amount
5
6
7func _ready():
8 var node_path = String(
9 get_tree().get_root().get_node("scene").get_path_to(self).get_concatenated_names()
10 )
11
12 var gamedata = global.get_node("Gamedata")
13 var door_id = gamedata.get_door_for_map_node_path(global.map, node_path)
14 if door_id != null:
15 var ap = global.get_node("Archipelago")
16 var item_lock = ap.get_item_id_for_door(door_id)
17
18 if item_lock != null:
19 item_id = item_lock[0]
20 item_amount = item_lock[1]
21
22 self.senders = []
23 self.senderGroup = []
24 self.nested = false
25 self.complete_at = 0
26 self.max_length = 0
27 self.excludeSenders = []
28
29 call_deferred("_readier")
30
31 super._ready()
32
33
34func _readier():
35 var ap = global.get_node("Archipelago")
36
37 if ap.client.getItemAmount(item_id) >= item_amount:
38 handleTriggered()
diff --git a/client/Archipelago/teleportListener.gd b/client/Archipelago/teleportListener.gd index 4a7deec..6f363af 100644 --- a/client/Archipelago/teleportListener.gd +++ b/client/Archipelago/teleportListener.gd
@@ -9,6 +9,17 @@ func _ready():
9 get_tree().get_root().get_node("scene").get_path_to(self).get_concatenated_names() 9 get_tree().get_root().get_node("scene").get_path_to(self).get_concatenated_names()
10 ) 10 )
11 11
12 if (
13 global.map == "daedalus"
14 and (
15 node_path == "Components/Triggers/teleportListenerConnections"
16 or node_path == "Components/Triggers/teleportListenerConnections2"
17 )
18 ):
19 # Effectively disable these.
20 teleport_point = target_path.position
21 return
22
12 var gamedata = global.get_node("Gamedata") 23 var gamedata = global.get_node("Gamedata")
13 var door_id = gamedata.get_door_for_map_node_path(global.map, node_path) 24 var door_id = gamedata.get_door_for_map_node_path(global.map, node_path)
14 if door_id != null: 25 if door_id != null:
diff --git a/client/Archipelago/visibilityListener.gd b/client/Archipelago/visibilityListener.gd new file mode 100644 index 0000000..5ea17a0 --- /dev/null +++ b/client/Archipelago/visibilityListener.gd
@@ -0,0 +1,38 @@
1extends "res://scripts/nodes/listeners/visibilityListener.gd"
2
3var item_id
4var item_amount
5
6
7func _ready():
8 var node_path = String(
9 get_tree().get_root().get_node("scene").get_path_to(self).get_concatenated_names()
10 )
11
12 var gamedata = global.get_node("Gamedata")
13 var door_id = gamedata.get_door_for_map_node_path(global.map, node_path)
14 if door_id != null:
15 var ap = global.get_node("Archipelago")
16 var item_lock = ap.get_item_id_for_door(door_id)
17
18 if item_lock != null:
19 item_id = item_lock[0]
20 item_amount = item_lock[1]
21
22 self.senders = []
23 self.senderGroup = []
24 self.nested = false
25 self.complete_at = 0
26 self.max_length = 0
27 self.excludeSenders = []
28
29 call_deferred("_readier")
30
31 super._ready()
32
33
34func _readier():
35 var ap = global.get_node("Archipelago")
36
37 if ap.client.getItemAmount(item_id) >= item_amount:
38 handleTriggered()
diff --git a/client/Archipelago/worldportListener.gd b/client/Archipelago/worldportListener.gd index c31c825..5c2faff 100644 --- a/client/Archipelago/worldportListener.gd +++ b/client/Archipelago/worldportListener.gd
@@ -1,8 +1,8 @@
1extends "res://scripts/nodes/listeners/worldportListener.gd" 1extends "res://scripts/nodes/listeners/worldportListener.gd"
2 2
3 3
4func changeScene(): 4func handleTriggered():
5 if exit == "menus/credits": 5 if exit == "menus/credits":
6 return 6 return
7 7
8 super.changeScene() 8 super.handleTriggered()
diff --git a/client/README.md b/client/README.md new file mode 100644 index 0000000..99589c5 --- /dev/null +++ b/client/README.md
@@ -0,0 +1,90 @@
1# Lingo 2 Archipelago Client
2
3The Lingo 2 Archipelago Client is a mod for Lingo 2 that allows you to connect
4to an Archipelago Multiworld and randomize your game.
5
6## Installation
7
81. Download the Lingo 2 Archipelago Randomizer from
9 [the releases page](https://code.fourisland.com/lingo2-archipelago/about/client/CHANGELOG.md).
102. Open up Lingo 2, go to settings, and click View Game Data. This should open
11 up a folder in Windows Explorer.
123. Unzip the randomizer into the "maps" folder. Ensure that archipelago.tscn and
13 the Archipelago folder are both within the maps folder.
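
After unzipping, the layout should look roughly like this (a sketch only; the
exact contents of the Archipelago folder depend on the release):

```
maps/
├── archipelago.tscn
└── Archipelago/
    └── (mod scripts and data)
```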
14
15**NOTE**: It is important that the major version number of your client matches
16the major version number of the apworld you generated with.
17
18## Joining a Multiworld game
19
201. Launch Lingo 2.
212. Click on Level Selection, and choose Archipelago from the list.
223. The selected player is generally ignored by the mod, and you don't even need
23 to ensure you use the same player between connections. However, if your
24 player name has a gift map associated with it, Lingo 2 will prioritize the
25 gift map over loading the mod, so in that case you should choose another
26 player.
274. Press Play.
285. Enter the Archipelago address, slot name, and password into the fields.
296. Press Connect.
307. Enjoy!
31
32To continue an earlier game, you can perform the exact same steps as above. You
33will probably have to re-select Archipelago from the Level Selection screen, as
34the game does not remember which level you were playing.
35
36**Note**: Running the randomizer modifies the game's memory. If you want to play
37the base game after playing the randomizer, you need to restart Lingo 2 first.
38
39## Running from source
40
41The mod is mostly written in GDScript, which is parsed and executed by Lingo 2
42itself, and thus does not need to be compiled. However, there are two files that
43need to be generated before the client can be run.
44
45The first file is `data.binpb`, the datafile containing the randomizer logic.
46You can read about how to generate it on
47[its own README page](https://code.fourisland.com/lingo2-archipelago/about/data/README.md).
48Once you have it, put it in a subfolder of `client` called `generated`.
49
50The second generated file is `proto.gd`. This file allows Lingo 2 to read the
51datafile. We use a Godot script to generate it, which means
52[the Godot Editor](https://godotengine.org/download/) is required. From the root
53of the repository:
54
55```shell
56cd vendor\godobuf
57godot --headless -s addons\protobuf\protobuf_cmdln.gd --input=..\..\proto\data.proto ^
58 --output=..\..\client\Archipelago\generated\proto.gd
59```
60
61If you are not on Windows, replace the backslashes with forward slashes as
62appropriate (and the caret with a backslash). You will also probably need to
63replace "godot" at the start of the second line with a path to a Godot Editor
64executable.
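
For reference, the same command on Linux or macOS would look roughly like this
(a sketch assuming "godot" is on your PATH; substitute the path to your Godot
Editor executable if it is not):

```shell
cd vendor/godobuf
godot --headless -s addons/protobuf/protobuf_cmdln.gd --input=../../proto/data.proto \
  --output=../../client/Archipelago/generated/proto.gd
```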
65
66After generating those two files, the contents of the `client` folder (minus
67this README) can be pasted into the Lingo 2 maps directory as described above.
68
69## Frequently Asked Questions
70
71### Is my progress saved locally?
72
73Lingo 2 autosaves your progress every time you solve a puzzle, get a
74collectable, or interact with a keyholder. The randomizer generates a savefile
75name based on your Multiworld seed and slot number, so you should be able to
76seamlessly switch between multiworlds and even slots within a multiworld.
77
78The exception to this is different rooms created from the same multiworld seed.
79The client is unable to tell rooms in a seed apart (this is a limitation of the
80Archipelago API), so the client will use the same save file for the same slot in
81different rooms on the same seed. You can work around this by manually moving or
82removing the save file from the level1 save file directory.
83
84If you play the base game again, you will see one or more save files with a long
85name that begins with "zzAP\_". These are the saves for your multiworlds. They
86can be safely deleted after you have completed the associated multiworld. It is
87not recommended to load these save files outside of the randomizer.
88
89A connection to Archipelago is required to resume playing a multiworld. This is
90because the set of items you have received is not stored locally.
diff --git a/client/archipelago.tscn b/client/archipelago.tscn index 40dd46f..a74c69e 100644 --- a/client/archipelago.tscn +++ b/client/archipelago.tscn
@@ -150,6 +150,10 @@ caret_blink = true
150offset_right = 83.0 150offset_right = 83.0
151offset_bottom = 58.0 151offset_bottom = 58.0
152 152
153[node name="VersionMismatch" type="ConfirmationDialog" parent="Panel"]
154offset_right = 83.0
155offset_bottom = 58.0
156
153[node name="connection_history" type="MenuButton" parent="Panel"] 157[node name="connection_history" type="MenuButton" parent="Panel"]
154offset_left = 1239.0 158offset_left = 1239.0
155offset_top = 276.0 159offset_top = 276.0
diff --git a/data/README.md b/data/README.md new file mode 100644 index 0000000..bf0a51b --- /dev/null +++ b/data/README.md
@@ -0,0 +1,13 @@
1# Lingo 2 Randomizer Data
2
3This folder contains the logic for the Lingo 2 randomizer in a human-readable
4format. This data is compiled into a single file and used in the various parts
5of the randomizer project (client, apworld, etc).
6
7The data is structured using [Protocol Buffers](https://protobuf.dev/). The
8schema for the human-readable format is
9[located in the repository](https://code.fourisland.com/lingo2-archipelago/tree/proto/human.proto).
10
11## Compiling
12
13Hi.
diff --git a/data/connections.txtpb b/data/connections.txtpb index a79778f..35e7ef8 100644 --- a/data/connections.txtpb +++ b/data/connections.txtpb
@@ -20,7 +20,7 @@ connections {
20 from { 20 from {
21 port { 21 port {
22 map: "the_entry" 22 map: "the_entry"
23 room: "Flipped Second Room" 23 room: "Four Rooms Entrance"
24 name: "FOUR" 24 name: "FOUR"
25 } 25 }
26 } 26 }
@@ -141,7 +141,7 @@ connections {
141 to { 141 to {
142 port { 142 port {
143 map: "the_darkroom" 143 map: "the_darkroom"
144 room: "First Room" 144 room: "Cyan Hallway"
145 name: "COLORFUL" 145 name: "COLORFUL"
146 } 146 }
147 } 147 }
@@ -157,7 +157,7 @@ connections {
157 to { 157 to {
158 port { 158 port {
159 map: "the_darkroom" 159 map: "the_darkroom"
160 room: "Second Room" 160 room: "Congruent Entrance"
161 name: "CONGRUENT" 161 name: "CONGRUENT"
162 } 162 }
163 } 163 }
@@ -233,7 +233,7 @@ connections {
233 from { 233 from {
234 port { 234 port {
235 map: "the_darkroom" 235 map: "the_darkroom"
236 room: "First Room" 236 room: "Double Sided Entrance"
237 name: "DOUBLESIDED" 237 name: "DOUBLESIDED"
238 } 238 }
239 } 239 }
@@ -308,6 +308,23 @@ connections {
308 name: "GALLERY" 308 name: "GALLERY"
309 } 309 }
310 } 310 }
311 oneway: true
312}
313connections {
314 from {
315 port {
316 map: "the_butterfly"
317 room: "Main Area"
318 name: "GALLERY"
319 }
320 }
321 to {
322 room {
323 map: "the_gallery"
324 name: "Main Area"
325 }
326 }
327 oneway: true
311} 328}
312connections { 329connections {
313 from { 330 from {
@@ -618,7 +635,7 @@ connections {
618 from { 635 from {
619 port { 636 port {
620 map: "the_entry" 637 map: "the_entry"
621 room: "Link Area" 638 room: "Liberated Entrance"
622 name: "BLUE" 639 name: "BLUE"
623 } 640 }
624 } 641 }
@@ -666,7 +683,7 @@ connections {
666 from { 683 from {
667 port { 684 port {
668 map: "the_entry" 685 map: "the_entry"
669 room: "Link Area" 686 room: "Literate Entrance"
670 name: "BROWN" 687 name: "BROWN"
671 } 688 }
672 } 689 }
@@ -841,6 +858,8 @@ connections {
841 } 858 }
842 oneway: true 859 oneway: true
843} 860}
861# Two one-way connections because the CLUE panel only needs to be solved to
862# go from The Great to The Partial.
844connections { 863connections {
845 from { 864 from {
846 port { 865 port {
@@ -856,6 +875,25 @@ connections {
856 name: "GREAT" 875 name: "GREAT"
857 } 876 }
858 } 877 }
878 oneway: true
879}
880connections {
881 from {
882 port {
883 map: "the_partial"
884 room: "Obverse Side"
885 name: "GREAT"
886 }
887 }
888 to {
889 port {
890 map: "the_great"
891 room: "West Side"
892 name: "PARTIAL"
893 }
894 }
895 oneway: true
896 bypass_target_door: true
859} 897}
860connections { 898connections {
861 from { 899 from {
@@ -1435,7 +1473,6 @@ connections {
1435 name: "GREAT" 1473 name: "GREAT"
1436 } 1474 }
1437 } 1475 }
1438 door { map: "the_great" name: "Daedalus Entrance" }
1439 oneway: true 1476 oneway: true
1440} 1477}
1441connections { 1478connections {
@@ -1454,6 +1491,7 @@ connections {
1454 } 1491 }
1455 } 1492 }
1456 oneway: true 1493 oneway: true
1494 bypass_target_door: true
1457} 1495}
1458connections { 1496connections {
1459 from { 1497 from {
@@ -1749,12 +1787,13 @@ connections {
1749 } 1787 }
1750 } 1788 }
1751 oneway: true 1789 oneway: true
1790 bypass_target_door: true
1752} 1791}
1753connections { 1792connections {
1754 from { 1793 from {
1755 port { 1794 port {
1756 map: "the_bearer" 1795 map: "the_bearer"
1757 room: "Back Area" 1796 room: "Tree Entrance"
1758 name: "TREE" 1797 name: "TREE"
1759 } 1798 }
1760 } 1799 }
@@ -1831,7 +1870,6 @@ connections {
1831 } 1870 }
1832} 1871}
1833connections { 1872connections {
1834 # Two one-way connections because the door only blocks one direction.
1835 from { 1873 from {
1836 port { 1874 port {
1837 map: "the_great" 1875 map: "the_great"
@@ -1848,6 +1886,7 @@ connections {
1848 } 1886 }
1849} 1887}
1850connections { 1888connections {
1889 # Two one-way connections because the door only blocks one direction.
1851 from { 1890 from {
1852 port { 1891 port {
1853 map: "the_unkempt" 1892 map: "the_unkempt"
@@ -1880,6 +1919,7 @@ connections {
1880 } 1919 }
1881 } 1920 }
1882 oneway: true 1921 oneway: true
1922 bypass_target_door: true
1883} 1923}
1884connections { 1924connections {
1885 from { 1925 from {
diff --git a/data/ids.yaml b/data/ids.yaml index e2ec985..e38dfcc 100644 --- a/data/ids.yaml +++ b/data/ids.yaml
@@ -1044,6 +1044,7 @@ maps:
1044 House Entrance: 1495 1044 House Entrance: 1495
1045 House Side Door: 1566 1045 House Side Door: 1566
1046 Intense Room Entrance: 1522 1046 Intense Room Entrance: 1522
1047 Lime Hexes: 2810
1047 Magenta Hexes: 2272 1048 Magenta Hexes: 2272
1048 Magic Room Entrance: 1500 1049 Magic Room Entrance: 1500
1049 Magic Room Panels: 1499 1050 Magic Room Panels: 1499
@@ -1124,6 +1125,7 @@ maps:
1124 Starting Room West Wall South Door: 1433 1125 Starting Room West Wall South Door: 1433
1125 Sticks And Stones Door: 1593 1126 Sticks And Stones Door: 1593
1126 Temple of the Eyes Entrance: 1444 1127 Temple of the Eyes Entrance: 1444
1128 Theo Panels: 2811
1127 U2 Room Back Door: 1497 1129 U2 Room Back Door: 1497
1128 U2 Room Back Right Door: 1496 1130 U2 Room Back Right Door: 1496
1129 U2 Room Entrance: 1498 1131 U2 Room Entrance: 1498
@@ -3836,6 +3838,25 @@ endings:
3836 YELLOW: 1206 3838 YELLOW: 1206
3837special: 3839special:
3838 A Job Well Done: 1160 3840 A Job Well Done: 1160
3841 Age Symbol: 2791
3842 Anagram Symbol: 2792
3843 Boxes Symbol: 2793
3844 Cross Symbol: 2794
3845 Eval Symbol: 2795
3846 Example Symbol: 2796
3847 Gender Symbol: 2797
3848 Job Symbol: 2798
3849 Lingo Symbol: 2799
3850 Null Symbol: 2800
3851 Planet Symbol: 2801
3852 Pyramid Symbol: 2802
3853 Question Symbol: 2803
3854 Sound Symbol: 2804
3855 Sparkles Symbol: 2805
3856 Stars Symbol: 2806
3857 Sun Symbol: 2807
3858 Sweet Symbol: 2808
3859 Zero Symbol: 2809
3839progressives: 3860progressives:
3840 Progressive Gold Ending: 2753 3861 Progressive Gold Ending: 2753
3841door_groups: 3862door_groups:
diff --git a/data/maps/daedalus/connections.txtpb b/data/maps/daedalus/connections.txtpb index 09613ae..07b6513 100644 --- a/data/maps/daedalus/connections.txtpb +++ b/data/maps/daedalus/connections.txtpb
@@ -100,6 +100,11 @@ connections {
100 oneway: true 100 oneway: true
101} 101}
102connections { 102connections {
103 from_room: "Outside House"
104 to_room: "Blue Hallway Tall Side"
105 door { name: "House Side Door" }
106}
107connections {
103 from_room: "Purple SE Vestibule" 108 from_room: "Purple SE Vestibule"
104 to_room: "Welcome Back Area" 109 to_room: "Welcome Back Area"
105 oneway: true 110 oneway: true
@@ -1857,3 +1862,9 @@ connections {
1857 oneway: true 1862 oneway: true
1858 roof_access: true 1863 roof_access: true
1859} 1864}
1865connections {
1866 from_room: "Roof"
1867 to_room: "F Keyholder"
1868 oneway: true
1869 roof_access: true
1870}
diff --git a/data/maps/daedalus/doors.txtpb b/data/maps/daedalus/doors.txtpb index ccbf3f0..ace15a1 100644 --- a/data/maps/daedalus/doors.txtpb +++ b/data/maps/daedalus/doors.txtpb
@@ -195,8 +195,8 @@ doors {
195} 195}
196doors { 196doors {
197 name: "Welcome Back Door" 197 name: "Welcome Back Door"
198 type: STANDARD 198 type: LOCATION_ONLY
199 receivers: "Components/Doors/Entry/entry_14" 199 #receivers: "Components/Doors/Entry/entry_14"
200 panels { room: "Welcome Back Area" name: "GREETINGS OLD FRIEND" } 200 panels { room: "Welcome Back Area" name: "GREETINGS OLD FRIEND" }
201 location_room: "Welcome Back Area" 201 location_room: "Welcome Back Area"
202} 202}
@@ -493,7 +493,6 @@ doors {
493 panels { room: "Outside House" name: "WALLS" } 493 panels { room: "Outside House" name: "WALLS" }
494 panels { room: "Outside House" name: "LOCK" } 494 panels { room: "Outside House" name: "LOCK" }
495 location_room: "Outside House" 495 location_room: "Outside House"
496 location_name: "North Purple Vestibules"
497} 496}
498doors { 497doors {
499 name: "Purple NW Vestibule" 498 name: "Purple NW Vestibule"
@@ -1208,70 +1207,37 @@ doors {
1208 type: ITEM_ONLY 1207 type: ITEM_ONLY
1209 receivers: "Components/Doors/Halls/connections_1" 1208 receivers: "Components/Doors/Halls/connections_1"
1210 receivers: "Components/Doors/Halls/connections_3" 1209 receivers: "Components/Doors/Halls/connections_3"
1210 # These have the same effect as the above, but including them here prevents
1211 # them from opening in door shuffle when the J2 door opens.
1212 receivers: "Components/Triggers/teleportListenerConnections3"
1213 receivers: "Components/Triggers/teleportListenerConnections4"
1214 # This door can open from either solving all panels, or just the smiley ones,
1215 # and the latter is obviously a subset of the former so let's just check for
1216 # that.
1211 panels { room: "Hotel" name: "PARKA" } 1217 panels { room: "Hotel" name: "PARKA" }
1212 panels { room: "Hotel" name: "MARLIN" }
1213 panels { room: "Hotel" name: "WHO" }
1214 panels { room: "Hotel" name: "CLOAK" } 1218 panels { room: "Hotel" name: "CLOAK" }
1215 panels { room: "Hotel" name: "MANE" }
1216 panels { room: "Hotel" name: "WHAT" }
1217 panels { room: "Hotel" name: "BLAZER" }
1218 panels { room: "Hotel" name: "WHERE" }
1219 panels { room: "Hotel" name: "DOROTHY" } 1219 panels { room: "Hotel" name: "DOROTHY" }
1220 panels { room: "Hotel" name: "JACKET" }
1221 panels { room: "Hotel" name: "TAIL" }
1222 panels { room: "Hotel" name: "JAWS" } 1220 panels { room: "Hotel" name: "JAWS" }
1223 panels { room: "Hotel" name: "FLOUNDER" }
1224 panels { room: "Hotel" name: "WHEN" } 1221 panels { room: "Hotel" name: "WHEN" }
1225 panels { room: "Hotel" name: "CLAWS" } 1222 panels { room: "Hotel" name: "CLAWS" }
1226 panels { room: "Hotel" name: "BRUCE" }
1227 panels { room: "Hotel" name: "POTATO" } 1223 panels { room: "Hotel" name: "POTATO" }
1228 panels { room: "Hotel" name: "SALAD" }
1229 panels { room: "Hotel" name: "BATHING" }
1230 panels { room: "Hotel" name: "MICRO" } 1224 panels { room: "Hotel" name: "MICRO" }
1231 panels { room: "Hotel" name: "BUSINESS" }
1232 panels { room: "Hotel" name: "WEDDING" }
1233 panels { room: "Hotel" name: "TREE" }
1234 panels { room: "Hotel" name: "RIVER" }
1235 panels { room: "Hotel" name: "TUNING" } 1225 panels { room: "Hotel" name: "TUNING" }
1236 panels { room: "Hotel" name: "BOXING" }
1237 panels { room: "Hotel" name: "TELEPHONE" }
1238 panels { room: "Hotel" name: "LAW" } 1226 panels { room: "Hotel" name: "LAW" }
1239 panels { room: "Hotel" name: "POKER" }
1240 panels { room: "Hotel" name: "CARD" } 1227 panels { room: "Hotel" name: "CARD" }
1241 panels { room: "Hotel" name: "ROAD" } 1228 panels { room: "Hotel" name: "ROAD" }
1242 panels { room: "Hotel" name: "CHOCOLATE" }
1243 panels { room: "Hotel" name: "DEPART" } 1229 panels { room: "Hotel" name: "DEPART" }
1244 panels { room: "Hotel" name: "WITHDRAW" }
1245 panels { room: "Hotel" name: "QUIT" }
1246 panels { room: "Hotel" name: "LEAVE" } 1230 panels { room: "Hotel" name: "LEAVE" }
1247 panels { room: "Hotel" name: "PALE" }
1248 panels { room: "Hotel" name: "JUST" }
1249 panels { room: "Hotel" name: "NEW" }
1250 panels { room: "Hotel" name: "UNTALENTED" }
1251 panels { room: "Hotel" name: "SERVICE" } 1231 panels { room: "Hotel" name: "SERVICE" }
1252 panels { room: "Hotel" name: "FULL" }
1253 panels { room: "Hotel" name: "EVIL" }
1254 panels { room: "Hotel" name: "HONEY" } 1232 panels { room: "Hotel" name: "HONEY" }
1255 panels { room: "Hotel" name: "CRESCENT" }
1256 panels { room: "Hotel" name: "INVALID" } 1233 panels { room: "Hotel" name: "INVALID" }
1257 panels { room: "Hotel" name: "FESTIVAL" } 1234 panels { room: "Hotel" name: "FESTIVAL" }
1258 panels { room: "Hotel" name: "BEAUTIFUL" }
1259 panels { room: "Hotel" name: "WILTED" } 1235 panels { room: "Hotel" name: "WILTED" }
1260 panels { room: "Hotel" name: "DROOPED" }
1261 panels { room: "Hotel" name: "FADED" }
1262 panels { room: "Hotel" name: "WANED" } 1236 panels { room: "Hotel" name: "WANED" }
1263 panels { room: "Hotel" name: "TALL" }
1264 panels { room: "Hotel" name: "CANVAS" }
1265 panels { room: "Hotel" name: "LEVER" }
1266 panels { room: "Hotel" name: "SCULPTURE" }
1267 panels { room: "Hotel" name: "RAGE" } 1237 panels { room: "Hotel" name: "RAGE" }
1268 panels { room: "Hotel" name: "BALL" }
1269 panels { room: "Hotel" name: "FOOL" }
1270 panels { room: "Hotel" name: "VERGE" } 1238 panels { room: "Hotel" name: "VERGE" }
1271 panels { room: "Hotel" name: "ART" }
1272 panels { room: "Hotel" name: "EVER" } 1239 panels { room: "Hotel" name: "EVER" }
1273 panels { room: "Hotel" name: "PAIN" } 1240 panels { room: "Hotel" name: "PAIN" }
1274 panels { room: "Hotel" name: "FOOT" }
1275} 1241}
1276doors { 1242doors {
1277 name: "J2 Door 1" 1243 name: "J2 Door 1"
@@ -2187,6 +2153,7 @@ doors {
2187 receivers: "Components/Doors/Unincorporated/temple_foyer_6" 2153 receivers: "Components/Doors/Unincorporated/temple_foyer_6"
2188 panels { room: "Globe Room" name: "WORD" } 2154 panels { room: "Globe Room" name: "WORD" }
2189 location_room: "Globe Room" 2155 location_room: "Globe Room"
2156 location_name: "Sticks and Stones"
2190} 2157}
2191doors { 2158doors {
2192 name: "Castle Numbers Puzzle" 2159 name: "Castle Numbers Puzzle"
@@ -2306,3 +2273,24 @@ doors {
2306 receivers: "Components/Paintings/Temple of the Eyes/eyeRedStart/teleportListener" 2273 receivers: "Components/Paintings/Temple of the Eyes/eyeRedStart/teleportListener"
2307 double_letters: true 2274 double_letters: true
2308} 2275}
2276doors {
2277 name: "Lime Hexes"
2278 type: LOCATION_ONLY
2279 panels { room: "Tree Entrance" name: "RAT" }
2280 panels { room: "Tree Entrance" name: "DIFFERENCE" }
2281 panels { room: "Tree Entrance" name: "LEANS" }
2282 panels { room: "Tree Entrance" name: "QUESTION" }
2283 panels { room: "Tree Entrance" name: "WHERE" }
2284 panels { room: "Tree Entrance" name: "SUNDER" }
2285 location_room: "Tree Entrance"
2286}
2287doors {
2288 name: "Theo Panels"
2289 type: LOCATION_ONLY
2290 panels { room: "House" name: "GOAT" }
2291 panels { room: "House" name: "AMAZE" }
2292 panels { room: "House" name: "SKINNYHIM" }
2293 panels { room: "House" name: "THEO" }
2294 location_room: "House"
2295 location_name: "All Puzzles"
2296}
diff --git a/data/maps/daedalus/rooms/Wonderland.txtpb b/data/maps/daedalus/rooms/Wonderland.txtpb index 4b69e99..ae9b3f1 100644 --- a/data/maps/daedalus/rooms/Wonderland.txtpb +++ b/data/maps/daedalus/rooms/Wonderland.txtpb
@@ -1,6 +1,5 @@
1name: "Wonderland" 1name: "Wonderland"
2panel_display_name: "Northwest Area" 2panel_display_name: "Northwest Area"
3# TODO: There's a warp from The Entry into here.
4panels { 3panels {
5 name: "APRIL" 4 name: "APRIL"
6 path: "Panels/Wonderland/wonderland_1" 5 path: "Panels/Wonderland/wonderland_1"
diff --git a/data/maps/the_bearer/connections.txtpb b/data/maps/the_bearer/connections.txtpb index 23410f0..ba14d83 100644 --- a/data/maps/the_bearer/connections.txtpb +++ b/data/maps/the_bearer/connections.txtpb
@@ -263,3 +263,8 @@ connections {
263 to_room: "Butterfly Room" 263 to_room: "Butterfly Room"
264 door { name: "Butterfly Entrance" } 264 door { name: "Butterfly Entrance" }
265} 265}
266connections {
267 from_room: "Back Area"
268 to_room: "Tree Entrance"
269 door { name: "Control Center Brown Door" }
270}
diff --git a/data/maps/the_bearer/rooms/Back Area.txtpb b/data/maps/the_bearer/rooms/Back Area.txtpb index 27e175c..b1860de 100644 --- a/data/maps/the_bearer/rooms/Back Area.txtpb +++ b/data/maps/the_bearer/rooms/Back Area.txtpb
@@ -7,12 +7,6 @@ panels {
7 symbols: EXAMPLE 7 symbols: EXAMPLE
8} 8}
9ports { 9ports {
10 name: "TREE"
11 path: "Components/Warps/worldport3"
12 orientation: "north"
13 required_door { name: "Control Center Brown Door" }
14}
15ports {
16 name: "DAEDALUS" 10 name: "DAEDALUS"
17 path: "Components/Warps/worldport2" 11 path: "Components/Warps/worldport2"
18 orientation: "north" 12 orientation: "north"
diff --git a/data/maps/the_bearer/rooms/Tree Entrance.txtpb b/data/maps/the_bearer/rooms/Tree Entrance.txtpb new file mode 100644 index 0000000..97a07da --- /dev/null +++ b/data/maps/the_bearer/rooms/Tree Entrance.txtpb
@@ -0,0 +1,6 @@
1name: "Tree Entrance"
2ports {
3 name: "TREE"
4 path: "Components/Warps/worldport3"
5 orientation: "north"
6}
diff --git a/data/maps/the_darkroom/connections.txtpb b/data/maps/the_darkroom/connections.txtpb index 4093585..1b7ad05 100644 --- a/data/maps/the_darkroom/connections.txtpb +++ b/data/maps/the_darkroom/connections.txtpb
@@ -33,3 +33,18 @@ connections {
33 to_room: "S Room" 33 to_room: "S Room"
34 door { name: "S1 Door" } 34 door { name: "S1 Door" }
35} 35}
36connections {
37 from_room: "First Room"
38 to_room: "Cyan Hallway"
39 door { name: "Colorful Entrance" }
40}
41connections {
42 from_room: "Second Room"
43 to_room: "Congruent Entrance"
44 door { name: "Congruent Entrance" }
45}
46connections {
47 from_room: "First Room"
48 to_room: "Double Sided Entrance"
49 door { name: "Double Sided Entrance" }
50}
diff --git a/data/maps/the_darkroom/rooms/Congruent Entrance.txtpb b/data/maps/the_darkroom/rooms/Congruent Entrance.txtpb new file mode 100644 index 0000000..7ea1286 --- /dev/null +++ b/data/maps/the_darkroom/rooms/Congruent Entrance.txtpb
@@ -0,0 +1,7 @@
1name: "Congruent Entrance"
2panel_display_name: "Second Room"
3ports {
4 name: "CONGRUENT"
5 path: "Components/Warps/worldport7"
6 orientation: "east"
7}
diff --git a/data/maps/the_darkroom/rooms/Cyan Hallway.txtpb b/data/maps/the_darkroom/rooms/Cyan Hallway.txtpb new file mode 100644 index 0000000..308efb1 --- /dev/null +++ b/data/maps/the_darkroom/rooms/Cyan Hallway.txtpb
@@ -0,0 +1,7 @@
1name: "Cyan Hallway"
2panel_display_name: "First Room"
3ports {
4 name: "COLORFUL"
5 path: "Components/Warps/worldport8"
6 orientation: "north"
7}
diff --git a/data/maps/the_darkroom/rooms/Double Sided Entrance.txtpb b/data/maps/the_darkroom/rooms/Double Sided Entrance.txtpb new file mode 100644 index 0000000..9d25108 --- /dev/null +++ b/data/maps/the_darkroom/rooms/Double Sided Entrance.txtpb
@@ -0,0 +1,7 @@
1name: "Double Sided Entrance"
2panel_display_name: "First Room"
3ports {
4 name: "DOUBLESIDED"
5 path: "Components/Warps/worldport6"
6 orientation: "east"
7}
diff --git a/data/maps/the_darkroom/rooms/First Room.txtpb b/data/maps/the_darkroom/rooms/First Room.txtpb index c93f5b4..c635757 100644 --- a/data/maps/the_darkroom/rooms/First Room.txtpb +++ b/data/maps/the_darkroom/rooms/First Room.txtpb
@@ -42,15 +42,3 @@ ports {
42 orientation: "north" 42 orientation: "north"
43 required_door { name: "Second Room Entrance" } 43 required_door { name: "Second Room Entrance" }
44} 44}
45ports {
46 name: "COLORFUL"
47 path: "Components/Warps/worldport8"
48 orientation: "north"
49 required_door { name: "Colorful Entrance" }
50}
51ports {
52 name: "DOUBLESIDED"
53 path: "Components/Warps/worldport6"
54 orientation: "east"
55 required_door { name: "Double Sided Entrance" }
56}
diff --git a/data/maps/the_darkroom/rooms/Second Room.txtpb b/data/maps/the_darkroom/rooms/Second Room.txtpb index baeea12..a3964ea 100644 --- a/data/maps/the_darkroom/rooms/Second Room.txtpb +++ b/data/maps/the_darkroom/rooms/Second Room.txtpb
@@ -47,9 +47,3 @@ ports {
47 orientation: "north" 47 orientation: "north"
48 required_door { name: "Third Room Entrance" } 48 required_door { name: "Third Room Entrance" }
49} 49}
50ports {
51 name: "CONGRUENT"
52 path: "Components/Warps/worldport7"
53 orientation: "east"
54 required_door { name: "Congruent Entrance" }
55}
diff --git a/data/maps/the_entry/connections.txtpb b/data/maps/the_entry/connections.txtpb index a2e325a..9813f85 100644 --- a/data/maps/the_entry/connections.txtpb +++ b/data/maps/the_entry/connections.txtpb
@@ -199,3 +199,18 @@ connections {
199 to_room: "White Hallway To Daedalus" 199 to_room: "White Hallway To Daedalus"
200 door { name: "Control Center White Door" } 200 door { name: "Control Center White Door" }
201} 201}
202connections {
203 from_room: "Flipped Second Room"
204 to_room: "Four Rooms Entrance"
205 door { name: "Flipped Second Room Right Door" }
206}
207connections {
208 from_room: "Link Area"
209 to_room: "Liberated Entrance"
210 door { name: "Liberated Entrance" }
211}
212connections {
213 from_room: "Link Area"
214 to_room: "Literate Entrance"
215 door { name: "Literate Entrance" }
216}
diff --git a/data/maps/the_entry/doors.txtpb b/data/maps/the_entry/doors.txtpb index 6bef160..466f5ce 100644 --- a/data/maps/the_entry/doors.txtpb +++ b/data/maps/the_entry/doors.txtpb
@@ -137,8 +137,10 @@ doors {
137 type: STANDARD 137 type: STANDARD
138 receivers: "Components/Doors/back_left_2" 138 receivers: "Components/Doors/back_left_2"
139 panels { room: "Colored Doors Area" name: "OPEN" answer: "orange" } 139 panels { room: "Colored Doors Area" name: "OPEN" answer: "orange" }
140 # "wall" is supposed to also work. idk man 140 panels { room: "Colored Doors Area" name: "OPEN" answer: "wall" }
141 complete_at: 1
141 location_room: "Colored Doors Area" 142 location_room: "Colored Doors Area"
143 location_name: "OPEN"
142} 144}
143doors { 145doors {
144 name: "Lime Room Entrance" 146 name: "Lime Room Entrance"
diff --git a/data/maps/the_entry/rooms/Flipped Second Room.txtpb b/data/maps/the_entry/rooms/Flipped Second Room.txtpb index 5841ca1..0d518bb 100644 --- a/data/maps/the_entry/rooms/Flipped Second Room.txtpb +++ b/data/maps/the_entry/rooms/Flipped Second Room.txtpb
@@ -21,10 +21,3 @@ paintings {
21 gravity: Y_PLUS 21 gravity: Y_PLUS
22 display_name: "Eye Painting" 22 display_name: "Eye Painting"
23} 23}
24ports {
25 name: "FOUR"
26 path: "Components/Warps/worldport9"
27 orientation: "south"
28 gravity: Y_PLUS
29 required_door { name: "Flipped Second Room Right Door" }
30} \ No newline at end of file
diff --git a/data/maps/the_entry/rooms/Four Rooms Entrance.txtpb b/data/maps/the_entry/rooms/Four Rooms Entrance.txtpb new file mode 100644 index 0000000..689d23e --- /dev/null +++ b/data/maps/the_entry/rooms/Four Rooms Entrance.txtpb
@@ -0,0 +1,7 @@
1name: "Four Rooms Entrance"
2ports {
3 name: "FOUR"
4 path: "Components/Warps/worldport9"
5 orientation: "south"
6 gravity: Y_PLUS
7}
diff --git a/data/maps/the_entry/rooms/Liberated Entrance.txtpb b/data/maps/the_entry/rooms/Liberated Entrance.txtpb new file mode 100644 index 0000000..f0176a0 --- /dev/null +++ b/data/maps/the_entry/rooms/Liberated Entrance.txtpb
@@ -0,0 +1,6 @@
1name: "Liberated Entrance"
2ports {
3 name: "BLUE"
4 path: "worldport8"
5 orientation: "west"
6}
diff --git a/data/maps/the_entry/rooms/Link Area.txtpb b/data/maps/the_entry/rooms/Link Area.txtpb index 689f57a..5b68279 100644 --- a/data/maps/the_entry/rooms/Link Area.txtpb +++ b/data/maps/the_entry/rooms/Link Area.txtpb
@@ -26,15 +26,3 @@ paintings {
26 orientation: "south" 26 orientation: "south"
27 display_name: "Center Painting" 27 display_name: "Center Painting"
28} 28}
29ports {
30 name: "BLUE"
31 path: "worldport8"
32 orientation: "west"
33 required_door { name: "Liberated Entrance" }
34}
35ports {
36 name: "BROWN"
37 path: "worldport9"
38 orientation: "east"
39 required_door { name: "Literate Entrance" }
40} \ No newline at end of file
diff --git a/data/maps/the_entry/rooms/Literate Entrance.txtpb b/data/maps/the_entry/rooms/Literate Entrance.txtpb new file mode 100644 index 0000000..4ec402f --- /dev/null +++ b/data/maps/the_entry/rooms/Literate Entrance.txtpb
@@ -0,0 +1,6 @@
1name: "Literate Entrance"
2ports {
3 name: "BROWN"
4 path: "worldport9"
5 orientation: "east"
6}
diff --git a/data/maps/the_entry/rooms/Starting Room.txtpb b/data/maps/the_entry/rooms/Starting Room.txtpb index bc77e6d..8e8373b 100644 --- a/data/maps/the_entry/rooms/Starting Room.txtpb +++ b/data/maps/the_entry/rooms/Starting Room.txtpb
@@ -24,7 +24,9 @@ panels {
24 path: "Panels/Entry/front_1" 24 path: "Panels/Entry/front_1"
25 clue: "eye" 25 clue: "eye"
26 answer: "i" 26 answer: "i"
27 symbols: ZERO 27 #symbols: ZERO
28 # This panel blocks getting N1 and T1. We will mod it to be I/I with no symbol
29 # when symbol shuffle is on.
28} 30}
29panels { 31panels {
30 name: "HINT" 32 name: "HINT"
diff --git a/data/maps/the_gallery/doors.txtpb b/data/maps/the_gallery/doors.txtpb index a7a5d85..adbc766 100644 --- a/data/maps/the_gallery/doors.txtpb +++ b/data/maps/the_gallery/doors.txtpb
@@ -1,7 +1,7 @@
1# The Gallery is interesting because there's so many cross-map requirements. 1# The Gallery is interesting because there's so many cross-map requirements.
2doors { 2doors {
3 name: "Darkroom Painting" 3 name: "Darkroom Painting"
4 type: ITEM_ONLY 4 type: GALLERY_PAINTING
5 #move_paintings { room: "Main Area" name: "DARKROOM" } 5 #move_paintings { room: "Main Area" name: "DARKROOM" }
6 receivers: "Components/Paintings/darkroom/teleportListener" 6 receivers: "Components/Paintings/darkroom/teleportListener"
7 panels { map: "the_darkroom" room: "First Room" name: "BISON" } 7 panels { map: "the_darkroom" room: "First Room" name: "BISON" }
@@ -27,14 +27,14 @@ doors {
27} 27}
28doors { 28doors {
29 name: "Butterfly Painting" 29 name: "Butterfly Painting"
30 type: ITEM_ONLY 30 type: GALLERY_PAINTING
31 #move_paintings { room: "Main Area" name: "BUTTERFLY" } 31 #move_paintings { room: "Main Area" name: "BUTTERFLY" }
32 receivers: "Components/Paintings/butterfly/teleportListener" 32 receivers: "Components/Paintings/butterfly/teleportListener"
33 rooms { map: "the_butterfly" name: "Main Area" } 33 rooms { map: "the_butterfly" name: "Main Area" }
34} 34}
35doors { 35doors {
36 name: "Between Painting" 36 name: "Between Painting"
37 type: ITEM_ONLY 37 type: GALLERY_PAINTING
38 #move_paintings { room: "Main Area" name: "BETWEEN" } 38 #move_paintings { room: "Main Area" name: "BETWEEN" }
39 receivers: "Components/Paintings/between/teleportListener" 39 receivers: "Components/Paintings/between/teleportListener"
40 panels { map: "the_between" room: "Main Area" name: "SUN" } 40 panels { map: "the_between" room: "Main Area" name: "SUN" }
@@ -70,14 +70,14 @@ doors {
70} 70}
71doors { 71doors {
72 name: "Entry Painting" 72 name: "Entry Painting"
73 type: ITEM_ONLY 73 type: GALLERY_PAINTING
74 #move_paintings { room: "Main Area" name: "ENTRY" } 74 #move_paintings { room: "Main Area" name: "ENTRY" }
75 receivers: "Components/Paintings/eyes/teleportListener" 75 receivers: "Components/Paintings/eyes/teleportListener"
76 panels { map: "the_entry" room: "Eye Room" name: "I" } 76 panels { map: "the_entry" room: "Eye Room" name: "I" }
77} 77}
78doors { 78doors {
79 name: "Wise Painting" 79 name: "Wise Painting"
80 type: ITEM_ONLY 80 type: GALLERY_PAINTING
81 #move_paintings { room: "Main Area" name: "WISE" } 81 #move_paintings { room: "Main Area" name: "WISE" }
82 receivers: "Components/Paintings/triangle/teleportListener" 82 receivers: "Components/Paintings/triangle/teleportListener"
83 panels { map: "the_wise" room: "Entry" name: "INK" } 83 panels { map: "the_wise" room: "Entry" name: "INK" }
@@ -105,7 +105,7 @@ doors {
105} 105}
106doors { 106doors {
107 name: "Tree Painting" 107 name: "Tree Painting"
108 type: ITEM_ONLY 108 type: GALLERY_PAINTING
109 #move_paintings { room: "Main Area" name: "TREE" } 109 #move_paintings { room: "Main Area" name: "TREE" }
110 receivers: "Components/Paintings/Clue Maps/tree/teleportListener" 110 receivers: "Components/Paintings/Clue Maps/tree/teleportListener"
111 panels { map: "the_tree" room: "Main Area" name: "COLOR" } 111 panels { map: "the_tree" room: "Main Area" name: "COLOR" }
@@ -142,35 +142,35 @@ doors {
142} 142}
143doors { 143doors {
144 name: "Unyielding Painting" 144 name: "Unyielding Painting"
145 type: ITEM_ONLY 145 type: GALLERY_PAINTING
146 #move_paintings { room: "Main Area" name: "UNYIELDING" } 146 #move_paintings { room: "Main Area" name: "UNYIELDING" }
147 receivers: "Components/Paintings/Clue Maps/unyielding/teleportListener" 147 receivers: "Components/Paintings/Clue Maps/unyielding/teleportListener"
148 rooms { map: "the_unyielding" name: "Digital Entrance" } 148 rooms { map: "the_unyielding" name: "Digital Entrance" }
149} 149}
150doors { 150doors {
151 name: "Graveyard Painting" 151 name: "Graveyard Painting"
152 type: ITEM_ONLY 152 type: GALLERY_PAINTING
153 #move_paintings { room: "Main Area" name: "GRAVEYARD" } 153 #move_paintings { room: "Main Area" name: "GRAVEYARD" }
154 receivers: "Components/Paintings/Endings/grave/teleportListener" 154 receivers: "Components/Paintings/Endings/grave/teleportListener"
155 rooms { map: "the_graveyard" name: "Outside" } 155 rooms { map: "the_graveyard" name: "Outside" }
156} 156}
157doors { 157doors {
158 name: "Control Center Painting" 158 name: "Control Center Painting"
159 type: ITEM_ONLY 159 type: GALLERY_PAINTING
160 #move_paintings { room: "Main Area" name: "CC" } 160 #move_paintings { room: "Main Area" name: "CC" }
161 receivers: "Components/Paintings/Endings/desert/teleportListener" 161 receivers: "Components/Paintings/Endings/desert/teleportListener"
162 rooms { map: "the_impressive" name: "M2 Room" } 162 rooms { map: "the_impressive" name: "M2 Room" }
163} 163}
164doors { 164doors {
165 name: "Tower Painting" 165 name: "Tower Painting"
166 type: ITEM_ONLY 166 type: GALLERY_PAINTING
167 #move_paintings { room: "Main Area" name: "TOWER" } 167 #move_paintings { room: "Main Area" name: "TOWER" }
168 receivers: "Components/Paintings/Endings/red/teleportListener" 168 receivers: "Components/Paintings/Endings/red/teleportListener"
169 rooms { map: "the_tower" name: "First Floor" } 169 rooms { map: "the_tower" name: "First Floor" }
170} 170}
171doors { 171doors {
172 name: "Wondrous Painting" 172 name: "Wondrous Painting"
173 type: ITEM_ONLY 173 type: GALLERY_PAINTING
174 #move_paintings { room: "Main Area" name: "WONDROUS" } 174 #move_paintings { room: "Main Area" name: "WONDROUS" }
175 receivers: "Components/Paintings/Endings/window/teleportListener" 175 receivers: "Components/Paintings/Endings/window/teleportListener"
176 panels { map: "the_wondrous" room: "Entry" name: "WONDER" } 176 panels { map: "the_wondrous" room: "Entry" name: "WONDER" }
@@ -187,42 +187,42 @@ doors {
187} 187}
188doors { 188doors {
189 name: "Rainbow Painting" 189 name: "Rainbow Painting"
190 type: ITEM_ONLY 190 type: GALLERY_PAINTING
191 #move_paintings { room: "Main Area" name: "RAINBOW" } 191 #move_paintings { room: "Main Area" name: "RAINBOW" }
192 receivers: "Components/Paintings/Endings/rainbow/teleportListener" 192 receivers: "Components/Paintings/Endings/rainbow/teleportListener"
193 rooms { map: "daedalus" name: "Rainbow Start" } 193 rooms { map: "daedalus" name: "Rainbow Start" }
194} 194}
195doors { 195doors {
196 name: "Words Painting" 196 name: "Words Painting"
197 type: ITEM_ONLY 197 type: GALLERY_PAINTING
198 #move_paintings { room: "Main Area" name: "WORDS" } 198 #move_paintings { room: "Main Area" name: "WORDS" }
199 receivers: "Components/Paintings/Endings/words/teleportListener" 199 receivers: "Components/Paintings/Endings/words/teleportListener"
200 rooms { map: "the_words" name: "Main Area" } 200 rooms { map: "the_words" name: "Main Area" }
201} 201}
202doors { 202doors {
203 name: "Colorful Painting" 203 name: "Colorful Painting"
204 type: ITEM_ONLY 204 type: GALLERY_PAINTING
205 #move_paintings { room: "Main Area" name: "COLORFUL" } 205 #move_paintings { room: "Main Area" name: "COLORFUL" }
206 receivers: "Components/Paintings/Endings/colorful/teleportListener" 206 receivers: "Components/Paintings/Endings/colorful/teleportListener"
207 rooms { map: "the_colorful" name: "White Room" } 207 rooms { map: "the_colorful" name: "White Room" }
208} 208}
209doors { 209doors {
210 name: "Castle Painting" 210 name: "Castle Painting"
211 type: ITEM_ONLY 211 type: GALLERY_PAINTING
212 #move_paintings { room: "Main Area" name: "CASTLE" } 212 #move_paintings { room: "Main Area" name: "CASTLE" }
213 receivers: "Components/Paintings/Endings/castle/teleportListener" 213 receivers: "Components/Paintings/Endings/castle/teleportListener"
214 rooms { map: "daedalus" name: "Castle" } 214 rooms { map: "daedalus" name: "Castle" }
215} 215}
216doors { 216doors {
217 name: "Sun Temple Painting" 217 name: "Sun Temple Painting"
218 type: ITEM_ONLY 218 type: GALLERY_PAINTING
219 #move_paintings { room: "Main Area" name: "SUNTEMPLE" } 219 #move_paintings { room: "Main Area" name: "SUNTEMPLE" }
220 receivers: "Components/Paintings/Endings/temple/teleportListener" 220 receivers: "Components/Paintings/Endings/temple/teleportListener"
221 rooms { map: "the_sun_temple" name: "Entrance" } 221 rooms { map: "the_sun_temple" name: "Entrance" }
222} 222}
223doors { 223doors {
224 name: "Ancient Painting" 224 name: "Ancient Painting"
225 type: ITEM_ONLY 225 type: GALLERY_PAINTING
226 #move_paintings { room: "Main Area" name: "ANCIENT" } 226 #move_paintings { room: "Main Area" name: "ANCIENT" }
227 receivers: "Components/Paintings/Endings/cubes/teleportListener" 227 receivers: "Components/Paintings/Endings/cubes/teleportListener"
228 rooms { map: "the_ancient" name: "Outside" } 228 rooms { map: "the_ancient" name: "Outside" }
diff --git a/data/maps/the_great/doors.txtpb b/data/maps/the_great/doors.txtpb index f0f2fde..5d0e90d 100644 --- a/data/maps/the_great/doors.txtpb +++ b/data/maps/the_great/doors.txtpb
@@ -508,3 +508,8 @@ doors {
508 receivers: "Panels/General/entry_7/teleportListener" 508 receivers: "Panels/General/entry_7/teleportListener"
509 double_letters: true 509 double_letters: true
510} 510}
511doors {
512 name: "Partial Entrance"
513 type: EVENT
514 panels { room: "West Side" name: "CLUE" }
515}
diff --git a/data/maps/the_great/rooms/West Side.txtpb b/data/maps/the_great/rooms/West Side.txtpb index daf1718..8279e16 100644 --- a/data/maps/the_great/rooms/West Side.txtpb +++ b/data/maps/the_great/rooms/West Side.txtpb
@@ -76,4 +76,5 @@ ports {
76 path: "Meshes/Blocks/Warps/worldport7" 76 path: "Meshes/Blocks/Warps/worldport7"
77 orientation: "east" 77 orientation: "east"
78 # ER with this is weird; make sure to place on the surface 78 # ER with this is weird; make sure to place on the surface
79 required_door { name: "Partial Entrance" }
79} 80}
diff --git a/data/maps/the_owl/doors.txtpb b/data/maps/the_owl/doors.txtpb index 5ec34c6..9254c2a 100644 --- a/data/maps/the_owl/doors.txtpb +++ b/data/maps/the_owl/doors.txtpb
@@ -235,7 +235,7 @@ doors {
235 type: EVENT 235 type: EVENT
236 #receivers: "Panels/Colors/owl_2/animationListener2" 236 #receivers: "Panels/Colors/owl_2/animationListener2"
237 panels { room: "Connected Area" name: "RANGE" } 237 panels { room: "Connected Area" name: "RANGE" }
238 panels { room: "R2C3 Bottom" name: "BLACK" } 238 panels { room: "Connected Area" name: "WHITE" }
239 panels { room: "Blue Room" name: "SKY" } 239 panels { room: "Blue Room" name: "SKY" }
240} 240}
241doors { 241doors {
diff --git a/data/metadata.txtpb b/data/metadata.txtpb new file mode 100644 index 0000000..57255e6 --- /dev/null +++ b/data/metadata.txtpb
@@ -0,0 +1,23 @@
1version: 3
2# Filler item.
3special_names: "A Job Well Done"
4# Symbol items.
5special_names: "Age Symbol"
6special_names: "Anagram Symbol"
7special_names: "Boxes Symbol"
8special_names: "Cross Symbol"
9special_names: "Eval Symbol"
10special_names: "Example Symbol"
11special_names: "Gender Symbol"
12special_names: "Job Symbol"
13special_names: "Lingo Symbol"
14special_names: "Null Symbol"
15special_names: "Planet Symbol"
16special_names: "Pyramid Symbol"
17special_names: "Question Symbol"
18special_names: "Sound Symbol"
19special_names: "Sparkles Symbol"
20special_names: "Stars Symbol"
21special_names: "Sun Symbol"
22special_names: "Sweet Symbol"
23special_names: "Zero Symbol"
diff --git a/proto/data.proto b/proto/data.proto index 014cbeb..bf216b9 100644 --- a/proto/data.proto +++ b/proto/data.proto
@@ -27,6 +27,9 @@ enum DoorType {
27 27
28 // This door is an item if gravestone shuffle is enabled, and is a location as long as panelsanity is not on. 28 // This door is an item if gravestone shuffle is enabled, and is a location as long as panelsanity is not on.
29 GRAVESTONE = 6; 29 GRAVESTONE = 6;
30
31 // This door is never a location, and is an item as long as gallery painting shuffle is on.
32 GALLERY_PAINTING = 7;
30} 33}
31 34
32enum DoorGroupType { 35enum DoorGroupType {
@@ -260,6 +263,8 @@ message DoorGroup {
260} 263}
261 264
262message AllObjects { 265message AllObjects {
266 optional uint64 version = 15;
267
263 repeated Map maps = 7; 268 repeated Map maps = 7;
264 repeated Room rooms = 1; 269 repeated Room rooms = 1;
265 repeated Door doors = 2; 270 repeated Door doors = 2;
diff --git a/proto/human.proto b/proto/human.proto index d48f687..615ac86 100644 --- a/proto/human.proto +++ b/proto/human.proto
@@ -66,6 +66,10 @@ message HumanConnection {
66 // If true, this connection will only be logically allowed if the Daedalus 66 // If true, this connection will only be logically allowed if the Daedalus
67 // Roof Access option is enabled. 67 // Roof Access option is enabled.
68 optional bool roof_access = 7; 68 optional bool roof_access = 7;
69
70 // If true, this connection intentionally skips the target object's
71 // required door.
72 optional bool bypass_target_door = 8;
69} 73}
70 74
71message HumanConnections { 75message HumanConnections {
@@ -212,6 +216,11 @@ message HumanDoorGroups {
212 repeated HumanDoorGroup door_groups = 1; 216 repeated HumanDoorGroup door_groups = 1;
213} 217}
214 218
219message HumanGlobalMetadata {
220 repeated string special_names = 1;
221 optional uint64 version = 2;
222}
223
215message IdMappings { 224message IdMappings {
216 message RoomIds { 225 message RoomIds {
217 map<string, uint64> panels = 1; 226 map<string, uint64> panels = 1;
diff --git a/tools/assign_ids/main.cpp b/tools/assign_ids/main.cpp index ee55338..3e16f78 100644 --- a/tools/assign_ids/main.cpp +++ b/tools/assign_ids/main.cpp
@@ -44,6 +44,7 @@ class AssignIds {
44 ProcessSpecialIds(); 44 ProcessSpecialIds();
45 ProcessProgressivesFile(datadir_path / "progressives.txtpb"); 45 ProcessProgressivesFile(datadir_path / "progressives.txtpb");
46 ProcessDoorGroupsFile(datadir_path / "door_groups.txtpb"); 46 ProcessDoorGroupsFile(datadir_path / "door_groups.txtpb");
47 ProcessGlobalMetadataFile(datadir_path / "metadata.txtpb");
47 48
48 WriteIds(ids_path); 49 WriteIds(ids_path);
49 50
@@ -288,6 +289,23 @@ class AssignIds {
288 } 289 }
289 } 290 }
290 291
292 void ProcessGlobalMetadataFile(std::filesystem::path path) {
293 if (!std::filesystem::exists(path)) {
294 return;
295 }
296
297 auto h_metadata = ReadMessageFromFile<HumanGlobalMetadata>(path.string());
298 auto& specials = *output_.mutable_special();
299
300 for (const std::string& h_special : h_metadata.special_names()) {
301 if (!id_mappings_.special().contains(h_special)) {
302 specials[h_special] = next_id_++;
303 } else {
304 specials[h_special] = id_mappings_.special().at(h_special);
305 }
306 }
307 }
308
291 private: 309 private:
292 void UpdateNextId(const google::protobuf::Map<std::string, uint64_t>& ids) { 310 void UpdateNextId(const google::protobuf::Map<std::string, uint64_t>& ids) {
293 for (const auto& [_, id] : ids) { 311 for (const auto& [_, id] : ids) {
diff --git a/tools/datapacker/main.cpp b/tools/datapacker/main.cpp index c640de6..6bbb461 100644 --- a/tools/datapacker/main.cpp +++ b/tools/datapacker/main.cpp
@@ -45,6 +45,7 @@ class DataPacker {
45 ProcessMaps(datadir_path); 45 ProcessMaps(datadir_path);
46 ProcessProgressivesFile(datadir_path / "progressives.txtpb"); 46 ProcessProgressivesFile(datadir_path / "progressives.txtpb");
47 ProcessDoorGroupsFile(datadir_path / "door_groups.txtpb"); 47 ProcessDoorGroupsFile(datadir_path / "door_groups.txtpb");
48 ProcessGlobalMetadataFile(datadir_path / "metadata.txtpb");
48 ProcessIdsFile(datadir_path / "ids.yaml"); 49 ProcessIdsFile(datadir_path / "ids.yaml");
49 50
50 { 51 {
@@ -600,6 +601,15 @@ class DataPacker {
600 } 601 }
601 } 602 }
602 603
604 void ProcessGlobalMetadataFile(std::filesystem::path path) {
605 if (!std::filesystem::exists(path)) {
606 return;
607 }
608
609 auto h_metadata = ReadMessageFromFile<HumanGlobalMetadata>(path.string());
610 container_.all_objects().set_version(h_metadata.version());
611 }
612
603 void ProcessIdsFile(std::filesystem::path path) { 613 void ProcessIdsFile(std::filesystem::path path) {
604 auto ids = ReadIdsFromYaml(path.string()); 614 auto ids = ReadIdsFromYaml(path.string());
605 615
diff --git a/tools/validator/human_processor.cpp b/tools/validator/human_processor.cpp index 561225e..2c978bf 100644 --- a/tools/validator/human_processor.cpp +++ b/tools/validator/human_processor.cpp
@@ -394,7 +394,9 @@ class HumanProcessor {
394 } 394 }
395 } else if (human_connection.has_from()) { 395 } else if (human_connection.has_from()) {
396 ProcessSingleConnection(human_connection, human_connection.from(), 396 ProcessSingleConnection(human_connection, human_connection.from(),
397 current_map_name); 397 current_map_name,
398 /*is_target=*/!human_connection.oneway() &&
399 !human_connection.bypass_target_door());
398 } 400 }
399 401
400 if (human_connection.has_to_room()) { 402 if (human_connection.has_to_room()) {
@@ -410,8 +412,9 @@ class HumanProcessor {
410 std::cout << "A global connection used to_room." << std::endl; 412 std::cout << "A global connection used to_room." << std::endl;
411 } 413 }
412 } else if (human_connection.has_to()) { 414 } else if (human_connection.has_to()) {
413 ProcessSingleConnection(human_connection, human_connection.to(), 415 ProcessSingleConnection(
414 current_map_name); 416 human_connection, human_connection.to(), current_map_name,
417 /*is_target=*/!human_connection.bypass_target_door());
415 } 418 }
416 419
417 if (human_connection.has_door()) { 420 if (human_connection.has_door()) {
@@ -432,7 +435,7 @@ class HumanProcessor {
432 void ProcessSingleConnection( 435 void ProcessSingleConnection(
433 const HumanConnection& human_connection, 436 const HumanConnection& human_connection,
434 const HumanConnection::Endpoint& endpoint, 437 const HumanConnection::Endpoint& endpoint,
435 const std::optional<std::string>& current_map_name) { 438 const std::optional<std::string>& current_map_name, bool is_target) {
436 if (endpoint.has_room()) { 439 if (endpoint.has_room()) {
437 auto room_identifier = 440 auto room_identifier =
438 GetCompleteRoomIdentifier(endpoint.room(), current_map_name); 441 GetCompleteRoomIdentifier(endpoint.room(), current_map_name);
@@ -451,6 +454,11 @@ class HumanProcessor {
451 if (painting_identifier) { 454 if (painting_identifier) {
452 PaintingInfo& painting_info = info_.paintings[*painting_identifier]; 455 PaintingInfo& painting_info = info_.paintings[*painting_identifier];
453 painting_info.connections_referenced_by.push_back(human_connection); 456 painting_info.connections_referenced_by.push_back(human_connection);
457
458 if (is_target) {
459 painting_info.target_connections_referenced_by.push_back(
460 human_connection);
461 }
454 } else { 462 } else {
455 // Not sure where else to store this right now. 463 // Not sure where else to store this right now.
456 std::cout 464 std::cout
@@ -463,6 +471,11 @@ class HumanProcessor {
463 if (port_identifier) { 471 if (port_identifier) {
464 PortInfo& port_info = info_.ports[*port_identifier]; 472 PortInfo& port_info = info_.ports[*port_identifier];
465 port_info.connections_referenced_by.push_back(human_connection); 473 port_info.connections_referenced_by.push_back(human_connection);
474
475 if (is_target) {
476 port_info.target_connections_referenced_by.push_back(
477 human_connection);
478 }
466 } else { 479 } else {
467 // Not sure where else to store this right now. 480 // Not sure where else to store this right now.
468 std::cout 481 std::cout
@@ -480,6 +493,11 @@ class HumanProcessor {
480 panel_info.proxies[endpoint.panel().answer()] 493 panel_info.proxies[endpoint.panel().answer()]
481 .connections_referenced_by.push_back(human_connection); 494 .connections_referenced_by.push_back(human_connection);
482 } 495 }
496
497 if (is_target) {
498 panel_info.target_connections_referenced_by.push_back(
499 human_connection);
500 }
483 } 501 }
484 } 502 }
485 } 503 }
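
With the new is_target flag, an endpoint is treated as the target of a connection unless the connection sets bypass_target_door; the from endpoint additionally stops counting as a target when the connection is oneway. A hypothetical HumanConnection entry in protobuf text format using those fields (the field names come from this hunk; the room and painting names and the exact nesting of the endpoints are illustrative only):

    from { room: "Example Room" }
    to { painting: "example_painting" }
    # Neither oneway nor bypass_target_door is set, so this connection is
    # added to target_connections_referenced_by on both endpoints.

Setting oneway: true would remove only the from endpoint from the target lists, while bypass_target_door: true would remove both.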
diff --git a/tools/validator/structs.h b/tools/validator/structs.h index 17ed33a..d1d45f2 100644 --- a/tools/validator/structs.h +++ b/tools/validator/structs.h
@@ -56,12 +56,14 @@ struct PortInfo {
56 std::vector<HumanPort> definitions; 56 std::vector<HumanPort> definitions;
57 57
58 std::vector<HumanConnection> connections_referenced_by; 58 std::vector<HumanConnection> connections_referenced_by;
59 std::vector<HumanConnection> target_connections_referenced_by;
59}; 60};
60 61
61struct PaintingInfo { 62struct PaintingInfo {
62 std::vector<HumanPainting> definitions; 63 std::vector<HumanPainting> definitions;
63 64
64 std::vector<HumanConnection> connections_referenced_by; 65 std::vector<HumanConnection> connections_referenced_by;
66 std::vector<HumanConnection> target_connections_referenced_by;
65 std::vector<DoorIdentifier> doors_referenced_by; 67 std::vector<DoorIdentifier> doors_referenced_by;
66}; 68};
67 69
@@ -79,6 +81,7 @@ struct PanelInfo {
79 std::string map_area_name; 81 std::string map_area_name;
80 82
81 std::vector<HumanConnection> connections_referenced_by; 83 std::vector<HumanConnection> connections_referenced_by;
84 std::vector<HumanConnection> target_connections_referenced_by;
82 std::vector<DoorIdentifier> doors_referenced_by; 85 std::vector<DoorIdentifier> doors_referenced_by;
83 86
84 std::map<std::string, ProxyInfo> proxies; 87 std::map<std::string, ProxyInfo> proxies;
diff --git a/tools/validator/validator.cpp b/tools/validator/validator.cpp index 4149caa..dd41f5c 100644 --- a/tools/validator/validator.cpp +++ b/tools/validator/validator.cpp
@@ -106,7 +106,8 @@ class Validator {
106 return false; 106 return false;
107 } 107 }
108 108
109 if (h_door.keyholders_size() > 0 || h_door.endings_size() > 0) { 109 if (h_door.keyholders_size() > 0 || h_door.endings_size() > 0 ||
110 h_door.complete_at() > 0) {
110 return true; 111 return true;
111 } 112 }
112 113
@@ -256,6 +257,22 @@ class Validator {
256 std::cout << "Port " << port_identifier.ShortDebugString() 257 std::cout << "Port " << port_identifier.ShortDebugString()
257 << " was defined multiple times." << std::endl; 258 << " was defined multiple times." << std::endl;
258 } 259 }
260
261 if (!port_info.target_connections_referenced_by.empty()) {
262 for (const HumanPort& port : port_info.definitions) {
263 if (port.has_required_door()) {
264 std::cout << "Port " << port_identifier.ShortDebugString()
265 << " has a required door but is the target of a connection:"
266 << std::endl;
267
268 for (const HumanConnection& connection :
269 port_info.target_connections_referenced_by) {
270 std::cout << " CONNECTION " << connection.ShortDebugString()
271 << std::endl;
272 }
273 }
274 }
275 }
259 } 276 }
260 277
261 void ValidatePainting(const PaintingIdentifier& painting_identifier, 278 void ValidatePainting(const PaintingIdentifier& painting_identifier,
@@ -279,6 +296,22 @@ class Validator {
279 std::cout << "Painting " << painting_identifier.ShortDebugString() 296 std::cout << "Painting " << painting_identifier.ShortDebugString()
280 << " was defined multiple times." << std::endl; 297 << " was defined multiple times." << std::endl;
281 } 298 }
299
300 if (!painting_info.target_connections_referenced_by.empty()) {
301 for (const HumanPainting& painting : painting_info.definitions) {
302 if (painting.has_required_door()) {
303 std::cout << "Painting " << painting_identifier.ShortDebugString()
304 << " has a required door but is the target of a connection:"
305 << std::endl;
306
307 for (const HumanConnection& connection :
308 painting_info.target_connections_referenced_by) {
309 std::cout << " CONNECTION " << connection.ShortDebugString()
310 << std::endl;
311 }
312 }
313 }
314 }
282 } 315 }
283 316
284 void ValidatePanel(const PanelIdentifier& panel_identifier, 317 void ValidatePanel(const PanelIdentifier& panel_identifier,
@@ -340,6 +373,22 @@ class Validator {
340 std::cout << "Panel " << panel_identifier.ShortDebugString() 373 std::cout << "Panel " << panel_identifier.ShortDebugString()
341 << " is missing an AP ID." << std::endl; 374 << " is missing an AP ID." << std::endl;
342 } 375 }
376
377 if (!panel_info.target_connections_referenced_by.empty()) {
378 for (const HumanPanel& panel : panel_info.definitions) {
379 if (panel.has_required_door()) {
380 std::cout << "Panel " << panel_identifier.ShortDebugString()
381 << " has a required door but is the target of a connection:"
382 << std::endl;
383
384 for (const HumanConnection& connection :
385 panel_info.target_connections_referenced_by) {
386 std::cout << " CONNECTION " << connection.ShortDebugString()
387 << std::endl;
388 }
389 }
390 }
391 }
343 } 392 }
344 393
345 void ValidateKeyholder(const KeyholderIdentifier& keyholder_identifier, 394 void ValidateKeyholder(const KeyholderIdentifier& keyholder_identifier,
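
The three new checks apply the same rule to ports, paintings, and panels: anything that declares required_door is reported if it is also recorded as the target of a connection, i.e. a connection that did not opt out via bypass_target_door (or, for its from endpoint, oneway). A hypothetical pair of definitions in protobuf text format that would now be flagged; required_door, from, to, room, and painting come from this diff, while the wrapper fields and all names are illustrative:

    # In a rooms/*.txtpb file: a painting gated behind a door.
    paintings {
      name: "example_painting"
      required_door {}   # presence of required_door is what the check looks for
    }

    # In connections.txtpb (surrounding structure elided): a two-way
    # connection targeting that painting.
    from { room: "Example Room" }
    to { painting: "example_painting" }

For this input the validator prints the "has a required door but is the target of a connection" message followed by one CONNECTION line per offending connection, as implemented in ValidatePainting above.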
diff --git a/vendor/godobuf/LICENSE b/vendor/godobuf/LICENSE new file mode 100644 index 0000000..5d473d8 --- /dev/null +++ b/vendor/godobuf/LICENSE
@@ -0,0 +1,29 @@
1BSD 3-Clause License
2
3Copyright (c) 2018, oniksan
4All rights reserved.
5
6Redistribution and use in source and binary forms, with or without
7modification, are permitted provided that the following conditions are met:
8
9* Redistributions of source code must retain the above copyright notice, this
10 list of conditions and the following disclaimer.
11
12* Redistributions in binary form must reproduce the above copyright notice,
13 this list of conditions and the following disclaimer in the documentation
14 and/or other materials provided with the distribution.
15
16* Neither the name of the copyright holder nor the names of its
17 contributors may be used to endorse or promote products derived from
18 this software without specific prior written permission.
19
20THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
21AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
22IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
23DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
24FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
25DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
26SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
27CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
28OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
29OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/godobuf/README b/vendor/godobuf/README new file mode 100644 index 0000000..ce716bb --- /dev/null +++ b/vendor/godobuf/README
@@ -0,0 +1,4 @@
1This is a fork of https://github.com/oniksan/godobuf with some minor changes so
2that it is able to compile the Lingo 2 randomizer proto files. The plugin parts
3of the project have also been removed since we only need the command line
4script.
diff --git a/vendor/godobuf/addons/protobuf/parser.gd b/vendor/godobuf/addons/protobuf/parser.gd new file mode 100644 index 0000000..dfc0bdd --- /dev/null +++ b/vendor/godobuf/addons/protobuf/parser.gd
@@ -0,0 +1,2254 @@
1#
2# BSD 3-Clause License
3#
4# Copyright (c) 2018 - 2023, Oleg Malyavkin
5# All rights reserved.
6#
7# Redistribution and use in source and binary forms, with or without
8# modification, are permitted provided that the following conditions are met:
9#
10# * Redistributions of source code must retain the above copyright notice, this
11# list of conditions and the following disclaimer.
12#
13# * Redistributions in binary form must reproduce the above copyright notice,
14# this list of conditions and the following disclaimer in the documentation
15# and/or other materials provided with the distribution.
16#
17# * Neither the name of the copyright holder nor the names of its
18# contributors may be used to endorse or promote products derived from
19# this software without specific prior written permission.
20#
21# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
22# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
23# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
24# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
25# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
26# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
27# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
28# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
29# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
30# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
31
32extends Node
33
34const PROTO_VERSION_CONST : String = "const PROTO_VERSION = "
35const PROTO_VERSION_DEFAULT : String = PROTO_VERSION_CONST + "0"
36
37class Document:
38
39 func _init(doc_name : String, doc_text : String):
40 name = doc_name
41 text = doc_text
42
43 var name : String
44 var text : String
45
46class TokenPosition:
47 func _init(b : int, e : int):
48 begin = b
49 end = e
50 var begin : int = 0
51 var end : int = 0
52
53class Helper:
54
55 class StringPosition:
56 func _init(s : int, c : int, l : int):
57 str_num = s
58 column = c
59 length = l
60 var str_num : int
61 var column : int
62 var length : int
63
64 static func str_pos(text : String, position : TokenPosition) -> StringPosition:
65 var cur_str : int = 1
66 var cur_col : int = 1
67 var res_str : int = 0
68 var res_col : int = 0
69 var res_length : int = 0
70 for i in range(text.length()):
71 if text[i] == "\n":
72 cur_str += 1
73 cur_col = 0
74 if position.begin == i:
75 res_str = cur_str
76 res_col = cur_col
77 res_length = position.end - position.begin + 1
78 break
79 cur_col += 1
80 return StringPosition.new(res_str, res_col, res_length)
81
82 static func text_pos(tokens : Array, index : int) -> TokenPosition:
83 var res_begin : int = 0
84 var res_end : int = 0
85 if index < tokens.size() && index >= 0:
86 res_begin = tokens[index].position.begin
87 res_end = tokens[index].position.end
88 return TokenPosition.new(res_begin, res_end)
89
90 static func error_string(file_name, col, row, error_text):
91 return file_name + ":" + str(col) + ":" + str(row) + ": error: " + error_text
92
93class AnalyzeResult:
94 var classes : Array = []
95 var fields : Array = []
96 var groups : Array = []
97 var version : int = 0
98 var state : bool = false
99 var tokens : Array = []
100 var syntax : Analysis.TranslationResult
101 var imports : Array = []
102 var doc : Document
103
104 func soft_copy() -> AnalyzeResult:
105 var res : AnalyzeResult = AnalyzeResult.new()
106 res.classes = classes
107 res.fields = fields
108 res.groups = groups
109 res.version = version
110 res.state = state
111 res.tokens = tokens
112 res.syntax = syntax
113 res.imports = imports
114 res.doc = doc
115 return res
116
117class Analysis:
118
119 func _init(path : String, doc : Document):
120 path_dir = path
121 document = doc
122
123 var document : Document
124 var path_dir : String
125
126 const LEX = {
127 LETTER = "[A-Za-z]",
128 DIGIT_DEC = "[0-9]",
129 DIGIT_OCT = "[0-7]",
130 DIGIT_HEX = "[0-9]|[A-F]|[a-f]",
131 BRACKET_ROUND_LEFT = "\\(",
132 BRACKET_ROUND_RIGHT = "\\)",
133 BRACKET_CURLY_LEFT = "\\{",
134 BRACKET_CURLY_RIGHT = "\\}",
135 BRACKET_SQUARE_LEFT = "\\[",
136 BRACKET_SQUARE_RIGHT = "\\]",
137 BRACKET_ANGLE_LEFT = "\\<",
138 BRACKET_ANGLE_RIGHT = "\\>",
139 SEMICOLON = ";",
140 COMMA = ",",
141 EQUAL = "=",
142 SIGN = "\\+|\\-",
143 SPACE = "\\s",
144 QUOTE_SINGLE = "'",
145 QUOTE_DOUBLE = "\"",
146 }
147
148 const TOKEN_IDENT : String = "(" + LEX.LETTER + "+" + "(" + LEX.LETTER + "|" + LEX.DIGIT_DEC + "|" + "_)*)"
149 const TOKEN_FULL_IDENT : String = TOKEN_IDENT + "{0,1}(\\." + TOKEN_IDENT + ")+"
150 const TOKEN_BRACKET_ROUND_LEFT : String = "(" + LEX.BRACKET_ROUND_LEFT + ")"
151 const TOKEN_BRACKET_ROUND_RIGHT : String = "(" + LEX.BRACKET_ROUND_RIGHT + ")"
152 const TOKEN_BRACKET_CURLY_LEFT : String = "(" + LEX.BRACKET_CURLY_LEFT + ")"
153 const TOKEN_BRACKET_CURLY_RIGHT : String = "(" + LEX.BRACKET_CURLY_RIGHT + ")"
154 const TOKEN_BRACKET_SQUARE_LEFT : String = "(" + LEX.BRACKET_SQUARE_LEFT + ")"
155 const TOKEN_BRACKET_SQUARE_RIGHT : String = "(" + LEX.BRACKET_SQUARE_RIGHT + ")"
156 const TOKEN_BRACKET_ANGLE_LEFT : String = "(" + LEX.BRACKET_ANGLE_LEFT + ")"
157 const TOKEN_BRACKET_ANGLE_RIGHT : String = "(" + LEX.BRACKET_ANGLE_RIGHT + ")"
158 const TOKEN_SEMICOLON : String = "(" + LEX.SEMICOLON + ")"
159 const TOKEN_EUQAL : String = "(" + LEX.EQUAL + ")"
160 const TOKEN_SIGN : String = "(" + LEX.SIGN + ")"
161 const TOKEN_LITERAL_DEC : String = "(([1-9])" + LEX.DIGIT_DEC +"*)"
162 const TOKEN_LITERAL_OCT : String = "(0" + LEX.DIGIT_OCT +"*)"
163 const TOKEN_LITERAL_HEX : String = "(0(x|X)(" + LEX.DIGIT_HEX +")+)"
164 const TOKEN_LITERAL_INT : String = "((\\+|\\-){0,1}" + TOKEN_LITERAL_DEC + "|" + TOKEN_LITERAL_OCT + "|" + TOKEN_LITERAL_HEX + ")"
165 const TOKEN_LITERAL_FLOAT_DEC : String = "(" + LEX.DIGIT_DEC + "+)"
166 const TOKEN_LITERAL_FLOAT_EXP : String = "((e|E)(\\+|\\-)?" + TOKEN_LITERAL_FLOAT_DEC + "+)"
167 const TOKEN_LITERAL_FLOAT : String = "((\\+|\\-){0,1}(" + TOKEN_LITERAL_FLOAT_DEC + "\\." + TOKEN_LITERAL_FLOAT_DEC + "?" + TOKEN_LITERAL_FLOAT_EXP + "?)|(" + TOKEN_LITERAL_FLOAT_DEC + TOKEN_LITERAL_FLOAT_EXP + ")|(\\." + TOKEN_LITERAL_FLOAT_DEC + TOKEN_LITERAL_FLOAT_EXP + "?))"
168 const TOKEN_SPACE : String = "(" + LEX.SPACE + ")+"
169 const TOKEN_COMMA : String = "(" + LEX.COMMA + ")"
170 const TOKEN_CHAR_ESC : String = "[\\\\(a|b|f|n|r|t|v|\\\\|'|\")]"
171 const TOKEN_OCT_ESC : String = "[\\\\" + LEX.DIGIT_OCT + "{3}]"
172 const TOKEN_HEX_ESC : String = "[\\\\(x|X)" + LEX.DIGIT_HEX + "{2}]"
173 const TOKEN_CHAR_EXCLUDE : String = "[^\\0\\n\\\\]"
174 const TOKEN_CHAR_VALUE : String = "(" + TOKEN_HEX_ESC + "|" + TOKEN_OCT_ESC + "|" + TOKEN_CHAR_ESC + "|" + TOKEN_CHAR_EXCLUDE + ")"
175 const TOKEN_STRING_SINGLE : String = "('" + TOKEN_CHAR_VALUE + "*?')"
176 const TOKEN_STRING_DOUBLE : String = "(\"" + TOKEN_CHAR_VALUE + "*?\")"
177 const TOKEN_COMMENT_SINGLE : String = "((//[^\\n\\r]*[^\\s])|//)"
178 const TOKEN_COMMENT_MULTI : String = "/\\*(.|[\\n\\r])*?\\*/"
179
180 const TOKEN_SECOND_MESSAGE : String = "^message$"
181 const TOKEN_SECOND_SIMPLE_DATA_TYPE : String = "^(double|float|int32|int64|uint32|uint64|sint32|sint64|fixed32|fixed64|sfixed32|sfixed64|bool|string|bytes)$"
182 const TOKEN_SECOND_ENUM : String = "^enum$"
183 const TOKEN_SECOND_MAP : String = "^map$"
184 const TOKEN_SECOND_ONEOF : String = "^oneof$"
185 const TOKEN_SECOND_LITERAL_BOOL : String = "^(true|false)$"
186 const TOKEN_SECOND_SYNTAX : String = "^syntax$"
187 const TOKEN_SECOND_IMPORT : String = "^import$"
188 const TOKEN_SECOND_PACKAGE : String = "^package$"
189 const TOKEN_SECOND_OPTION : String = "^option$"
190 const TOKEN_SECOND_SERVICE : String = "^service$"
191 const TOKEN_SECOND_RESERVED : String = "^reserved$"
192 const TOKEN_SECOND_IMPORT_QUALIFICATION : String = "^(weak|public)$"
193 const TOKEN_SECOND_FIELD_QUALIFICATION : String = "^(repeated|required|optional)$"
194 const TOKEN_SECOND_ENUM_OPTION : String = "^allow_alias$"
195 const TOKEN_SECOND_QUALIFICATION : String = "^(custom_option|extensions)$"
196 const TOKEN_SECOND_FIELD_OPTION : String = "^packed$"
197
198 class TokenEntrance:
199 func _init(i : int, b : int, e : int, t : String):
200 position = TokenPosition.new(b, e)
201 text = t
202 id = i
203 var position : TokenPosition
204 var text : String
205 var id : int
206
207 enum RANGE_STATE {
208 INCLUDE = 0,
209 EXCLUDE_LEFT = 1,
210 EXCLUDE_RIGHT = 2,
211 OVERLAY = 3,
212 EQUAL = 4,
213 ENTERS = 5
214 }
215
216 class TokenRange:
217 func _init(b : int, e : int, s):
218 position = TokenPosition.new(b, e)
219 state = s
220 var position : TokenPosition
221 var state
222
223 class Token:
224 var _regex : RegEx
225 var _entrance : TokenEntrance = null
226 var _entrances : Array = []
227 var _entrance_index : int = 0
228 var _id : int
229 var _ignore : bool
230 var _clarification : String
231
232 func _init(id : int, clarification : String, regex_str : String, ignore = false):
233 _id = id
234 _regex = RegEx.new()
235 _regex.compile(regex_str)
236 _clarification = clarification
237 _ignore = ignore
238
239 func find(text : String, start : int) -> TokenEntrance:
240 _entrance = null
241 if !_regex.is_valid():
242 return null
243 var match_result : RegExMatch = _regex.search(text, start)
244 if match_result != null:
245 var capture
246 capture = match_result.get_string(0)
247 if capture.is_empty():
248 return null
249 _entrance = TokenEntrance.new(_id, match_result.get_start(0), capture.length() - 1 + match_result.get_start(0), capture)
250 return _entrance
251
252 func find_all(text : String) -> Array:
253 var pos : int = 0
254 clear()
255 while find(text, pos) != null:
256 _entrances.append(_entrance)
257 pos = _entrance.position.end + 1
258 return _entrances
259
260 func add_entrance(entrance) -> void:
261 _entrances.append(entrance)
262
263 func clear() -> void:
264 _entrance = null
265 _entrances = []
266 _entrance_index = 0
267
268 func get_entrances() -> Array:
269 return _entrances
270
271 func remove_entrance(index) -> void:
272 if index < _entrances.size():
273 _entrances.remove_at(index)
274
275 func get_index() -> int:
276 return _entrance_index
277
278 func set_index(index : int) -> void:
279 if index < _entrances.size():
280 _entrance_index = index
281 else:
282 _entrance_index = 0
283
284 func is_ignore() -> bool:
285 return _ignore
286
287 func get_clarification() -> String:
288 return _clarification
289
290 class TokenResult:
291 var tokens : Array = []
292 var errors : Array = []
293
294 enum TOKEN_ID {
295 UNDEFINED = -1,
296 IDENT = 0,
297 FULL_IDENT = 1,
298 BRACKET_ROUND_LEFT = 2,
299 BRACKET_ROUND_RIGHT = 3,
300 BRACKET_CURLY_LEFT = 4,
301 BRACKET_CURLY_RIGHT = 5,
302 BRACKET_SQUARE_LEFT = 6,
303 BRACKET_SQUARE_RIGHT = 7,
304 BRACKET_ANGLE_LEFT = 8,
305 BRACKET_ANGLE_RIGHT = 9,
306 SEMICOLON = 10,
307 EUQAL = 11,
308 SIGN = 12,
309 INT = 13,
310 FLOAT = 14,
311 SPACE = 15,
312 COMMA = 16,
313 STRING_SINGLE = 17,
314 STRING_DOUBLE = 18,
315 COMMENT_SINGLE = 19,
316 COMMENT_MULTI = 20,
317
318 MESSAGE = 21,
319 SIMPLE_DATA_TYPE = 22,
320 ENUM = 23,
321 MAP = 24,
322 ONEOF = 25,
323 LITERAL_BOOL = 26,
324 SYNTAX = 27,
325 IMPORT = 28,
326 PACKAGE = 29,
327 OPTION = 30,
328 SERVICE = 31,
329 RESERVED = 32,
330 IMPORT_QUALIFICATION = 33,
331 FIELD_QUALIFICATION = 34,
332 ENUM_OPTION = 35,
333 QUALIFICATION = 36,
334 FIELD_OPTION = 37,
335
336 STRING = 38
337 }
338
339 var TOKEN = {
340 TOKEN_ID.IDENT: Token.new(TOKEN_ID.IDENT, "Identifier", TOKEN_IDENT),
341 TOKEN_ID.FULL_IDENT: Token.new(TOKEN_ID.FULL_IDENT, "Full identifier", TOKEN_FULL_IDENT),
342 TOKEN_ID.BRACKET_ROUND_LEFT: Token.new(TOKEN_ID.BRACKET_ROUND_LEFT, "(", TOKEN_BRACKET_ROUND_LEFT),
343 TOKEN_ID.BRACKET_ROUND_RIGHT: Token.new(TOKEN_ID.BRACKET_ROUND_RIGHT, ")", TOKEN_BRACKET_ROUND_RIGHT),
344 TOKEN_ID.BRACKET_CURLY_LEFT: Token.new(TOKEN_ID.BRACKET_CURLY_LEFT, "{", TOKEN_BRACKET_CURLY_LEFT),
345 TOKEN_ID.BRACKET_CURLY_RIGHT: Token.new(TOKEN_ID.BRACKET_CURLY_RIGHT, "}", TOKEN_BRACKET_CURLY_RIGHT),
346 TOKEN_ID.BRACKET_SQUARE_LEFT: Token.new(TOKEN_ID.BRACKET_SQUARE_LEFT, "[", TOKEN_BRACKET_SQUARE_LEFT),
347 TOKEN_ID.BRACKET_SQUARE_RIGHT: Token.new(TOKEN_ID.BRACKET_SQUARE_RIGHT, "]", TOKEN_BRACKET_SQUARE_RIGHT),
348 TOKEN_ID.BRACKET_ANGLE_LEFT: Token.new(TOKEN_ID.BRACKET_ANGLE_LEFT, "<", TOKEN_BRACKET_ANGLE_LEFT),
349 TOKEN_ID.BRACKET_ANGLE_RIGHT: Token.new(TOKEN_ID.BRACKET_ANGLE_RIGHT, ">", TOKEN_BRACKET_ANGLE_RIGHT),
350 TOKEN_ID.SEMICOLON: Token.new(TOKEN_ID.SEMICOLON, ";", TOKEN_SEMICOLON),
351 TOKEN_ID.EUQAL: Token.new(TOKEN_ID.EUQAL, "=", TOKEN_EUQAL),
352 TOKEN_ID.INT: Token.new(TOKEN_ID.INT, "Integer", TOKEN_LITERAL_INT),
353 TOKEN_ID.FLOAT: Token.new(TOKEN_ID.FLOAT, "Float", TOKEN_LITERAL_FLOAT),
354 TOKEN_ID.SPACE: Token.new(TOKEN_ID.SPACE, "Space", TOKEN_SPACE),
355 TOKEN_ID.COMMA: Token.new(TOKEN_ID.COMMA, ",", TOKEN_COMMA),
356 TOKEN_ID.STRING_SINGLE: Token.new(TOKEN_ID.STRING_SINGLE, "'String'", TOKEN_STRING_SINGLE),
357 TOKEN_ID.STRING_DOUBLE: Token.new(TOKEN_ID.STRING_DOUBLE, "\"String\"", TOKEN_STRING_DOUBLE),
358 TOKEN_ID.COMMENT_SINGLE: Token.new(TOKEN_ID.COMMENT_SINGLE, "//Comment", TOKEN_COMMENT_SINGLE),
359 TOKEN_ID.COMMENT_MULTI: Token.new(TOKEN_ID.COMMENT_MULTI, "/*Comment*/", TOKEN_COMMENT_MULTI),
360
361 TOKEN_ID.MESSAGE: Token.new(TOKEN_ID.MESSAGE, "Message", TOKEN_SECOND_MESSAGE, true),
362 TOKEN_ID.SIMPLE_DATA_TYPE: Token.new(TOKEN_ID.SIMPLE_DATA_TYPE, "Data type", TOKEN_SECOND_SIMPLE_DATA_TYPE, true),
363 TOKEN_ID.ENUM: Token.new(TOKEN_ID.ENUM, "Enum", TOKEN_SECOND_ENUM, true),
364 TOKEN_ID.MAP: Token.new(TOKEN_ID.MAP, "Map", TOKEN_SECOND_MAP, true),
365 TOKEN_ID.ONEOF: Token.new(TOKEN_ID.ONEOF, "OneOf", TOKEN_SECOND_ONEOF, true),
366 TOKEN_ID.LITERAL_BOOL: Token.new(TOKEN_ID.LITERAL_BOOL, "Bool literal", TOKEN_SECOND_LITERAL_BOOL, true),
367 TOKEN_ID.SYNTAX: Token.new(TOKEN_ID.SYNTAX, "Syntax", TOKEN_SECOND_SYNTAX, true),
368 TOKEN_ID.IMPORT: Token.new(TOKEN_ID.IMPORT, "Import", TOKEN_SECOND_IMPORT, true),
369 TOKEN_ID.PACKAGE: Token.new(TOKEN_ID.PACKAGE, "Package", TOKEN_SECOND_PACKAGE, true),
370 TOKEN_ID.OPTION: Token.new(TOKEN_ID.OPTION, "Option", TOKEN_SECOND_OPTION, true),
371 TOKEN_ID.SERVICE: Token.new(TOKEN_ID.SERVICE, "Service", TOKEN_SECOND_SERVICE, true),
372 TOKEN_ID.RESERVED: Token.new(TOKEN_ID.RESERVED, "Reserved", TOKEN_SECOND_RESERVED, true),
373 TOKEN_ID.IMPORT_QUALIFICATION: Token.new(TOKEN_ID.IMPORT_QUALIFICATION, "Import qualification", TOKEN_SECOND_IMPORT_QUALIFICATION, true),
374 TOKEN_ID.FIELD_QUALIFICATION: Token.new(TOKEN_ID.FIELD_QUALIFICATION, "Field qualification", TOKEN_SECOND_FIELD_QUALIFICATION, true),
375 TOKEN_ID.ENUM_OPTION: Token.new(TOKEN_ID.ENUM_OPTION, "Enum option", TOKEN_SECOND_ENUM_OPTION, true),
376 TOKEN_ID.QUALIFICATION: Token.new(TOKEN_ID.QUALIFICATION, "Qualification", TOKEN_SECOND_QUALIFICATION, true),
377 TOKEN_ID.FIELD_OPTION: Token.new(TOKEN_ID.FIELD_OPTION, "Field option", TOKEN_SECOND_FIELD_OPTION, true),
378
379 TOKEN_ID.STRING: Token.new(TOKEN_ID.STRING, "String", "", true)
380 }
381
382 static func check_range(main : TokenEntrance, current : TokenEntrance) -> TokenRange:
383 if main.position.begin > current.position.begin:
384 if main.position.end > current.position.end:
385 if main.position.begin >= current.position.end:
386 return TokenRange.new(current.position.begin, current.position.end, RANGE_STATE.EXCLUDE_LEFT)
387 else:
388 return TokenRange.new(main.position.begin, current.position.end, RANGE_STATE.OVERLAY)
389 else:
390 return TokenRange.new(current.position.begin, current.position.end, RANGE_STATE.ENTERS)
391 elif main.position.begin < current.position.begin:
392 if main.position.end >= current.position.end:
393 return TokenRange.new(main.position.begin, main.position.end, RANGE_STATE.INCLUDE)
394 else:
395 if main.position.end < current.position.begin:
396 return TokenRange.new(main.position.begin, main.position.end, RANGE_STATE.EXCLUDE_RIGHT)
397 else:
398 return TokenRange.new(main.position.begin, current.position.end, RANGE_STATE.OVERLAY)
399 else:
400 if main.position.end == current.position.end:
401 return TokenRange.new(main.position.begin, main.position.end, RANGE_STATE.EQUAL)
402 elif main.position.end > current.position.end:
403 return TokenRange.new(main.position.begin, main.position.end, RANGE_STATE.INCLUDE)
404 else:
405 return TokenRange.new(current.position.begin, current.position.end, RANGE_STATE.ENTERS)
406
407 func tokenizer() -> TokenResult:
408 for k in TOKEN:
409 if !TOKEN[k].is_ignore():
410 TOKEN[k].find_all(document.text)
411 var second_tokens : Array = []
412 second_tokens.append(TOKEN[TOKEN_ID.MESSAGE])
413 second_tokens.append(TOKEN[TOKEN_ID.SIMPLE_DATA_TYPE])
414 second_tokens.append(TOKEN[TOKEN_ID.ENUM])
415 second_tokens.append(TOKEN[TOKEN_ID.MAP])
416 second_tokens.append(TOKEN[TOKEN_ID.ONEOF])
417 second_tokens.append(TOKEN[TOKEN_ID.LITERAL_BOOL])
418 second_tokens.append(TOKEN[TOKEN_ID.SYNTAX])
419 second_tokens.append(TOKEN[TOKEN_ID.IMPORT])
420 second_tokens.append(TOKEN[TOKEN_ID.PACKAGE])
421 second_tokens.append(TOKEN[TOKEN_ID.OPTION])
422 second_tokens.append(TOKEN[TOKEN_ID.SERVICE])
423 second_tokens.append(TOKEN[TOKEN_ID.RESERVED])
424 second_tokens.append(TOKEN[TOKEN_ID.IMPORT_QUALIFICATION])
425 second_tokens.append(TOKEN[TOKEN_ID.FIELD_QUALIFICATION])
426 second_tokens.append(TOKEN[TOKEN_ID.ENUM_OPTION])
427 second_tokens.append(TOKEN[TOKEN_ID.QUALIFICATION])
428 second_tokens.append(TOKEN[TOKEN_ID.FIELD_OPTION])
429
430 var ident_token : Token = TOKEN[TOKEN_ID.IDENT]
431 for sec_token in second_tokens:
432 var remove_indexes : Array = []
433 for i in range(ident_token.get_entrances().size()):
434 var entrance : TokenEntrance = sec_token.find(ident_token.get_entrances()[i].text, 0)
435 if entrance != null:
436 entrance.position.begin = ident_token.get_entrances()[i].position.begin
437 entrance.position.end = ident_token.get_entrances()[i].position.end
438 sec_token.add_entrance(entrance)
439 remove_indexes.append(i)
440 for i in range(remove_indexes.size()):
441 ident_token.remove_entrance(remove_indexes[i] - i)
442 for v in TOKEN[TOKEN_ID.STRING_DOUBLE].get_entrances():
443 v.id = TOKEN_ID.STRING
444 TOKEN[TOKEN_ID.STRING].add_entrance(v)
445 TOKEN[TOKEN_ID.STRING_DOUBLE].clear()
446 for v in TOKEN[TOKEN_ID.STRING_SINGLE].get_entrances():
447 v.id = TOKEN_ID.STRING
448 TOKEN[TOKEN_ID.STRING].add_entrance(v)
449 TOKEN[TOKEN_ID.STRING_SINGLE].clear()
450 var main_token : TokenEntrance
451 var cur_token : TokenEntrance
452 var main_index : int = -1
453 var token_index_flag : bool = false
454 var result : TokenResult = TokenResult.new()
455 var check : TokenRange
456 var end : bool = false
457 var all : bool = false
458 var repeat : bool = false
459 while true:
460 all = true
461 for k in TOKEN:
462 if main_index == k:
463 continue
464 repeat = false
465 while TOKEN[k].get_entrances().size() > 0:
466 all = false
467 if !token_index_flag:
468 main_index = k
469 main_token = TOKEN[main_index].get_entrances()[0]
470 token_index_flag = true
471 break
472 else:
473 cur_token = TOKEN[k].get_entrances()[0]
474 check = check_range(main_token, cur_token)
475 if check.state == RANGE_STATE.INCLUDE:
476 TOKEN[k].remove_entrance(0)
477 end = true
478 elif check.state == RANGE_STATE.EXCLUDE_LEFT:
479 main_token = cur_token
480 main_index = k
481 end = false
482 repeat = true
483 break
484 elif check.state == RANGE_STATE.EXCLUDE_RIGHT:
485 end = true
486 break
487 elif check.state == RANGE_STATE.OVERLAY || check.state == RANGE_STATE.EQUAL:
488 result.errors.append(check)
489 TOKEN[main_index].remove_entrance(0)
490 TOKEN[k].remove_entrance(0)
491 token_index_flag = false
492 end = false
493 repeat = true
494 break
495 elif check.state == RANGE_STATE.ENTERS:
496 TOKEN[main_index].remove_entrance(0)
497 main_token = cur_token
498 main_index = k
499 end = false
500 repeat = true
501 break
502 if repeat:
503 break
504 if end:
505 if TOKEN[main_index].get_entrances().size() > 0:
506 result.tokens.append(main_token)
507 TOKEN[main_index].remove_entrance(0)
508 token_index_flag = false
509 if all:
510 break
511 return result
512
513 static func check_tokens_integrity(tokens : Array, end : int) -> Array:
514 var cur_index : int = 0
515 var result : Array = []
516 for v in tokens:
517 if v.position.begin > cur_index:
518 result.append(TokenPosition.new(cur_index, v.position.begin))
519 cur_index = v.position.end + 1
520 if cur_index < end:
521 result.append(TokenPosition.new(cur_index, end))
522 return result
523
524 static func comment_space_processing(tokens : Array) -> void:
525 var remove_indexes : Array = []
526 for i in range(tokens.size()):
527 if tokens[i].id == TOKEN_ID.COMMENT_SINGLE || tokens[i].id == TOKEN_ID.COMMENT_MULTI:
528 tokens[i].id = TOKEN_ID.SPACE
529 var space_index : int = -1
530 for i in range(tokens.size()):
531 if tokens[i].id == TOKEN_ID.SPACE:
532 if space_index >= 0:
533 tokens[space_index].position.end = tokens[i].position.end
534 tokens[space_index].text = tokens[space_index].text + tokens[i].text
535 remove_indexes.append(i)
536 else:
537 space_index = i
538 else:
539 space_index = -1
540 for i in range(remove_indexes.size()):
541 tokens.remove_at(remove_indexes[i] - i)
542
543 #Analysis rule
544 enum AR {
545 MAYBE = 1,
546 MUST_ONE = 2,
547 ANY = 3,
548 OR = 4,
549 MAYBE_BEGIN = 5,
550 MAYBE_END = 6,
551 ANY_BEGIN = 7,
552 ANY_END = 8
553 }
554
555 #Space rule (space after token)
556 enum SP {
557 MAYBE = 1,
558 MUST = 2,
559 NO = 3
560 }
561
562 #Analysis Syntax Description
563 class ASD:
564 func _init(t, s : int = SP.MAYBE, r : int = AR.MUST_ONE, i : bool = false):
565 token = t
566 space = s
567 rule = r
568 importance = i
569 var token
570 var space : int
571 var rule : int
572 var importance : bool
573
574 var TEMPLATE_SYNTAX : Array = [
575 Callable(self, "desc_syntax"),
576 ASD.new(TOKEN_ID.SYNTAX),
577 ASD.new(TOKEN_ID.EUQAL),
578 ASD.new(TOKEN_ID.STRING, SP.MAYBE, AR.MUST_ONE, true),
579 ASD.new(TOKEN_ID.SEMICOLON)
580 ]
581
582 var TEMPLATE_IMPORT : Array = [
583 Callable(self, "desc_import"),
584 ASD.new(TOKEN_ID.IMPORT, SP.MUST),
585 ASD.new(TOKEN_ID.IMPORT_QUALIFICATION, SP.MUST, AR.MAYBE, true),
586 ASD.new(TOKEN_ID.STRING, SP.MAYBE, AR.MUST_ONE, true),
587 ASD.new(TOKEN_ID.SEMICOLON)
588 ]
589
590 var TEMPLATE_PACKAGE : Array = [
591 Callable(self, "desc_package"),
592 ASD.new(TOKEN_ID.PACKAGE, SP.MUST),
593 ASD.new([TOKEN_ID.IDENT, TOKEN_ID.FULL_IDENT], SP.MAYBE, AR.OR, true),
594 ASD.new(TOKEN_ID.SEMICOLON)
595 ]
596
597 var TEMPLATE_OPTION : Array = [
598 Callable(self, "desc_option"),
599 ASD.new(TOKEN_ID.OPTION, SP.MUST),
600 ASD.new([TOKEN_ID.IDENT, TOKEN_ID.FULL_IDENT], SP.MAYBE, AR.OR, true),
601 ASD.new(TOKEN_ID.EUQAL),
602 ASD.new([TOKEN_ID.STRING, TOKEN_ID.INT, TOKEN_ID.FLOAT, TOKEN_ID.LITERAL_BOOL], SP.MAYBE, AR.OR, true),
603 ASD.new(TOKEN_ID.SEMICOLON)
604 ]
605
606 var TEMPLATE_FIELD : Array = [
607 Callable(self, "desc_field"),
608 ASD.new(TOKEN_ID.FIELD_QUALIFICATION, SP.MUST, AR.MAYBE, true),
609 ASD.new([TOKEN_ID.SIMPLE_DATA_TYPE, TOKEN_ID.IDENT, TOKEN_ID.FULL_IDENT], SP.MAYBE, AR.OR, true),
610 ASD.new(TOKEN_ID.IDENT, SP.MAYBE, AR.MUST_ONE, true),
611 ASD.new(TOKEN_ID.EUQAL),
612 ASD.new(TOKEN_ID.INT, SP.MAYBE, AR.MUST_ONE, true),
613 ASD.new(TOKEN_ID.BRACKET_SQUARE_LEFT, SP.MAYBE, AR.MAYBE_BEGIN),
614 ASD.new(TOKEN_ID.FIELD_OPTION, SP.MAYBE, AR.MUST_ONE, true),
615 ASD.new(TOKEN_ID.EUQAL),
616 ASD.new(TOKEN_ID.LITERAL_BOOL, SP.MAYBE, AR.MUST_ONE, true),
617 ASD.new(TOKEN_ID.BRACKET_SQUARE_RIGHT, SP.MAYBE, AR.MAYBE_END),
618 ASD.new(TOKEN_ID.SEMICOLON)
619 ]
620
621 var TEMPLATE_FIELD_ONEOF : Array = TEMPLATE_FIELD
622
623 var TEMPLATE_MAP_FIELD : Array = [
624 Callable(self, "desc_map_field"),
625 ASD.new(TOKEN_ID.MAP),
626 ASD.new(TOKEN_ID.BRACKET_ANGLE_LEFT),
627 ASD.new(TOKEN_ID.SIMPLE_DATA_TYPE, SP.MAYBE, AR.MUST_ONE, true),
628 ASD.new(TOKEN_ID.COMMA),
629 ASD.new([TOKEN_ID.SIMPLE_DATA_TYPE, TOKEN_ID.IDENT, TOKEN_ID.FULL_IDENT], SP.MAYBE, AR.OR, true),
630 ASD.new(TOKEN_ID.BRACKET_ANGLE_RIGHT, SP.MUST),
631 ASD.new(TOKEN_ID.IDENT, SP.MAYBE, AR.MUST_ONE, true),
632 ASD.new(TOKEN_ID.EUQAL),
633 ASD.new(TOKEN_ID.INT, SP.MAYBE, AR.MUST_ONE, true),
634 ASD.new(TOKEN_ID.BRACKET_SQUARE_LEFT, SP.MAYBE, AR.MAYBE_BEGIN),
635 ASD.new(TOKEN_ID.FIELD_OPTION, SP.MAYBE, AR.MUST_ONE, true),
636 ASD.new(TOKEN_ID.EUQAL),
637 ASD.new(TOKEN_ID.LITERAL_BOOL, SP.MAYBE, AR.MUST_ONE, true),
638 ASD.new(TOKEN_ID.BRACKET_SQUARE_RIGHT, SP.MAYBE, AR.MAYBE_END),
639 ASD.new(TOKEN_ID.SEMICOLON)
640 ]
641
642 var TEMPLATE_MAP_FIELD_ONEOF : Array = TEMPLATE_MAP_FIELD
643
644 var TEMPLATE_ENUM : Array = [
645 Callable(self, "desc_enum"),
646 ASD.new(TOKEN_ID.ENUM, SP.MUST),
647 ASD.new(TOKEN_ID.IDENT, SP.MAYBE, AR.MUST_ONE, true),
648 ASD.new(TOKEN_ID.BRACKET_CURLY_LEFT),
649 ASD.new(TOKEN_ID.OPTION, SP.MUST, AR.MAYBE_BEGIN),
650 ASD.new(TOKEN_ID.ENUM_OPTION, SP.MAYBE, AR.MUST_ONE, true),
651 ASD.new(TOKEN_ID.EUQAL),
652 ASD.new(TOKEN_ID.LITERAL_BOOL, SP.MAYBE, AR.MUST_ONE, true),
653 ASD.new(TOKEN_ID.SEMICOLON, SP.MAYBE, AR.MAYBE_END),
654 ASD.new(TOKEN_ID.IDENT, SP.MAYBE, AR.ANY_BEGIN, true),
655 ASD.new(TOKEN_ID.EUQAL),
656 ASD.new(TOKEN_ID.INT, SP.MAYBE, AR.MUST_ONE, true),
657 ASD.new(TOKEN_ID.SEMICOLON, SP.MAYBE, AR.ANY_END),
658 ASD.new(TOKEN_ID.BRACKET_CURLY_RIGHT)
659 ]
660
661 var TEMPLATE_MESSAGE_HEAD : Array = [
662 Callable(self, "desc_message_head"),
663 ASD.new(TOKEN_ID.MESSAGE, SP.MUST),
664 ASD.new(TOKEN_ID.IDENT, SP.MAYBE, AR.MUST_ONE, true),
665 ASD.new(TOKEN_ID.BRACKET_CURLY_LEFT)
666 ]
667
668 var TEMPLATE_MESSAGE_TAIL : Array = [
669 Callable(self, "desc_message_tail"),
670 ASD.new(TOKEN_ID.BRACKET_CURLY_RIGHT)
671 ]
672
673 var TEMPLATE_ONEOF_HEAD : Array = [
674 Callable(self, "desc_oneof_head"),
675 ASD.new(TOKEN_ID.ONEOF, SP.MUST),
676 ASD.new(TOKEN_ID.IDENT, SP.MAYBE, AR.MUST_ONE, true),
677 ASD.new(TOKEN_ID.BRACKET_CURLY_LEFT),
678 ]
679
680 var TEMPLATE_ONEOF_TAIL : Array = [
681 Callable(self, "desc_oneof_tail"),
682 ASD.new(TOKEN_ID.BRACKET_CURLY_RIGHT)
683 ]
684
685 var TEMPLATE_BEGIN : Array = [
686 null,
687 ASD.new(TOKEN_ID.SPACE, SP.NO, AR.MAYBE)
688 ]
689
690 var TEMPLATE_END : Array = [
691 null
692 ]
693
694 func get_token_id(tokens : Array, index : int) -> int:
695 if index < tokens.size():
696 return tokens[index].id
697 return TOKEN_ID.UNDEFINED
698
699 enum COMPARE_STATE {
700 DONE = 0,
701 MISMATCH = 1,
702 INCOMPLETE = 2,
703 ERROR_VALUE = 3
704 }
705
706 class TokenCompare:
707 func _init(s : int, i : int, d : String = ""):
708 state = s
709 index = i
710 description = d
711 var state : int
712 var index : int
713 var description : String
714
715 func check_space(tokens : Array, index : int, space) -> int:
716 if get_token_id(tokens, index) == TOKEN_ID.SPACE:
717 if space == SP.MAYBE:
718 return 1
719 elif space == SP.MUST:
720 return 1
721 elif space == SP.NO:
722 return -1
723 else:
724 if space == SP.MUST:
725 return -2
726 return 0
727
728 class IndexedToken:
729 func _init(t : TokenEntrance, i : int):
730 token = t
731 index = i
732 var token : TokenEntrance
733 var index : int
734
735 func token_importance_checkadd(template : ASD, token : TokenEntrance, index : int, importance : Array) -> void:
736 if template.importance:
737 importance.append(IndexedToken.new(token, index))
738
739 class CompareSettings:
740 func _init(ci : int, n : int, pi : int, pn : String = ""):
741 construction_index = ci
742 nesting = n
743 parent_index = pi
744 parent_name = pn
745
746 var construction_index : int
747 var nesting : int
748 var parent_index : int
749 var parent_name : String
750
751 func description_compare(template : Array, tokens : Array, index : int, settings : CompareSettings) -> TokenCompare:
752 var j : int = index
753 var space : int
754 var rule : int
755 var rule_flag : bool
756 var cont : bool
757 var check : int
758 var maybe_group_skip : bool = false
759 var any_group_index : int = -1
760 var any_end_group_index : int = -1
761 var i : int = 0
762 var importance : Array = []
763 while true:
764 i += 1
765 if i >= template.size():
766 break
767 rule_flag = false
768 cont = false
769 rule = template[i].rule
770 space = template[i].space
771 if rule == AR.MAYBE_END && maybe_group_skip:
772 maybe_group_skip = false
773 continue
774 if maybe_group_skip:
775 continue
776 if rule == AR.MAYBE:
777 if template[i].token == get_token_id(tokens, j):
778 token_importance_checkadd(template[i], tokens[j], j, importance)
779 rule_flag = true
780 else:
781 continue
782 elif rule == AR.MUST_ONE || rule == AR.MAYBE_END || rule == AR.ANY_END:
783 if template[i].token == get_token_id(tokens, j):
784 token_importance_checkadd(template[i], tokens[j], j, importance)
785 rule_flag = true
786 elif rule == AR.ANY:
787 var find_any : bool = false
788 while true:
789 if template[i].token == get_token_id(tokens, j):
790 token_importance_checkadd(template[i], tokens[j], j, importance)
791 find_any = true
792 j += 1
793 check = check_space(tokens, j, space)
794 if check < 0:
795 return TokenCompare.new(COMPARE_STATE.INCOMPLETE, j)
796 else:
797 j += check
798 else:
799 if find_any:
800 cont = true
801 break
802 elif rule == AR.OR:
803 var or_tokens = template[i].token
804 for v in or_tokens:
805 if v == get_token_id(tokens, j):
806 token_importance_checkadd(template[i], tokens[j], j, importance)
807 j += 1
808 check = check_space(tokens, j, space)
809 if check < 0:
810 return TokenCompare.new(COMPARE_STATE.INCOMPLETE, j)
811 else:
812 j += check
813 cont = true
814 break
815 elif rule == AR.MAYBE_BEGIN:
816 if template[i].token == get_token_id(tokens, j):
817 token_importance_checkadd(template[i], tokens[j], j, importance)
818 rule_flag = true
819 else:
820 maybe_group_skip = true
821 continue
822 elif rule == AR.ANY_BEGIN:
823 if template[i].token == get_token_id(tokens, j):
824 token_importance_checkadd(template[i], tokens[j], j, importance)
825 rule_flag = true
826 any_group_index = i
827 else:
828 if any_end_group_index > 0:
829 any_group_index = -1
830 i = any_end_group_index
831 any_end_group_index = -1
832 continue
833 if cont:
834 continue
835 if rule_flag:
836 j += 1
837 check = check_space(tokens, j, space)
838 if check < 0:
839 return TokenCompare.new(COMPARE_STATE.INCOMPLETE, j)
840 else:
841 j += check
842 else:
843 if j > index:
844 return TokenCompare.new(COMPARE_STATE.INCOMPLETE, j)
845 else:
846 return TokenCompare.new(COMPARE_STATE.MISMATCH, j)
847 if any_group_index >= 0 && rule == AR.ANY_END:
848 any_end_group_index = i
849 i = any_group_index - 1
850 if template[0] != null:
851 var result : DescriptionResult = template[0].call(importance, settings)
852 if !result.success:
853 return TokenCompare.new(COMPARE_STATE.ERROR_VALUE, result.error, result.description)
854 return TokenCompare.new(COMPARE_STATE.DONE, j)
855
856 var DESCRIPTION : Array = [
857 TEMPLATE_BEGIN, #0
858 TEMPLATE_SYNTAX, #1
859 TEMPLATE_IMPORT, #2
860 TEMPLATE_PACKAGE, #3
861 TEMPLATE_OPTION, #4
862 TEMPLATE_FIELD, #5
863 TEMPLATE_FIELD_ONEOF, #6
864 TEMPLATE_MAP_FIELD, #7
865 TEMPLATE_MAP_FIELD_ONEOF, #8
866 TEMPLATE_ENUM, #9
867 TEMPLATE_MESSAGE_HEAD, #10
868 TEMPLATE_MESSAGE_TAIL, #11
869 TEMPLATE_ONEOF_HEAD, #12
870 TEMPLATE_ONEOF_TAIL, #13
871 TEMPLATE_END #14
872 ]
873
874 enum JUMP {
875 NOTHING = 0, #nothing
876 SIMPLE = 1, #simple jump
877 NESTED_INCREMENT = 2, #nested increment
878 NESTED_DECREMENT = 3, #nested decrement
879 MUST_NESTED_SIMPLE = 4, #check: must be nested > 0
880 MUST_NESTED_INCREMENT = 5, #check: must be nested > 0, then nested increment
881 MUST_NESTED_DECREMENT = 6, #nested decrement, then check: must be nested > 0
882 }
883
884 var TRANSLATION_TABLE : Array = [
885 # BEGIN SYNTAX IMPORT PACKAGE OPTION FIELD FIELD_O MAP_F MAP_F_O ENUM MES_H MES_T ONEOF_H ONEOF_T END
886 [ 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], #BEGIN
887 [ 0, 0, 1, 1, 1, 0, 0, 0, 0, 1, 2, 0, 0, 0, 1], #SYNTAX
888 [ 0, 0, 1, 1, 1, 0, 0, 0, 0, 1, 2, 0, 0, 0, 1], #IMPORT
889 [ 0, 0, 1, 1, 1, 0, 0, 0, 0, 1, 2, 0, 0, 0, 1], #PACKAGE
890 [ 0, 0, 1, 1, 1, 0, 0, 0, 0, 1, 2, 0, 0, 0, 1], #OPTION
891 [ 0, 0, 0, 0, 0, 4, 0, 4, 0, 1, 2, 3, 5, 0, 0], #FIELD
892 [ 0, 0, 0, 0, 0, 0, 4, 0, 4, 0, 0, 0, 0, 6, 0], #FIELD_ONEOF
893 [ 0, 0, 0, 0, 0, 4, 0, 4, 0, 1, 2, 3, 5, 0, 0], #MAP_F
894 [ 0, 0, 0, 0, 0, 0, 4, 0, 4, 0, 0, 0, 0, 6, 0], #MAP_F_ONEOF
895 [ 0, 0, 0, 0, 0, 4, 0, 4, 0, 1, 2, 3, 5, 0, 1], #ENUM
896 [ 0, 0, 0, 0, 0, 4, 0, 4, 0, 1, 2, 3, 5, 0, 0], #MES_H
897 [ 0, 0, 0, 0, 0, 4, 0, 4, 0, 1, 2, 3, 5, 0, 1], #MES_T
898 [ 0, 0, 0, 0, 0, 0, 4, 0, 4, 0, 0, 0, 0, 0, 0], #ONEOF_H
899 [ 0, 0, 0, 0, 0, 4, 0, 4, 0, 1, 2, 3, 5, 0, 1], #ONEOF_T
900 [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] #END
901 ]
902
903 class Construction:
904 func _init(b : int, e : int, d : int):
905 begin_token_index = b
906 end_token_index = e
907 description = d
908 var begin_token_index : int
909 var end_token_index : int
910 var description : int
911
912 class TranslationResult:
913 var constructions : Array = []
914 var done : bool = false
915 var error_description_id : int = -1
916 var error_description_text : String = ""
917 var parse_token_index : int = 0
918 var error_token_index : int = 0
919
920 func analyze_tokens(tokens : Array) -> TranslationResult:
921 var i : int = 0
922 var result : TranslationResult = TranslationResult.new()
923 var comp : TokenCompare
924 var cur_template_id : int = 0
925 var error : bool = false
926 var template_index : int
927 var comp_set : CompareSettings = CompareSettings.new(result.constructions.size(), 0, -1)
928 comp = description_compare(DESCRIPTION[cur_template_id], tokens, i, comp_set)
929 if comp.state == COMPARE_STATE.DONE:
930 i = comp.index
931 while true:
932 var end : bool = true
933 var find : bool = false
934 for j in range(TRANSLATION_TABLE[cur_template_id].size()):
935 template_index = j
936 if j == DESCRIPTION.size() - 1 && i < tokens.size():
937 end = false
938 if result.error_description_id < 0:
939 error = true
940 break
941 if TRANSLATION_TABLE[cur_template_id][j] > 0:
942 end = false
943 comp_set.construction_index = result.constructions.size()
944 comp = description_compare(DESCRIPTION[j], tokens, i, comp_set)
945 if comp.state == COMPARE_STATE.DONE:
946 if TRANSLATION_TABLE[cur_template_id][j] == JUMP.NESTED_INCREMENT:
947 comp_set.nesting += 1
948 elif TRANSLATION_TABLE[cur_template_id][j] == JUMP.NESTED_DECREMENT:
949 comp_set.nesting -= 1
950 if comp_set.nesting < 0:
951 error = true
952 break
953 elif TRANSLATION_TABLE[cur_template_id][j] == JUMP.MUST_NESTED_SIMPLE:
954 if comp_set.nesting <= 0:
955 error = true
956 break
957 elif TRANSLATION_TABLE[cur_template_id][j] == JUMP.MUST_NESTED_INCREMENT:
958 if comp_set.nesting <= 0:
959 error = true
960 break
961 comp_set.nesting += 1
962 elif TRANSLATION_TABLE[cur_template_id][j] == JUMP.MUST_NESTED_DECREMENT:
963 comp_set.nesting -= 1
964 if comp_set.nesting <= 0:
965 error = true
966 break
967 result.constructions.append(Construction.new(i, comp.index, j))
968 find = true
969 i = comp.index
970 cur_template_id = j
971 if i == tokens.size():
972 if TRANSLATION_TABLE[cur_template_id][DESCRIPTION.size() - 1] == JUMP.SIMPLE:
973 if comp_set.nesting == 0:
974 end = true
975 else:
976 error = true
977 else:
978 error = true
979 elif i > tokens.size():
980 error = true
981 break
982 elif comp.state == COMPARE_STATE.INCOMPLETE:
983 error = true
984 break
985 elif comp.state == COMPARE_STATE.ERROR_VALUE:
986 error = true
987 break
988 if error:
989 result.error_description_text = comp.description
990 result.error_description_id = template_index
991 result.parse_token_index = i
992 if comp.index >= tokens.size():
993 result.error_token_index = tokens.size() - 1
994 else:
995 result.error_token_index = comp.index
996 if end:
997 result.done = true
998 result.error_description_id = -1
999 break
1000 if !find:
1001 break
1002 return result
1003
1004 enum CLASS_TYPE {
1005 ENUM = 0,
1006 MESSAGE = 1,
1007 MAP = 2
1008 }
1009
1010 enum FIELD_TYPE {
1011 UNDEFINED = -1,
1012 INT32 = 0,
1013 SINT32 = 1,
1014 UINT32 = 2,
1015 INT64 = 3,
1016 SINT64 = 4,
1017 UINT64 = 5,
1018 BOOL = 6,
1019 ENUM = 7,
1020 FIXED32 = 8,
1021 SFIXED32 = 9,
1022 FLOAT = 10,
1023 FIXED64 = 11,
1024 SFIXED64 = 12,
1025 DOUBLE = 13,
1026 STRING = 14,
1027 BYTES = 15,
1028 MESSAGE = 16,
1029 MAP = 17
1030 }
1031
1032 enum FIELD_QUALIFICATOR {
1033 OPTIONAL = 0,
1034 REQUIRED = 1,
1035 REPEATED = 2,
1036 RESERVED = 3
1037 }
1038
1039 enum FIELD_OPTION {
1040 PACKED = 0,
1041 NOT_PACKED = 1
1042 }
1043
1044 class ASTClass:
1045 func _init(n : String, t : int, p : int, pn : String, o : String, ci : int):
1046 name = n
1047 type = t
1048 parent_index = p
1049 parent_name = pn
1050 option = o
1051 construction_index = ci
1052 values = []
1053
1054 var name : String
1055 var type : int
1056 var parent_index : int
1057 var parent_name : String
1058 var option : String
1059 var construction_index
1060 var values : Array
1061
1062 func copy() -> ASTClass:
1063 var res : ASTClass = ASTClass.new(name, type, parent_index, parent_name, option, construction_index)
1064 for v in values:
1065 res.values.append(v.copy())
1066 return res
1067
1068 class ASTEnumValue:
1069 func _init(n : String, v : String):
1070 name = n
1071 value = v
1072
1073 var name : String
1074 var value : String
1075
1076 func copy() -> ASTEnumValue:
1077 return ASTEnumValue.new(name, value)
1078
1079 class ASTField:
1080 func _init(t, n : String, tn : String, p : int, q : int, o : int, ci : int, mf : bool):
1081 tag = t
1082 name = n
1083 type_name = tn
1084 parent_class_id = p
1085 qualificator = q
1086 option = o
1087 construction_index = ci
1088 is_map_field = mf
1089
1090 var tag
1091 var name : String
1092 var type_name : String
1093 var parent_class_id : int
1094 var qualificator : int
1095 var option : int
1096 var construction_index : int
1097 var is_map_field : bool
1098 var field_type : int = FIELD_TYPE.UNDEFINED
1099 var type_class_id : int = -1
1100
1101 func copy() -> ASTField:
1102 var res : ASTField = ASTField.new(tag, name, type_name, parent_class_id, qualificator, option, construction_index, is_map_field)
1103 res.field_type = field_type
1104 res.type_class_id = type_class_id
1105 return res
1106
1107 enum AST_GROUP_RULE {
1108 ONEOF = 0,
1109 ALL = 1
1110 }
1111
1112 class ASTFieldGroup:
1113 func _init(n : String, pi : int, r : int):
1114 name = n
1115 parent_class_id = pi
1116 rule = r
1117 opened = true
1118
1119 var name : String
1120 var parent_class_id : int
1121 var rule : int
1122 var field_indexes : Array = []
1123 var opened : bool
1124
1125 func copy() -> ASTFieldGroup:
1126 var res : ASTFieldGroup = ASTFieldGroup.new(name, parent_class_id, rule)
1127 res.opened = opened
1128 for fi in field_indexes:
1129 res.field_indexes.append(fi)
1130 return res
1131
1132 class ASTImport:
1133 func _init(a_path : String, a_public : bool, sha : String):
1134 path = a_path
1135 public = a_public
1136 sha256 = sha
1137
1138 var path : String
1139 var public : bool
1140 var sha256 : String
1141
1142 var class_table : Array = []
1143 var field_table : Array = []
1144 var group_table : Array = []
1145 var import_table : Array = []
1146 var proto_version : int = 0
1147
1148 class DescriptionResult:
1149 func _init(s : bool = true, e = null, d : String = ""):
1150 success = s
1151 error = e
1152 description = d
1153 var success : bool
1154 var error
1155 var description : String
1156
1157 static func get_text_from_token(string_token : TokenEntrance) -> String:
1158 return string_token.text.substr(1, string_token.text.length() - 2)
1159
1160 func desc_syntax(indexed_tokens : Array, settings : CompareSettings) -> DescriptionResult:
1161 var result : DescriptionResult = DescriptionResult.new()
1162 var s : String = get_text_from_token(indexed_tokens[0].token)
1163 if s == "proto2":
1164 proto_version = 2
1165 elif s == "proto3":
1166 proto_version = 3
1167 else:
1168 result.success = false
1169 result.error = indexed_tokens[0].index
1170 result.description = "Unspecified version of the protocol. Use \"proto2\" or \"proto3\" syntax string."
1171 return result
1172
1173 func desc_import(indexed_tokens : Array, settings : CompareSettings) -> DescriptionResult:
1174 var result : DescriptionResult = DescriptionResult.new()
1175 var offset : int = 0
1176 var public : bool = false
1177 if indexed_tokens[offset].token.id == TOKEN_ID.IMPORT_QUALIFICATION:
1178 if indexed_tokens[offset].token.text == "public":
1179 public = true
1180 offset += 1
1181 var f_name : String = path_dir + get_text_from_token(indexed_tokens[offset].token)
1182 var sha : String = FileAccess.get_sha256(f_name)
1183 if FileAccess.file_exists(f_name):
1184 for i in import_table:
1185 if i.path == f_name:
1186 result.success = false
1187 result.error = indexed_tokens[offset].index
1188 result.description = "File '" + f_name + "' already imported."
1189 return result
1190 if i.sha256 == sha:
1191 result.success = false
1192 result.error = indexed_tokens[offset].index
1193 result.description = "File '" + f_name + "' with matching SHA256 already imported."
1194 return result
1195 import_table.append(ASTImport.new(f_name, public, sha))
1196 else:
1197 result.success = false
1198 result.error = indexed_tokens[offset].index
1199 result.description = "Import file '" + f_name + "' not found."
1200 return result
1201
1202 func desc_package(indexed_tokens : Array, settings : CompareSettings) -> DescriptionResult:
1203 printerr("UNRELEASED desc_package: ", indexed_tokens.size(), ", nesting: ", settings.nesting)
1204 var result : DescriptionResult = DescriptionResult.new()
1205 return result
1206
1207 func desc_option(indexed_tokens : Array, settings : CompareSettings) -> DescriptionResult:
1208 printerr("UNRELEASED desc_option: ", indexed_tokens.size(), ", nesting: ", settings.nesting)
1209 var result : DescriptionResult = DescriptionResult.new()
1210 return result
1211
1212 func desc_field(indexed_tokens : Array, settings : CompareSettings) -> DescriptionResult:
1213 var result : DescriptionResult = DescriptionResult.new()
1214 var qualifcator : int = FIELD_QUALIFICATOR.OPTIONAL
1215 var option : int
1216 var offset : int = 0
1217
1218 if proto_version == 3:
1219 option = FIELD_OPTION.PACKED
1220 if indexed_tokens[offset].token.id == TOKEN_ID.FIELD_QUALIFICATION:
1221 if indexed_tokens[offset].token.text == "repeated":
1222 qualifcator = FIELD_QUALIFICATOR.REPEATED
1223 elif indexed_tokens[offset].token.text == "required" || indexed_tokens[offset].token.text == "optional":
1224 result.success = false
1225 result.error = indexed_tokens[offset].index
1226 result.description = "Using the 'required' or 'optional' qualificator is unacceptable in Protobuf v3."
1227 return result
1228 offset += 1
1229 if proto_version == 2:
1230 option = FIELD_OPTION.NOT_PACKED
1231 if !(group_table.size() > 0 && group_table[group_table.size() - 1].opened):
1232 if indexed_tokens[offset].token.id == TOKEN_ID.FIELD_QUALIFICATION:
1233 if indexed_tokens[offset].token.text == "repeated":
1234 qualifcator = FIELD_QUALIFICATOR.REPEATED
1235 elif indexed_tokens[offset].token.text == "required":
1236 qualifcator = FIELD_QUALIFICATOR.REQUIRED
1237 elif indexed_tokens[offset].token.text == "optional":
1238 qualifcator = FIELD_QUALIFICATOR.OPTIONAL
1239 offset += 1
1240 else:
1241 if class_table[settings.parent_index].type == CLASS_TYPE.MESSAGE:
1242 result.success = false
1243 result.error = indexed_tokens[offset].index
1244 result.description = "Using the 'required', 'optional' or 'repeated' qualificator necessarily in Protobuf v2."
1245 return result
1246 var type_name : String = indexed_tokens[offset].token.text; offset += 1
1247 var field_name : String = indexed_tokens[offset].token.text; offset += 1
1248 var tag : String = indexed_tokens[offset].token.text; offset += 1
1249
1250 if indexed_tokens.size() == offset + 2:
1251 if indexed_tokens[offset].token.text == "packed":
1252 offset += 1
1253 if indexed_tokens[offset].token.text == "true":
1254 option = FIELD_OPTION.PACKED
1255 else:
1256 option = FIELD_OPTION.NOT_PACKED
1257 else:
1258 result.success = false
1259 result.error = indexed_tokens[offset].index
1260 result.description = "Undefined field option."
1261 return result
1262
1263 if group_table.size() > 0:
1264 if group_table[group_table.size() - 1].opened:
1265 if indexed_tokens[0].token.id == TOKEN_ID.FIELD_QUALIFICATION:
1266 result.success = false
1267 result.error = indexed_tokens[0].index
1268 result.description = "Using the 'required', 'optional' or 'repeated' qualificator is unacceptable in 'OneOf' field."
1269 return result
1270 group_table[group_table.size() - 1].field_indexes.append(field_table.size())
1271 field_table.append(ASTField.new(tag, field_name, type_name, settings.parent_index, qualifcator, option, settings.construction_index, false))
1272 return result
1273
1274 func desc_map_field(indexed_tokens : Array, settings : CompareSettings) -> DescriptionResult:
1275 var result : DescriptionResult = DescriptionResult.new()
1276 var qualifcator : int = FIELD_QUALIFICATOR.REPEATED
1277 var option : int
1278 var offset : int = 0
1279
1280 if proto_version == 3:
1281 option = FIELD_OPTION.PACKED
1282 if proto_version == 2:
1283 option = FIELD_OPTION.NOT_PACKED
1284
1285 var key_type_name : String = indexed_tokens[offset].token.text; offset += 1
1286 if key_type_name == "float" || key_type_name == "double" || key_type_name == "bytes":
1287 result.success = false
1288 result.error = indexed_tokens[offset - 1].index
1289 result.description = "Map 'key_type' can't be floating point types and bytes."
1290 var type_name : String = indexed_tokens[offset].token.text; offset += 1
1291 var field_name : String = indexed_tokens[offset].token.text; offset += 1
1292 var tag : String = indexed_tokens[offset].token.text; offset += 1
1293
1294 if indexed_tokens.size() == offset + 2:
1295 if indexed_tokens[offset].token.text == "packed":
1296 offset += 1
1297 if indexed_tokens[offset] == "true":
1298 option = FIELD_OPTION.PACKED
1299 else:
1300 option = FIELD_OPTION.NOT_PACKED
1301 else:
1302 result.success = false
1303 result.error = indexed_tokens[offset].index
1304 result.description = "Undefined field option."
1305
1306 if group_table.size() > 0:
1307 if group_table[group_table.size() - 1].opened:
1308 group_table[group_table.size() - 1].field_indexes.append(field_table.size())
1309
1310 class_table.append(ASTClass.new("map_type_" + field_name, CLASS_TYPE.MAP, settings.parent_index, settings.parent_name, "", settings.construction_index))
1311 field_table.append(ASTField.new(tag, field_name, "map_type_" + field_name, settings.parent_index, qualifcator, option, settings.construction_index, false))
1312
1313 field_table.append(ASTField.new(1, "key", key_type_name, class_table.size() - 1, FIELD_QUALIFICATOR.OPTIONAL, option, settings.construction_index, true))
1314 field_table.append(ASTField.new(2, "value", type_name, class_table.size() - 1, FIELD_QUALIFICATOR.OPTIONAL, option, settings.construction_index, true))
1315
1316 return result
1317
1318 func desc_enum(indexed_tokens : Array, settings : CompareSettings) -> DescriptionResult:
1319 var result : DescriptionResult = DescriptionResult.new()
1320 var option : String = ""
1321 var offset : int = 0
1322 var type_name : String = indexed_tokens[offset].token.text; offset += 1
1323 if indexed_tokens[offset].token.id == TOKEN_ID.ENUM_OPTION:
1324 if indexed_tokens[offset].token.text == "allow_alias" && indexed_tokens[offset + 1].token.text == "true":
1325 option = "allow_alias"
1326 offset += 2
1327 var value : ASTEnumValue
1328 var enum_class : ASTClass = ASTClass.new(type_name, CLASS_TYPE.ENUM, settings.parent_index, settings.parent_name, option, settings.construction_index)
1329 var first_value : bool = true
1330 while offset < indexed_tokens.size():
1331 if first_value:
1332 if indexed_tokens[offset + 1].token.text != "0":
1333 result.success = false
1334 result.error = indexed_tokens[offset + 1].index
1335 result.description = "For Enums, the default value is the first defined enum value, which must be 0."
1336 break
1337 first_value = false
1338 #if indexed_tokens[offset + 1].token.text[0] == "+" || indexed_tokens[offset + 1].token.text[0] == "-":
1339 # result.success = false
1340 # result.error = indexed_tokens[offset + 1].index
1341 # result.description = "For Enums, signed values are not allowed."
1342 # break
1343 value = ASTEnumValue.new(indexed_tokens[offset].token.text, indexed_tokens[offset + 1].token.text)
1344 enum_class.values.append(value)
1345 offset += 2
1346
1347 class_table.append(enum_class)
1348 return result
1349
1350 func desc_message_head(indexed_tokens : Array, settings : CompareSettings) -> DescriptionResult:
1351 var result : DescriptionResult = DescriptionResult.new()
1352 class_table.append(ASTClass.new(indexed_tokens[0].token.text, CLASS_TYPE.MESSAGE, settings.parent_index, settings.parent_name, "", settings.construction_index))
1353 settings.parent_index = class_table.size() - 1
1354 settings.parent_name = settings.parent_name + "." + indexed_tokens[0].token.text
1355 return result
1356
1357 func desc_message_tail(indexed_tokens : Array, settings : CompareSettings) -> DescriptionResult:
1358 settings.parent_index = class_table[settings.parent_index].parent_index
1359 settings.parent_name = class_table[settings.parent_index + 1].parent_name
1360 var result : DescriptionResult = DescriptionResult.new()
1361 return result
1362
1363 func desc_oneof_head(indexed_tokens : Array, settings : CompareSettings) -> DescriptionResult:
1364 var result : DescriptionResult = DescriptionResult.new()
1365 for g in group_table:
1366 if g.parent_class_id == settings.parent_index && g.name == indexed_tokens[0].token.text:
1367 result.success = false
1368 result.error = indexed_tokens[0].index
1369 result.description = "OneOf name must be unique."
1370 return result
1371 group_table.append(ASTFieldGroup.new(indexed_tokens[0].token.text, settings.parent_index, AST_GROUP_RULE.ONEOF))
1372 return result
1373
1374 func desc_oneof_tail(indexed_tokens : Array, settings : CompareSettings) -> DescriptionResult:
1375 group_table[group_table.size() - 1].opened = false
1376 var result : DescriptionResult = DescriptionResult.new()
1377 return result
1378
1379 func analyze() -> AnalyzeResult:
1380 var analyze_result : AnalyzeResult = AnalyzeResult.new()
1381 analyze_result.doc = document
1382 analyze_result.classes = class_table
1383 analyze_result.fields = field_table
1384 analyze_result.groups = group_table
1385 analyze_result.state = false
1386 var result : TokenResult = tokenizer()
1387 if result.errors.size() > 0:
1388 for v in result.errors:
1389 var spos : Helper.StringPosition = Helper.str_pos(document.text, v.position)
1390 var err_text : String = "Unexpected token intersection " + "'" + document.text.substr(v.position.begin, spos.length) + "'"
1391 printerr(Helper.error_string(document.name, spos.str_num, spos.column, err_text))
1392 else:
1393 var integrity = check_tokens_integrity(result.tokens, document.text.length() - 1)
1394 if integrity.size() > 0:
1395 for v in integrity:
1396 var spos: Helper.StringPosition = Helper.str_pos(document.text, TokenPosition.new(v.begin, v.end))
1397 var err_text : String = "Unexpected token " + "'" + document.text.substr(v.begin, spos.length) + "'"
1398 printerr(Helper.error_string(document.name, spos.str_num, spos.column, err_text))
1399 else:
1400 analyze_result.tokens = result.tokens
1401 comment_space_processing(result.tokens)
1402 var syntax : TranslationResult = analyze_tokens(result.tokens)
1403 if !syntax.done:
1404 var pos_main : TokenPosition = Helper.text_pos(result.tokens, syntax.parse_token_index)
1405 var pos_inner : TokenPosition = Helper.text_pos(result.tokens, syntax.error_token_index)
1406 var spos_main : Helper.StringPosition = Helper.str_pos(document.text, pos_main)
1407 var spos_inner : Helper.StringPosition = Helper.str_pos(document.text, pos_inner)
1408 var err_text : String = "Syntax error in construction '" + result.tokens[syntax.parse_token_index].text + "'. "
1409 err_text += "Unacceptable use '" + result.tokens[syntax.error_token_index].text + "' at:" + str(spos_inner.str_num) + ":" + str(spos_inner.column)
1410 err_text += "\n" + syntax.error_description_text
1411 printerr(Helper.error_string(document.name, spos_main.str_num, spos_main.column, err_text))
1412 else:
1413 analyze_result.version = proto_version
1414 analyze_result.imports = import_table
1415 analyze_result.syntax = syntax
1416 analyze_result.state = true
1417 return analyze_result
1418
1419class Semantic:
1420
1421 var class_table : Array
1422 var field_table : Array
1423 var group_table : Array
1424 var syntax : Analysis.TranslationResult
1425 var tokens : Array
1426 var document : Document
1427
1428 func _init(analyze_result : AnalyzeResult):
1429 class_table = analyze_result.classes
1430 field_table = analyze_result.fields
1431 group_table = analyze_result.groups
1432 syntax = analyze_result.syntax
1433 tokens = analyze_result.tokens
1434 document = analyze_result.doc
1435
1436
1437 enum CHECK_SUBJECT {
1438 CLASS_NAME = 0,
1439 FIELD_NAME = 1,
1440 FIELD_TAG_NUMBER = 2,
1441 FIELD_TYPE = 3
1442 }
1443
1444 var STRING_FIELD_TYPE = {
1445 "int32": Analysis.FIELD_TYPE.INT32,
1446 "sint32": Analysis.FIELD_TYPE.SINT32,
1447 "uint32": Analysis.FIELD_TYPE.UINT32,
1448 "int64": Analysis.FIELD_TYPE.INT64,
1449 "sint64": Analysis.FIELD_TYPE.SINT64,
1450 "uint64": Analysis.FIELD_TYPE.UINT64,
1451 "bool": Analysis.FIELD_TYPE.BOOL,
1452 "fixed32": Analysis.FIELD_TYPE.FIXED32,
1453 "sfixed32": Analysis.FIELD_TYPE.SFIXED32,
1454 "float": Analysis.FIELD_TYPE.FLOAT,
1455 "fixed64": Analysis.FIELD_TYPE.FIXED64,
1456 "sfixed64": Analysis.FIELD_TYPE.SFIXED64,
1457 "double": Analysis.FIELD_TYPE.DOUBLE,
1458 "string": Analysis.FIELD_TYPE.STRING,
1459 "bytes": Analysis.FIELD_TYPE.BYTES,
1460 "map": Analysis.FIELD_TYPE.MAP
1461 }
1462
1463 class CheckResult:
1464 func _init(mci : int, aci : int, ti : int, s : int):
1465 main_construction_index = mci
1466 associated_construction_index = aci
1467 table_index = ti
1468 subject = s
1469
1470 var main_construction_index: int = -1
1471 var associated_construction_index: int = -1
1472 var table_index: int = -1
1473 var subject : int
1474
1475 func check_class_names() -> Array:
1476 var result : Array = []
1477 for i in range(class_table.size()):
1478 var the_class_name : String = class_table[i].parent_name + "." + class_table[i].name
1479 for j in range(i + 1, class_table.size(), 1):
1480 var inner_name : String = class_table[j].parent_name + "." + class_table[j].name
1481 if inner_name == the_class_name:
1482 var check : CheckResult = CheckResult.new(class_table[j].construction_index, class_table[i].construction_index, j, CHECK_SUBJECT.CLASS_NAME)
1483 result.append(check)
1484 break
1485 return result
1486
1487 func check_field_names() -> Array:
1488 var result : Array = []
1489 for i in range(field_table.size()):
1490 var the_class_name : String = class_table[field_table[i].parent_class_id].parent_name + "." + class_table[field_table[i].parent_class_id].name
1491 for j in range(i + 1, field_table.size(), 1):
1492 var inner_name : String = class_table[field_table[j].parent_class_id].parent_name + "." + class_table[field_table[j].parent_class_id].name
1493 if inner_name == the_class_name:
1494 if field_table[i].name == field_table[j].name:
1495 var check : CheckResult = CheckResult.new(field_table[j].construction_index, field_table[i].construction_index, j, CHECK_SUBJECT.FIELD_NAME)
1496 result.append(check)
1497 break
1498 if field_table[i].tag == field_table[j].tag:
1499 var check : CheckResult = CheckResult.new(field_table[j].construction_index, field_table[i].construction_index, j, CHECK_SUBJECT.FIELD_TAG_NUMBER)
1500 result.append(check)
1501 break
1502 return result
1503
1504 func find_full_class_name(the_class_name : String) -> int:
1505 for i in range(class_table.size()):
1506 if the_class_name == class_table[i].parent_name + "." + class_table[i].name:
1507 return i
1508 return -1
1509
1510 func find_class_name(the_class_name : String) -> int:
1511 for i in range(class_table.size()):
1512 if the_class_name == class_table[i].name:
1513 return i
1514 return -1
1515
1516 func get_class_childs(class_index : int) -> Array:
1517 var result : Array = []
1518 for i in range(class_table.size()):
1519 if class_table[i].parent_index == class_index:
1520 result.append(i)
1521 return result
1522
1523 func find_in_childs(the_class_name : String, child_indexes : Array) -> int:
1524 for c in child_indexes:
1525 if the_class_name == class_table[c].name:
1526 return c
1527 return -1
1528
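# Resolves each field's type: built-in scalar names are mapped directly, other
# type names are resolved against user-defined classes by searching from the
# field's enclosing message outward, and finally by fully qualified name.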
1529 func determine_field_types() -> Array:
1530 var result : Array = []
1531 for f in field_table:
1532 if STRING_FIELD_TYPE.has(f.type_name):
1533 f.field_type = STRING_FIELD_TYPE[f.type_name]
1534 else:
1535 if f.type_name[0] == ".":
1536 f.type_class_id = find_full_class_name(f.type_name)
1537 else:
1538 # Reset the result of a previous assignment, which can be incorrect because of the merging of imports
1539 f.type_class_id = -1
1540 var splited_name : Array = f.type_name.split(".", false)
1541 var cur_class_index : int = f.parent_class_id
1542 var exit : bool = false
1543 while(true):
1544 var find : bool = false
1545 if cur_class_index == -1:
1546 break
1547 for n in splited_name:
1548 var childs_and_parent : Array = get_class_childs(cur_class_index)
1549 var res_index : int = find_in_childs(n, childs_and_parent)
1550 if res_index >= 0:
1551 find = true
1552 cur_class_index = res_index
1553 else:
1554 if find:
1555 exit = true
1556 else:
1557 cur_class_index = class_table[cur_class_index].parent_index
1558 break
1559 if exit:
1560 break
1561 if find:
1562 f.type_class_id = cur_class_index
1563 break
1564 if f.type_class_id == -1:
1565 f.type_class_id = find_full_class_name("." + f.type_name)
1566 for i in range(field_table.size()):
1567 if field_table[i].field_type == Analysis.FIELD_TYPE.UNDEFINED:
1568 if field_table[i].type_class_id == -1:
1569 result.append(CheckResult.new(field_table[i].construction_index, field_table[i].construction_index, i, CHECK_SUBJECT.FIELD_TYPE))
1570 else:
1571 if class_table[field_table[i].type_class_id].type == Analysis.CLASS_TYPE.ENUM:
1572 field_table[i].field_type = Analysis.FIELD_TYPE.ENUM
1573 elif class_table[field_table[i].type_class_id].type == Analysis.CLASS_TYPE.MESSAGE:
1574 field_table[i].field_type = Analysis.FIELD_TYPE.MESSAGE
1575 elif class_table[field_table[i].type_class_id].type == Analysis.CLASS_TYPE.MAP:
1576 field_table[i].field_type = Analysis.FIELD_TYPE.MAP
1577 else:
1578 result.append(CheckResult.new(field_table[i].construction_index, field_table[i].construction_index, i, CHECK_SUBJECT.FIELD_TYPE))
1579 return result
1580
1581 func check_constructions() -> Array:
1582 var cl : Array = check_class_names()
1583 var fl : Array = check_field_names()
1584 var ft : Array = determine_field_types()
1585 return cl + fl + ft
1586
1587 func check() -> bool:
1588 var check_result : Array = check_constructions()
1589 if check_result.size() == 0:
1590 return true
1591 else:
1592 for v in check_result:
1593 var main_tok : int = syntax.constructions[v.main_construction_index].begin_token_index
1594 var assoc_tok : int = syntax.constructions[v.associated_construction_index].begin_token_index
1595 var main_err_pos : Helper.StringPosition = Helper.str_pos(document.text, Helper.text_pos(tokens, main_tok))
1596 var assoc_err_pos : Helper.StringPosition = Helper.str_pos(document.text, Helper.text_pos(tokens, assoc_tok))
1597 var err_text : String
1598 if v.subject == CHECK_SUBJECT.CLASS_NAME:
1599 var class_type = "Undefined"
1600 if class_table[v.table_index].type == Analysis.CLASS_TYPE.ENUM:
1601 class_type = "Enum"
1602 elif class_table[v.table_index].type == Analysis.CLASS_TYPE.MESSAGE:
1603 class_type = "Message"
1604 elif class_table[v.table_index].type == Analysis.CLASS_TYPE.MAP:
1605 class_type = "Map"
1606 err_text = class_type + " name '" + class_table[v.table_index].name + "' is already defined at:" + str(assoc_err_pos.str_num) + ":" + str(assoc_err_pos.column)
1607 elif v.subject == CHECK_SUBJECT.FIELD_NAME:
1608 err_text = "Field name '" + field_table[v.table_index].name + "' is already defined at:" + str(assoc_err_pos.str_num) + ":" + str(assoc_err_pos.column)
1609 elif v.subject == CHECK_SUBJECT.FIELD_TAG_NUMBER:
1610 err_text = "Tag number '" + field_table[v.table_index].tag + "' is already defined at:" + str(assoc_err_pos.str_num) + ":" + str(assoc_err_pos.column)
1611 elif v.subject == CHECK_SUBJECT.FIELD_TYPE:
1612 err_text = "Type '" + field_table[v.table_index].type_name + "' of the '" + field_table[v.table_index].name + "' field is undefined"
1613 else:
1614 err_text = "Undefined error"
1615 printerr(Helper.error_string(document.name, main_err_pos.str_num, main_err_pos.column, err_text))
1616 return false
1617
1618class Translator:
1619
1620 var class_table : Array
1621 var field_table : Array
1622 var group_table : Array
1623 var proto_version : int
1624
1625 func _init(analyzer_result : AnalyzeResult):
1626 class_table = analyzer_result.classes
1627 field_table = analyzer_result.fields
1628 group_table = analyzer_result.groups
1629 proto_version = analyzer_result.version
1630
1631 func tabulate(text : String, nesting : int) -> String:
1632 var tab : String = ""
1633 for i in range(nesting):
1634 tab += "\t"
1635 return tab + text
1636
1637 func default_dict_text() -> String:
1638 if proto_version == 2:
1639 return "DEFAULT_VALUES_2"
1640 elif proto_version == 3:
1641 return "DEFAULT_VALUES_3"
1642 return "TRANSLATION_ERROR"
1643
1644 func generate_field_type(field : Analysis.ASTField) -> String:
1645 var text : String = "PB_DATA_TYPE."
1646 if field.field_type == Analysis.FIELD_TYPE.INT32:
1647 return text + "INT32"
1648 elif field.field_type == Analysis.FIELD_TYPE.SINT32:
1649 return text + "SINT32"
1650 elif field.field_type == Analysis.FIELD_TYPE.UINT32:
1651 return text + "UINT32"
1652 elif field.field_type == Analysis.FIELD_TYPE.INT64:
1653 return text + "INT64"
1654 elif field.field_type == Analysis.FIELD_TYPE.SINT64:
1655 return text + "SINT64"
1656 elif field.field_type == Analysis.FIELD_TYPE.UINT64:
1657 return text + "UINT64"
1658 elif field.field_type == Analysis.FIELD_TYPE.BOOL:
1659 return text + "BOOL"
1660 elif field.field_type == Analysis.FIELD_TYPE.ENUM:
1661 return text + "ENUM"
1662 elif field.field_type == Analysis.FIELD_TYPE.FIXED32:
1663 return text + "FIXED32"
1664 elif field.field_type == Analysis.FIELD_TYPE.SFIXED32:
1665 return text + "SFIXED32"
1666 elif field.field_type == Analysis.FIELD_TYPE.FLOAT:
1667 return text + "FLOAT"
1668 elif field.field_type == Analysis.FIELD_TYPE.FIXED64:
1669 return text + "FIXED64"
1670 elif field.field_type == Analysis.FIELD_TYPE.SFIXED64:
1671 return text + "SFIXED64"
1672 elif field.field_type == Analysis.FIELD_TYPE.DOUBLE:
1673 return text + "DOUBLE"
1674 elif field.field_type == Analysis.FIELD_TYPE.STRING:
1675 return text + "STRING"
1676 elif field.field_type == Analysis.FIELD_TYPE.BYTES:
1677 return text + "BYTES"
1678 elif field.field_type == Analysis.FIELD_TYPE.MESSAGE:
1679 return text + "MESSAGE"
1680 elif field.field_type == Analysis.FIELD_TYPE.MAP:
1681 return text + "MAP"
1682 return text
1683
1684 func generate_field_rule(field : Analysis.ASTField) -> String:
1685 var text : String = "PB_RULE."
1686 if field.qualificator == Analysis.FIELD_QUALIFICATOR.OPTIONAL:
1687 return text + "OPTIONAL"
1688 elif field.qualificator == Analysis.FIELD_QUALIFICATOR.REQUIRED:
1689 return text + "REQUIRED"
1690 elif field.qualificator == Analysis.FIELD_QUALIFICATOR.REPEATED:
1691 return text + "REPEATED"
1692 elif field.qualificator == Analysis.FIELD_QUALIFICATOR.RESERVED:
1693 return text + "RESERVED"
1694 return text
1695
1696 func generate_gdscript_type(field : Analysis.ASTField) -> String:
1697 if field.field_type == Analysis.FIELD_TYPE.MESSAGE:
1698 var type_name : String = class_table[field.type_class_id].parent_name + "." + class_table[field.type_class_id].name
1699 return type_name.substr(1, type_name.length() - 1)
1700 return generate_gdscript_simple_type(field)
1701
1702 func generate_gdscript_simple_type(field : Analysis.ASTField) -> String:
1703 if field.field_type == Analysis.FIELD_TYPE.INT32:
1704 return "int"
1705 elif field.field_type == Analysis.FIELD_TYPE.SINT32:
1706 return "int"
1707 elif field.field_type == Analysis.FIELD_TYPE.UINT32:
1708 return "int"
1709 elif field.field_type == Analysis.FIELD_TYPE.INT64:
1710 return "int"
1711 elif field.field_type == Analysis.FIELD_TYPE.SINT64:
1712 return "int"
1713 elif field.field_type == Analysis.FIELD_TYPE.UINT64:
1714 return "int"
1715 elif field.field_type == Analysis.FIELD_TYPE.BOOL:
1716 return "bool"
1717 elif field.field_type == Analysis.FIELD_TYPE.ENUM:
1718 return ""
1719 elif field.field_type == Analysis.FIELD_TYPE.FIXED32:
1720 return "int"
1721 elif field.field_type == Analysis.FIELD_TYPE.SFIXED32:
1722 return "int"
1723 elif field.field_type == Analysis.FIELD_TYPE.FLOAT:
1724 return "float"
1725 elif field.field_type == Analysis.FIELD_TYPE.FIXED64:
1726 return "int"
1727 elif field.field_type == Analysis.FIELD_TYPE.SFIXED64:
1728 return "int"
1729 elif field.field_type == Analysis.FIELD_TYPE.DOUBLE:
1730 return "float"
1731 elif field.field_type == Analysis.FIELD_TYPE.STRING:
1732 return "String"
1733 elif field.field_type == Analysis.FIELD_TYPE.BYTES:
1734 return "PackedByteArray"
1735 return ""
1736
1737 func generate_field_constructor(field_index : int, nesting : int) -> String:
1738 var text : String = ""
1739 var f : Analysis.ASTField = field_table[field_index]
1740 var field_name : String = "__" + f.name
1741 var pbfield_text : String
1742 var default_var_name := field_name + "_default"
1743 if f.qualificator == Analysis.FIELD_QUALIFICATOR.REPEATED:
1744 var type_name := generate_gdscript_type(f)
1745 if type_name:
1746 text = tabulate("var %s: Array[%s] = []\n" % [default_var_name, type_name], nesting)
1747 else:
1748 text = tabulate("var %s: Array = []\n" % [default_var_name], nesting)
1749 pbfield_text += field_name + " = PBField.new("
1750 pbfield_text += "\"" + f.name + "\", "
1751 pbfield_text += generate_field_type(f) + ", "
1752 pbfield_text += generate_field_rule(f) + ", "
1753 pbfield_text += str(f.tag) + ", "
1754 if f.option == Analysis.FIELD_OPTION.PACKED:
1755 pbfield_text += "true"
1756 elif f.option == Analysis.FIELD_OPTION.NOT_PACKED:
1757 pbfield_text += "false"
1758 if f.qualificator == Analysis.FIELD_QUALIFICATOR.REPEATED:
1759 pbfield_text += ", " + default_var_name
1760 else:
1761 pbfield_text += ", " + default_dict_text() + "[" + generate_field_type(f) + "]"
1762 pbfield_text += ")\n"
1763 text += tabulate(pbfield_text, nesting)
1764 if f.is_map_field:
1765 text += tabulate(field_name + ".is_map_field = true\n", nesting)
1766 text += tabulate("service = PBServiceField.new()\n", nesting)
1767 text += tabulate("service.field = " + field_name + "\n", nesting)
1768 if f.field_type == Analysis.FIELD_TYPE.MESSAGE:
1769 if f.qualificator == Analysis.FIELD_QUALIFICATOR.REPEATED:
1770 text += tabulate("service.func_ref = Callable(self, \"add_" + f.name + "\")\n", nesting)
1771 else:
1772 text += tabulate("service.func_ref = Callable(self, \"new_" + f.name + "\")\n", nesting)
1773 elif f.field_type == Analysis.FIELD_TYPE.MAP:
1774 text += tabulate("service.func_ref = Callable(self, \"add_empty_" + f.name + "\")\n", nesting)
1775 text += tabulate("data[" + field_name + ".tag] = service\n", nesting)
1776
1777 return text
1778
1779 func generate_group_clear(field_index : int, nesting : int) -> String:
1780 for g in group_table:
1781 var text : String = ""
1782 var find : bool = false
1783 if g.parent_class_id == field_table[field_index].parent_class_id:
1784 for i in g.field_indexes:
1785 if field_index == i:
1786 find = true
1787 text += tabulate("data[" + field_table[i].tag + "].state = PB_SERVICE_STATE.FILLED\n", nesting)
1788 else:
1789 text += tabulate("__" + field_table[i].name + ".value = " + default_dict_text() + "[" + generate_field_type(field_table[i]) + "]\n", nesting)
1790 text += tabulate("data[" + field_table[i].tag + "].state = PB_SERVICE_STATE.UNFILLED\n", nesting)
1791 if find:
1792 return text
1793 return ""
1794
1795 func generate_has_oneof(field_index : int, nesting : int) -> String:
1796 for g in group_table:
1797 var text : String = ""
1798 if g.parent_class_id == field_table[field_index].parent_class_id:
1799 for i in g.field_indexes:
1800 if field_index == i:
1801 text += tabulate("func has_" + field_table[i].name + "() -> bool:\n", nesting)
1802 nesting += 1
1803 text += tabulate("return data[" + field_table[i].tag + "].state == PB_SERVICE_STATE.FILLED\n", nesting)
1804 return text
1805 return ""
1806
1807 func generate_field(field_index : int, nesting : int) -> String:
1808 var text : String = ""
1809 var f : Analysis.ASTField = field_table[field_index]
1810 var varname : String = "__" + f.name
1811 text += tabulate("var " + varname + ": PBField\n", nesting)
1812 if f.field_type == Analysis.FIELD_TYPE.MESSAGE:
1813 var the_class_name : String = class_table[f.type_class_id].parent_name + "." + class_table[f.type_class_id].name
1814 the_class_name = the_class_name.substr(1, the_class_name.length() - 1)
1815 if f.qualificator != Analysis.FIELD_QUALIFICATOR.OPTIONAL:
1816 text += generate_has_oneof(field_index, nesting)
1817 if f.qualificator == Analysis.FIELD_QUALIFICATOR.REPEATED:
1818 text += tabulate("func get_" + f.name + "() -> Array[" + the_class_name + "]:\n", nesting)
1819 else:
1820 if f.qualificator == Analysis.FIELD_QUALIFICATOR.OPTIONAL:
1821 text += tabulate("func has_" + f.name + "() -> bool:\n", nesting)
1822 nesting += 1
1823 text += tabulate("if " + varname + ".value != null:\n", nesting)
1824 nesting += 1
1825 text += tabulate("return true\n", nesting)
1826 nesting -= 1
1827 text += tabulate("return false\n", nesting)
1828 nesting -= 1
1829 text += tabulate("func get_" + f.name + "() -> " + the_class_name + ":\n", nesting)
1830 nesting += 1
1831 text += tabulate("return " + varname + ".value\n", nesting)
1832 nesting -= 1
1833 text += tabulate("func clear_" + f.name + "() -> void:\n", nesting)
1834 nesting += 1
1835 text += tabulate("data[" + str(f.tag) + "].state = PB_SERVICE_STATE.UNFILLED\n", nesting)
1836 if f.qualificator == Analysis.FIELD_QUALIFICATOR.REPEATED:
1837 text += tabulate(varname + ".value.clear()\n", nesting)
1838 nesting -= 1
1839 text += tabulate("func add_" + f.name + "() -> " + the_class_name + ":\n", nesting)
1840 nesting += 1
1841 text += tabulate("var element = " + the_class_name + ".new()\n", nesting)
1842 text += tabulate(varname + ".value.append(element)\n", nesting)
1843 text += tabulate("return element\n", nesting)
1844 else:
1845 text += tabulate(varname + ".value = " + default_dict_text() + "[" + generate_field_type(f) + "]\n", nesting)
1846 nesting -= 1
1847 text += tabulate("func new_" + f.name + "() -> " + the_class_name + ":\n", nesting)
1848 nesting += 1
1849 text += generate_group_clear(field_index, nesting)
1850 text += tabulate(varname + ".value = " + the_class_name + ".new()\n", nesting)
1851 text += tabulate("return " + varname + ".value\n", nesting)
1852 elif f.field_type == Analysis.FIELD_TYPE.MAP:
1853 var the_parent_class_name : String = class_table[f.type_class_id].parent_name
1854 the_parent_class_name = the_parent_class_name.substr(1, the_parent_class_name.length() - 1)
1855 var the_class_name : String = the_parent_class_name + "." + class_table[f.type_class_id].name
1856
1857 text += generate_has_oneof(field_index, nesting)
1858 text += tabulate("func get_raw_" + f.name + "():\n", nesting)
1859 nesting += 1
1860 text += tabulate("return " + varname + ".value\n", nesting)
1861 nesting -= 1
1862 text += tabulate("func get_" + f.name + "():\n", nesting)
1863 nesting += 1
1864 text += tabulate("return PBPacker.construct_map(" + varname + ".value)\n", nesting)
1865 nesting -= 1
1866 text += tabulate("func clear_" + f.name + "():\n", nesting)
1867 nesting += 1
1868 text += tabulate("data[" + str(f.tag) + "].state = PB_SERVICE_STATE.UNFILLED\n", nesting)
1869 text += tabulate(varname + ".value = " + default_dict_text() + "[" + generate_field_type(f) + "]\n", nesting)
1870 nesting -= 1
1871 for i in range(field_table.size()):
1872 if field_table[i].parent_class_id == f.type_class_id && field_table[i].name == "value":
1873 var gd_type : String = generate_gdscript_simple_type(field_table[i])
1874 var return_type : String = " -> " + the_class_name
1875 var value_return_type : String = ""
1876 if gd_type != "":
1877 value_return_type = return_type
1878 elif field_table[i].field_type == Analysis.FIELD_TYPE.MESSAGE:
1879 value_return_type = " -> " + the_parent_class_name + "." + field_table[i].type_name
1880 text += tabulate("func add_empty_" + f.name + "()" + return_type + ":\n", nesting)
1881 nesting += 1
1882 text += generate_group_clear(field_index, nesting)
1883 text += tabulate("var element = " + the_class_name + ".new()\n", nesting)
1884 text += tabulate(varname + ".value.append(element)\n", nesting)
1885 text += tabulate("return element\n", nesting)
1886 nesting -= 1
1887 if field_table[i].field_type == Analysis.FIELD_TYPE.MESSAGE:
1888 text += tabulate("func add_" + f.name + "(a_key)" + value_return_type + ":\n", nesting)
1889 nesting += 1
1890 text += generate_group_clear(field_index, nesting)
1891 text += tabulate("var idx = -1\n", nesting)
1892 text += tabulate("for i in range(" + varname + ".value.size()):\n", nesting)
1893 nesting += 1
1894 text += tabulate("if " + varname + ".value[i].get_key() == a_key:\n", nesting)
1895 nesting += 1
1896 text += tabulate("idx = i\n", nesting)
1897 text += tabulate("break\n", nesting)
1898 nesting -= 2
1899 text += tabulate("var element = " + the_class_name + ".new()\n", nesting)
1900 text += tabulate("element.set_key(a_key)\n", nesting)
1901 text += tabulate("if idx != -1:\n", nesting)
1902 nesting += 1
1903 text += tabulate(varname + ".value[idx] = element\n", nesting)
1904 nesting -= 1
1905 text += tabulate("else:\n", nesting)
1906 nesting += 1
1907 text += tabulate(varname + ".value.append(element)\n", nesting)
1908 nesting -= 1
1909 text += tabulate("return element.new_value()\n", nesting)
1910 else:
1911 text += tabulate("func add_" + f.name + "(a_key, a_value) -> void:\n", nesting)
1912 nesting += 1
1913 text += generate_group_clear(field_index, nesting)
1914 text += tabulate("var idx = -1\n", nesting)
1915 text += tabulate("for i in range(" + varname + ".value.size()):\n", nesting)
1916 nesting += 1
1917 text += tabulate("if " + varname + ".value[i].get_key() == a_key:\n", nesting)
1918 nesting += 1
1919 text += tabulate("idx = i\n", nesting)
1920 text += tabulate("break\n", nesting)
1921 nesting -= 2
1922 text += tabulate("var element = " + the_class_name + ".new()\n", nesting)
1923 text += tabulate("element.set_key(a_key)\n", nesting)
1924 text += tabulate("element.set_value(a_value)\n", nesting)
1925 text += tabulate("if idx != -1:\n", nesting)
1926 nesting += 1
1927 text += tabulate(varname + ".value[idx] = element\n", nesting)
1928 nesting -= 1
1929 text += tabulate("else:\n", nesting)
1930 nesting += 1
1931 text += tabulate(varname + ".value.append(element)\n", nesting)
1932 nesting -= 1
1933 break
1934 else:
1935 var gd_type : String = generate_gdscript_simple_type(f)
1936 var return_type : String = ""
1937 var argument_type : String = ""
1938 if gd_type != "":
1939 return_type = " -> " + gd_type
1940 argument_type = " : " + gd_type
1941 if f.qualificator != Analysis.FIELD_QUALIFICATOR.OPTIONAL:
1942 text += generate_has_oneof(field_index, nesting)
1943 if f.qualificator == Analysis.FIELD_QUALIFICATOR.REPEATED:
1944 var array_type := "[" + gd_type + "]" if gd_type else ""
1945 text += tabulate("func get_" + f.name + "() -> Array" + array_type + ":\n", nesting)
1946 else:
1947 if f.qualificator == Analysis.FIELD_QUALIFICATOR.OPTIONAL:
1948 text += tabulate("func has_" + f.name + "() -> bool:\n", nesting)
1949 nesting += 1
1950 text += tabulate("if " + varname + ".value != null:\n", nesting)
1951 nesting += 1
1952 text += tabulate("return true\n", nesting)
1953 nesting -= 1
1954 text += tabulate("return false\n", nesting)
1955 nesting -= 1
1956 text += tabulate("func get_" + f.name + "()" + return_type + ":\n", nesting)
1957 nesting += 1
1958 text += tabulate("return " + varname + ".value\n", nesting)
1959 nesting -= 1
1960 text += tabulate("func clear_" + f.name + "() -> void:\n", nesting)
1961 nesting += 1
1962 text += tabulate("data[" + str(f.tag) + "].state = PB_SERVICE_STATE.UNFILLED\n", nesting)
1963 if f.qualificator == Analysis.FIELD_QUALIFICATOR.REPEATED:
1964 text += tabulate(varname + ".value.clear()\n", nesting)
1965 nesting -= 1
1966 text += tabulate("func add_" + f.name + "(value" + argument_type + ") -> void:\n", nesting)
1967 nesting += 1
1968 text += tabulate(varname + ".value.append(value)\n", nesting)
1969 else:
1970 text += tabulate(varname + ".value = " + default_dict_text() + "[" + generate_field_type(f) + "]\n", nesting)
1971 nesting -= 1
1972 text += tabulate("func set_" + f.name + "(value" + argument_type + ") -> void:\n", nesting)
1973 nesting += 1
1974 text += generate_group_clear(field_index, nesting)
1975 text += tabulate(varname + ".value = value\n", nesting)
1976 return text
1977
1978 func generate_class(class_index : int, nesting : int) -> String:
1979 var text : String = ""
1980 if class_table[class_index].type == Analysis.CLASS_TYPE.MESSAGE || class_table[class_index].type == Analysis.CLASS_TYPE.MAP:
1981 var cls_pref : String = ""
1982 cls_pref += tabulate("class " + class_table[class_index].name + ":\n", nesting)
1983 nesting += 1
1984 cls_pref += tabulate("func _init():\n", nesting)
1985 text += cls_pref
1986 nesting += 1
1987 text += tabulate("var service\n", nesting)
1988 text += tabulate("\n", nesting)
1989 var field_text : String = ""
1990 for i in range(field_table.size()):
1991 if field_table[i].parent_class_id == class_index:
1992 text += generate_field_constructor(i, nesting)
1993 text += tabulate("\n", nesting)
1994 field_text += generate_field(i, nesting - 1)
1995 field_text += tabulate("\n", nesting - 1)
1996 nesting -= 1
1997 text += tabulate("var data = {}\n", nesting)
1998 text += tabulate("\n", nesting)
1999 text += field_text
2000 for j in range(class_table.size()):
2001 if class_table[j].parent_index == class_index:
2002 var cl_text = generate_class(j, nesting)
2003 text += cl_text
2004 if class_table[j].type == Analysis.CLASS_TYPE.MESSAGE || class_table[j].type == Analysis.CLASS_TYPE.MAP:
2005 text += generate_class_services(nesting + 1)
2006 text += tabulate("\n", nesting + 1)
2007 elif class_table[class_index].type == Analysis.CLASS_TYPE.ENUM:
2008 text += tabulate("enum " + class_table[class_index].name + " {\n", nesting)
2009 nesting += 1
2010
2011 var expected_prefix = class_table[class_index].name.to_snake_case().to_upper() + "_"
2012 var all_have_prefix = true
2013 for en in range(class_table[class_index].values.size()):
2014 var value_name = class_table[class_index].values[en].name
2015 all_have_prefix = all_have_prefix and value_name.begins_with(expected_prefix) and value_name != expected_prefix
2016
2017 for en in range(class_table[class_index].values.size()):
2018 var value_name = class_table[class_index].values[en].name
2019 if all_have_prefix:
2020 value_name = value_name.substr(expected_prefix.length())
2021 var enum_val = value_name + " = " + class_table[class_index].values[en].value
2022 if en == class_table[class_index].values.size() - 1:
2023 text += tabulate(enum_val + "\n", nesting)
2024 else:
2025 text += tabulate(enum_val + ",\n", nesting)
2026 nesting -= 1
2027 text += tabulate("}\n", nesting)
2028 text += tabulate("\n", nesting)
2029
2030 return text
2031
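# Emits the helper API shared by every generated message class:
# _to_string(), to_bytes() and from_bytes(bytes, offset, limit).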
2032 func generate_class_services(nesting : int) -> String:
2033 var text : String = ""
2034 text += tabulate("func _to_string() -> String:\n", nesting)
2035 nesting += 1
2036 text += tabulate("return PBPacker.message_to_string(data)\n", nesting)
2037 text += tabulate("\n", nesting)
2038 nesting -= 1
2039 text += tabulate("func to_bytes() -> PackedByteArray:\n", nesting)
2040 nesting += 1
2041 text += tabulate("return PBPacker.pack_message(data)\n", nesting)
2042 text += tabulate("\n", nesting)
2043 nesting -= 1
2044 text += tabulate("func from_bytes(bytes : PackedByteArray, offset : int = 0, limit : int = -1) -> int:\n", nesting)
2045 nesting += 1
2046 text += tabulate("var cur_limit = bytes.size()\n", nesting)
2047 text += tabulate("if limit != -1:\n", nesting)
2048 nesting += 1
2049 text += tabulate("cur_limit = limit\n", nesting)
2050 nesting -= 1
2051 text += tabulate("var result = PBPacker.unpack_message(data, bytes, offset, cur_limit)\n", nesting)
2052 text += tabulate("if result == cur_limit:\n", nesting)
2053 nesting += 1
2054 text += tabulate("if PBPacker.check_required(data):\n", nesting)
2055 nesting += 1
2056 text += tabulate("if limit == -1:\n", nesting)
2057 nesting += 1
2058 text += tabulate("return PB_ERR.NO_ERRORS\n", nesting)
2059 nesting -= 2
2060 text += tabulate("else:\n", nesting)
2061 nesting += 1
2062 text += tabulate("return PB_ERR.REQUIRED_FIELDS\n", nesting)
2063 nesting -= 2
2064 text += tabulate("elif limit == -1 && result > 0:\n", nesting)
2065 nesting += 1
2066 text += tabulate("return PB_ERR.PARSE_INCOMPLETE\n", nesting)
2067 nesting -= 1
2068 text += tabulate("return result\n", nesting)
2069 return text
2070
2071 func translate(file_name : String, core_file_name : String) -> bool:
2072
2073 var file : FileAccess = FileAccess.open(file_name, FileAccess.WRITE)
2074 if file == null:
2075 printerr("File: '", file_name, "' save error.")
2076 return false
2077
2078 if !FileAccess.file_exists(core_file_name):
2079 printerr("File: '", core_file_name, "' not found.")
2080 return false
2081
2082 var core_file : FileAccess = FileAccess.open(core_file_name, FileAccess.READ)
2083 if core_file == null:
2084 printerr("File: '", core_file_name, "' read error.")
2085 return false
2086 var core_text : String = core_file.get_as_text()
2087 core_file.close()
2088
2089 var text : String = ""
2090 var nesting : int = 0
2091 core_text = core_text.replace(PROTO_VERSION_DEFAULT, PROTO_VERSION_CONST + str(proto_version))
2092 text += core_text + "\n\n\n"
2093 text += "############### USER DATA BEGIN ################\n"
2094 var cls_user : String = ""
2095 for i in range(class_table.size()):
2096 if class_table[i].parent_index == -1:
2097 var cls_text = generate_class(i, nesting)
2098 cls_user += cls_text
2099 if class_table[i].type == Analysis.CLASS_TYPE.MESSAGE:
2100 nesting += 1
2101 cls_user += generate_class_services(nesting)
2102 cls_user += tabulate("\n", nesting)
2103 nesting -= 1
2104 text += "\n\n"
2105 text += cls_user
2106 text += "################ USER DATA END #################\n"
2107 file.store_string(text)
2108 file.close()
2109 if !FileAccess.file_exists(file_name):
2110 printerr("File: '", file_name, "' save error.")
2111 return false
2112 return true
2113
2114
2115class ImportFile:
2116 func _init(sha : String, a_path : String, a_parent : int):
2117 sha256 = sha
2118 path = a_path
2119 parent_index = a_parent
2120
2121 var sha256 : String
2122 var path : String
2123 var parent_index : int
2124
2125func parse_all(analyzes : Dictionary, imports : Array, path : String, full_name : String, parent_index : int) -> bool:
2126
2127 if !FileAccess.file_exists(full_name):
2128 printerr(full_name, ": not found.")
2129 return false
2130
2131 var file : FileAccess = FileAccess.open(full_name, FileAccess.READ)
2132 if file == null:
2133 printerr(full_name, ": read error.")
2134 return false
2135 var doc : Document = Document.new(full_name, file.get_as_text())
2136 var sha : String = file.get_sha256(full_name)
2137 file.close()
2138
2139 if !analyzes.has(sha):
2140 print(full_name, ": parsing.")
2141 var analysis : Analysis = Analysis.new(path, doc)
2142 var an_result : AnalyzeResult = analysis.analyze()
2143 if an_result.state:
2144 analyzes[sha] = an_result
2145 var parent : int = imports.size()
2146 imports.append(ImportFile.new(sha, doc.name, parent_index))
2147 for im in an_result.imports:
2148 if !parse_all(analyzes, imports, path, im.path, parent):
2149 return false
2150 else:
2151 printerr(doc.name + ": parsing error.")
2152 return false
2153 else:
2154 print(full_name, ": retrieving data from cache.")
2155 imports.append(ImportFile.new(sha, doc.name, parent_index))
2156 return true
2157
2158func union_analyses(a1 : AnalyzeResult, a2 : AnalyzeResult, only_classes : bool = true) -> void:
2159 var class_offset : int = a1.classes.size()
2160 var field_offset = a1.fields.size()
2161 for cl in a2.classes:
2162 var cur_class : Analysis.ASTClass = cl.copy()
2163 if cur_class.parent_index != -1:
2164 cur_class.parent_index += class_offset
2165 a1.classes.append(cur_class)
2166 if only_classes:
2167 return
2168 for fl in a2.fields:
2169 var cur_field : Analysis.ASTField = fl.copy()
2170 cur_field.parent_class_id += class_offset
2171 cur_field.type_class_id = -1
2172 a1.fields.append(cur_field)
2173 for gr in a2.groups:
2174 var cur_group : Analysis.ASTFieldGroup = gr.copy()
2175 cur_group.parent_class_id += class_offset
2176 var indexes : Array = []
2177 for i in cur_group.field_indexes:
2178 indexes.append(i + field_offset)
2179 cur_group.field_indexes = indexes
2180 a1.groups.append(cur_group)
2181
2182func union_imports(analyzes : Dictionary, key : String, result : AnalyzeResult, keys : Array, nesting : int, use_public : bool = true, only_classes : bool = true) -> void:
2183 nesting += 1
2184 for im in analyzes[key].imports:
2185 var find : bool = false
2186 for k in keys:
2187 if im.sha256 == k:
2188 find = true
2189 break
2190 if find:
2191 continue
2192 if (!use_public) || (use_public && ((im.public && nesting > 1) || nesting < 2)):
2193 keys.append(im.sha256)
2194 union_analyses(result, analyzes[im.sha256], only_classes)
2195 union_imports(analyzes, im.sha256, result, keys, nesting, use_public, only_classes)
2196
2197func semantic_all(analyzes : Dictionary, imports : Array)-> bool:
2198 for k in analyzes.keys():
2199 print(analyzes[k].doc.name, ": analysis.")
2200 var keys : Array = []
2201 var analyze : AnalyzeResult = analyzes[k].soft_copy()
2202 keys.append(k)
2203 analyze.classes = []
2204 for cl in analyzes[k].classes:
2205 analyze.classes.append(cl.copy())
2206 union_imports(analyzes, k, analyze, keys, 0)
2207 var semantic : Semantic = Semantic.new(analyze)
2208 if !semantic.check():
2209 printerr(analyzes[k].doc.name, ": analysis error.")
2210 return false
2211 return true
2212
2213func translate_all(analyzes : Dictionary, file_name : String, core_file_name : String) -> bool:
2214 var first_key : String = analyzes.keys()[0]
2215 var analyze : AnalyzeResult = analyzes[first_key]
2216 var keys : Array = []
2217 keys.append(first_key)
2218 union_imports(analyzes, first_key, analyze, keys, 0, false, false)
2219 print("Performing full semantic analysis.")
2220 var semantic : Semantic = Semantic.new(analyze)
2221 if !semantic.check():
2222 return false
2223 print("Performing translation.")
2224 var translator : Translator = Translator.new(analyze)
2225 if !translator.translate(file_name, core_file_name):
2226 return false
2227 var first : bool = true
2228 return true
2229
2230func work(path : String, in_file : String, out_file : String, core_file : String) -> bool:
2231 var in_full_name : String = path + in_file
2232 var imports : Array = []
2233 var analyzes : Dictionary = {}
2234
2235 print("Compiling source: '", in_full_name, "', output: '", out_file, "'.")
2236 print("\n1. Parsing:")
2237 if parse_all(analyzes, imports, path, in_full_name, -1):
2238 print("* Parsing completed successfully. *")
2239 else:
2240 return false
2241 print("\n2. Performing semantic analysis:")
2242 if semantic_all(analyzes, imports):
2243 print("* Semantic analysis completed successfully. *")
2244 else:
2245 return false
2246 print("\n3. Creating output file:")
2247 if translate_all(analyzes, out_file, core_file):
2248 print("* Output file was created successfully. *")
2249 else:
2250 return false
2251 return true
2252
2253func _ready():
2254 pass
diff --git a/vendor/godobuf/addons/protobuf/plugin.cfg b/vendor/godobuf/addons/protobuf/plugin.cfg new file mode 100644 index 0000000..6456a11 --- /dev/null +++ b/vendor/godobuf/addons/protobuf/plugin.cfg
@@ -0,0 +1,7 @@
1[plugin]
2
3name="Godobuf"
4description="Google Protobuf implementation for Godot/GDScript"
5author="oniksan"
6version="0.6.1 for Godot 4.x.y"
7script="protobuf_ui.gd"
diff --git a/vendor/godobuf/addons/protobuf/protobuf_cmdln.gd b/vendor/godobuf/addons/protobuf/protobuf_cmdln.gd new file mode 100644 index 0000000..97d7ba4 --- /dev/null +++ b/vendor/godobuf/addons/protobuf/protobuf_cmdln.gd
@@ -0,0 +1,66 @@
1#
2# BSD 3-Clause License
3#
4# Copyright (c) 2018, Oleg Malyavkin
5# All rights reserved.
6#
7# Redistribution and use in source and binary forms, with or without
8# modification, are permitted provided that the following conditions are met:
9#
10# * Redistributions of source code must retain the above copyright notice, this
11# list of conditions and the following disclaimer.
12#
13# * Redistributions in binary form must reproduce the above copyright notice,
14# this list of conditions and the following disclaimer in the documentation
15# and/or other materials provided with the distribution.
16#
17# * Neither the name of the copyright holder nor the names of its
18# contributors may be used to endorse or promote products derived from
19# this software without specific prior written permission.
20#
21# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
22# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
23# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
24# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
25# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
26# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
27# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
28# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
29# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
30# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
31
32extends SceneTree
33
34var Parser = preload("res://addons/protobuf/parser.gd")
35var Util = preload("res://addons/protobuf/protobuf_util.gd")
36
37func error(msg : String):
38 push_error(msg)
39 quit()
40
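# Expected invocation (an assumption based on the argument parsing below; adjust
# paths to your project):
#   godot --headless -s addons/protobuf/protobuf_cmdln.gd --input=<input.proto> --output=<output.gd>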
41func _init():
42 var arguments = {}
43 for argument in OS.get_cmdline_args():
44 if argument.find("=") > -1:
45 var key_value = argument.split("=")
46 arguments[key_value[0].lstrip("--")] = key_value[1]
47
48 if !arguments.has("input") || !arguments.has("output"):
49 error("Expected 2 parameters: input and output")
50
51 var input_file_name = arguments["input"]
52 var output_file_name = arguments["output"]
53
54 var file = FileAccess.open(input_file_name, FileAccess.READ)
55 if file == null:
56 error("File: '" + input_file_name + "' not found.")
57
58 var parser = Parser.new()
59
60 if parser.work(Util.extract_dir(input_file_name), Util.extract_filename(input_file_name), \
61 output_file_name, "res://addons/protobuf/protobuf_core.gd"):
62 print("Compiled '", input_file_name, "' to '", output_file_name, "'.")
63 else:
64 error("Compilation failed.")
65
66 quit()
diff --git a/vendor/godobuf/addons/protobuf/protobuf_core.gd b/vendor/godobuf/addons/protobuf/protobuf_core.gd new file mode 100644 index 0000000..7098413 --- /dev/null +++ b/vendor/godobuf/addons/protobuf/protobuf_core.gd
@@ -0,0 +1,668 @@
1#
2# BSD 3-Clause License
3#
4# Copyright (c) 2018 - 2023, Oleg Malyavkin
5# All rights reserved.
6#
7# Redistribution and use in source and binary forms, with or without
8# modification, are permitted provided that the following conditions are met:
9#
10# * Redistributions of source code must retain the above copyright notice, this
11# list of conditions and the following disclaimer.
12#
13# * Redistributions in binary form must reproduce the above copyright notice,
14# this list of conditions and the following disclaimer in the documentation
15# and/or other materials provided with the distribution.
16#
17# * Neither the name of the copyright holder nor the names of its
18# contributors may be used to endorse or promote products derived from
19# this software without specific prior written permission.
20#
21# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
22# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
23# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
24# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
25# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
26# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
27# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
28# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
29# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
30# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
31
32# DEBUG_TAB: redefine this " " if you need to, for example: const DEBUG_TAB = "\t"
33
34const PROTO_VERSION = 0
35
36const DEBUG_TAB : String = " "
37
38enum PB_ERR {
39 NO_ERRORS = 0,
40 VARINT_NOT_FOUND = -1,
41 REPEATED_COUNT_NOT_FOUND = -2,
42 REPEATED_COUNT_MISMATCH = -3,
43 LENGTHDEL_SIZE_NOT_FOUND = -4,
44 LENGTHDEL_SIZE_MISMATCH = -5,
45 PACKAGE_SIZE_MISMATCH = -6,
46 UNDEFINED_STATE = -7,
47 PARSE_INCOMPLETE = -8,
48 REQUIRED_FIELDS = -9
49}
50
51enum PB_DATA_TYPE {
52 INT32 = 0,
53 SINT32 = 1,
54 UINT32 = 2,
55 INT64 = 3,
56 SINT64 = 4,
57 UINT64 = 5,
58 BOOL = 6,
59 ENUM = 7,
60 FIXED32 = 8,
61 SFIXED32 = 9,
62 FLOAT = 10,
63 FIXED64 = 11,
64 SFIXED64 = 12,
65 DOUBLE = 13,
66 STRING = 14,
67 BYTES = 15,
68 MESSAGE = 16,
69 MAP = 17
70}
71
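# Per-syntax default values: proto2 fields have no implicit default here (null
# means "not set"), while proto3 falls back to zero values (0, false, "", empty
# containers).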
72const DEFAULT_VALUES_2 = {
73 PB_DATA_TYPE.INT32: null,
74 PB_DATA_TYPE.SINT32: null,
75 PB_DATA_TYPE.UINT32: null,
76 PB_DATA_TYPE.INT64: null,
77 PB_DATA_TYPE.SINT64: null,
78 PB_DATA_TYPE.UINT64: null,
79 PB_DATA_TYPE.BOOL: null,
80 PB_DATA_TYPE.ENUM: null,
81 PB_DATA_TYPE.FIXED32: null,
82 PB_DATA_TYPE.SFIXED32: null,
83 PB_DATA_TYPE.FLOAT: null,
84 PB_DATA_TYPE.FIXED64: null,
85 PB_DATA_TYPE.SFIXED64: null,
86 PB_DATA_TYPE.DOUBLE: null,
87 PB_DATA_TYPE.STRING: null,
88 PB_DATA_TYPE.BYTES: null,
89 PB_DATA_TYPE.MESSAGE: null,
90 PB_DATA_TYPE.MAP: null
91}
92
93const DEFAULT_VALUES_3 = {
94 PB_DATA_TYPE.INT32: 0,
95 PB_DATA_TYPE.SINT32: 0,
96 PB_DATA_TYPE.UINT32: 0,
97 PB_DATA_TYPE.INT64: 0,
98 PB_DATA_TYPE.SINT64: 0,
99 PB_DATA_TYPE.UINT64: 0,
100 PB_DATA_TYPE.BOOL: false,
101 PB_DATA_TYPE.ENUM: 0,
102 PB_DATA_TYPE.FIXED32: 0,
103 PB_DATA_TYPE.SFIXED32: 0,
104 PB_DATA_TYPE.FLOAT: 0.0,
105 PB_DATA_TYPE.FIXED64: 0,
106 PB_DATA_TYPE.SFIXED64: 0,
107 PB_DATA_TYPE.DOUBLE: 0.0,
108 PB_DATA_TYPE.STRING: "",
109 PB_DATA_TYPE.BYTES: [],
110 PB_DATA_TYPE.MESSAGE: null,
111 PB_DATA_TYPE.MAP: []
112}
113
114enum PB_TYPE {
115 VARINT = 0,
116 FIX64 = 1,
117 LENGTHDEL = 2,
118 STARTGROUP = 3,
119 ENDGROUP = 4,
120 FIX32 = 5,
121 UNDEFINED = 8
122}
123
124enum PB_RULE {
125 OPTIONAL = 0,
126 REQUIRED = 1,
127 REPEATED = 2,
128 RESERVED = 3
129}
130
131enum PB_SERVICE_STATE {
132 FILLED = 0,
133 UNFILLED = 1
134}
135
136class PBField:
137 func _init(a_name : String, a_type : int, a_rule : int, a_tag : int, packed : bool, a_value = null):
138 name = a_name
139 type = a_type
140 rule = a_rule
141 tag = a_tag
142 option_packed = packed
143 value = a_value
144
145 var name : String
146 var type : int
147 var rule : int
148 var tag : int
149 var option_packed : bool
150 var value
151 var is_map_field : bool = false
152 var option_default : bool = false
153
154class PBTypeTag:
155 var ok : bool = false
156 var type : int
157 var tag : int
158 var offset : int
159
160class PBServiceField:
161 var field : PBField
162 var func_ref = null
163 var state : int = PB_SERVICE_STATE.UNFILLED
164
165class PBPacker:
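# ZigZag helpers used for sint32/sint64 fields: signed integers are mapped to
# unsigned ones so that small negative values stay small on the wire
# (0 -> 0, -1 -> 1, 1 -> 2, -2 -> 3, ...).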
166 static func convert_signed(n : int) -> int:
167 if n < -2147483648:
168 return (n << 1) ^ (n >> 63)
169 else:
170 return (n << 1) ^ (n >> 31)
171
172 static func deconvert_signed(n : int) -> int:
173 if n & 0x01:
174 return ~(n >> 1)
175 else:
176 return (n >> 1)
177
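# pack_varint encodes an integer as a base-128 varint: 7 bits per byte, least
# significant group first, with the high bit set on every byte except the last;
# negative values occupy the full 10 bytes (two's complement as unsigned 64-bit).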
178 static func pack_varint(value) -> PackedByteArray:
179 var varint : PackedByteArray = PackedByteArray()
180 if typeof(value) == TYPE_BOOL:
181 if value:
182 value = 1
183 else:
184 value = 0
185 for _i in range(9):
186 var b = value & 0x7F
187 value >>= 7
188 if value:
189 varint.append(b | 0x80)
190 else:
191 varint.append(b)
192 break
193 if varint.size() == 9 && (varint[8] & 0x80 != 0):
194 varint.append(0x01)
195 return varint
196
197 static func pack_bytes(value, count : int, data_type : int) -> PackedByteArray:
198 var bytes : PackedByteArray = PackedByteArray()
199 if data_type == PB_DATA_TYPE.FLOAT:
200 var spb : StreamPeerBuffer = StreamPeerBuffer.new()
201 spb.put_float(value)
202 bytes = spb.get_data_array()
203 elif data_type == PB_DATA_TYPE.DOUBLE:
204 var spb : StreamPeerBuffer = StreamPeerBuffer.new()
205 spb.put_double(value)
206 bytes = spb.get_data_array()
207 else:
208 for _i in range(count):
209 bytes.append(value & 0xFF)
210 value >>= 8
211 return bytes
212
213 static func unpack_bytes(bytes : PackedByteArray, index : int, count : int, data_type : int):
214 if data_type == PB_DATA_TYPE.FLOAT:
215 return bytes.decode_float(index)
216 elif data_type == PB_DATA_TYPE.DOUBLE:
217 return bytes.decode_double(index)
218 else:
219 # Convert to big endian
220 var slice: PackedByteArray = bytes.slice(index, index + count)
221 slice.reverse()
222 return slice
223
224 static func unpack_varint(varint_bytes) -> int:
225 var value : int = 0
226 var i: int = varint_bytes.size() - 1
227 while i > -1:
228 value = (value << 7) | (varint_bytes[i] & 0x7F)
229 i -= 1
230 return value
231
232 static func pack_type_tag(type : int, tag : int) -> PackedByteArray:
233 return pack_varint((tag << 3) | type)
234
235 static func isolate_varint(bytes : PackedByteArray, index : int) -> PackedByteArray:
236 var i: int = index
237 while i <= index + 10: # Protobuf varint max size is 10 bytes
238 if !(bytes[i] & 0x80):
239 return bytes.slice(index, i + 1)
240 i += 1
241 return [] # Unreachable
242
243 static func unpack_type_tag(bytes : PackedByteArray, index : int) -> PBTypeTag:
244 var varint_bytes : PackedByteArray = isolate_varint(bytes, index)
245 var result : PBTypeTag = PBTypeTag.new()
246 if varint_bytes.size() != 0:
247 result.ok = true
248 result.offset = varint_bytes.size()
249 var unpacked : int = unpack_varint(varint_bytes)
250 result.type = unpacked & 0x07
251 result.tag = unpacked >> 3
252 return result
253
254 static func pack_length_delimeted(type : int, tag : int, bytes : PackedByteArray) -> PackedByteArray:
255 var result : PackedByteArray = pack_type_tag(type, tag)
256 result.append_array(pack_varint(bytes.size()))
257 result.append_array(bytes)
258 return result
259
260 static func pb_type_from_data_type(data_type : int) -> int:
261 if data_type == PB_DATA_TYPE.INT32 || data_type == PB_DATA_TYPE.SINT32 || data_type == PB_DATA_TYPE.UINT32 || data_type == PB_DATA_TYPE.INT64 || data_type == PB_DATA_TYPE.SINT64 || data_type == PB_DATA_TYPE.UINT64 || data_type == PB_DATA_TYPE.BOOL || data_type == PB_DATA_TYPE.ENUM:
262 return PB_TYPE.VARINT
263 elif data_type == PB_DATA_TYPE.FIXED32 || data_type == PB_DATA_TYPE.SFIXED32 || data_type == PB_DATA_TYPE.FLOAT:
264 return PB_TYPE.FIX32
265 elif data_type == PB_DATA_TYPE.FIXED64 || data_type == PB_DATA_TYPE.SFIXED64 || data_type == PB_DATA_TYPE.DOUBLE:
266 return PB_TYPE.FIX64
267 elif data_type == PB_DATA_TYPE.STRING || data_type == PB_DATA_TYPE.BYTES || data_type == PB_DATA_TYPE.MESSAGE || data_type == PB_DATA_TYPE.MAP:
268 return PB_TYPE.LENGTHDEL
269 else:
270 return PB_TYPE.UNDEFINED
271
272 static func pack_field(field : PBField) -> PackedByteArray:
273 var type : int = pb_type_from_data_type(field.type)
274 var type_copy : int = type
275 if field.rule == PB_RULE.REPEATED && field.option_packed:
276 type = PB_TYPE.LENGTHDEL
277 var head : PackedByteArray = pack_type_tag(type, field.tag)
278 var data : PackedByteArray = PackedByteArray()
279 if type == PB_TYPE.VARINT:
280 var value
281 if field.rule == PB_RULE.REPEATED:
282 for v in field.value:
283 data.append_array(head)
284 if field.type == PB_DATA_TYPE.SINT32 || field.type == PB_DATA_TYPE.SINT64:
285 value = convert_signed(v)
286 else:
287 value = v
288 data.append_array(pack_varint(value))
289 return data
290 else:
291 if field.type == PB_DATA_TYPE.SINT32 || field.type == PB_DATA_TYPE.SINT64:
292 value = convert_signed(field.value)
293 else:
294 value = field.value
295 data = pack_varint(value)
296 elif type == PB_TYPE.FIX32:
297 if field.rule == PB_RULE.REPEATED:
298 for v in field.value:
299 data.append_array(head)
300 data.append_array(pack_bytes(v, 4, field.type))
301 return data
302 else:
303 data.append_array(pack_bytes(field.value, 4, field.type))
304 elif type == PB_TYPE.FIX64:
305 if field.rule == PB_RULE.REPEATED:
306 for v in field.value:
307 data.append_array(head)
308 data.append_array(pack_bytes(v, 8, field.type))
309 return data
310 else:
311 data.append_array(pack_bytes(field.value, 8, field.type))
312 elif type == PB_TYPE.LENGTHDEL:
313 if field.rule == PB_RULE.REPEATED:
314 if type_copy == PB_TYPE.VARINT:
315 if field.type == PB_DATA_TYPE.SINT32 || field.type == PB_DATA_TYPE.SINT64:
316 var signed_value : int
317 for v in field.value:
318 signed_value = convert_signed(v)
319 data.append_array(pack_varint(signed_value))
320 else:
321 for v in field.value:
322 data.append_array(pack_varint(v))
323 return pack_length_delimeted(type, field.tag, data)
324 elif type_copy == PB_TYPE.FIX32:
325 for v in field.value:
326 data.append_array(pack_bytes(v, 4, field.type))
327 return pack_length_delimeted(type, field.tag, data)
328 elif type_copy == PB_TYPE.FIX64:
329 for v in field.value:
330 data.append_array(pack_bytes(v, 8, field.type))
331 return pack_length_delimeted(type, field.tag, data)
332 elif field.type == PB_DATA_TYPE.STRING:
333 for v in field.value:
334 var obj = v.to_utf8_buffer()
335 data.append_array(pack_length_delimeted(type, field.tag, obj))
336 return data
337 elif field.type == PB_DATA_TYPE.BYTES:
338 for v in field.value:
339 data.append_array(pack_length_delimeted(type, field.tag, v))
340 return data
341 elif typeof(field.value[0]) == TYPE_OBJECT:
342 for v in field.value:
343 var obj : PackedByteArray = v.to_bytes()
344 data.append_array(pack_length_delimeted(type, field.tag, obj))
345 return data
346 else:
347 if field.type == PB_DATA_TYPE.STRING:
348 var str_bytes : PackedByteArray = field.value.to_utf8_buffer()
349 if PROTO_VERSION == 2 || (PROTO_VERSION == 3 && str_bytes.size() > 0):
350 data.append_array(str_bytes)
351 return pack_length_delimeted(type, field.tag, data)
352 if field.type == PB_DATA_TYPE.BYTES:
353 if PROTO_VERSION == 2 || (PROTO_VERSION == 3 && field.value.size() > 0):
354 data.append_array(field.value)
355 return pack_length_delimeted(type, field.tag, data)
356 elif typeof(field.value) == TYPE_OBJECT:
357 var obj : PackedByteArray = field.value.to_bytes()
358 if obj.size() > 0:
359 data.append_array(obj)
360 return pack_length_delimeted(type, field.tag, data)
361 else:
362 pass
363 if data.size() > 0:
364 head.append_array(data)
365 return head
366 else:
367 return data
368
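	# Advances past a field whose tag is not present in the target message,
	# using only its wire type. Returns the new offset, or
	# PB_ERR.UNDEFINED_STATE for an unrecognized wire type.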
369 static func skip_unknown_field(bytes : PackedByteArray, offset : int, type : int) -> int:
370 if type == PB_TYPE.VARINT:
371 return offset + isolate_varint(bytes, offset).size()
372 if type == PB_TYPE.FIX64:
373 return offset + 8
374 if type == PB_TYPE.LENGTHDEL:
375 var length_bytes : PackedByteArray = isolate_varint(bytes, offset)
376 var length : int = unpack_varint(length_bytes)
377 return offset + length_bytes.size() + length
378 if type == PB_TYPE.FIX32:
379 return offset + 4
380 return PB_ERR.UNDEFINED_STATE
381
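	# Decodes one field occurrence (or a packed run of values) starting at
	# `offset` into field.value. For embedded messages, message_func_ref is
	# called to construct the nested message before delegating to its
	# from_bytes(). Returns the new offset on success, or a PB_ERR code.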
382 static func unpack_field(bytes : PackedByteArray, offset : int, field : PBField, type : int, message_func_ref) -> int:
383 if field.rule == PB_RULE.REPEATED && type != PB_TYPE.LENGTHDEL && field.option_packed:
384 var count = isolate_varint(bytes, offset)
385 if count.size() > 0:
386 offset += count.size()
387 count = unpack_varint(count)
388 if type == PB_TYPE.VARINT:
389 var val
390 var counter = offset + count
391 while offset < counter:
392 val = isolate_varint(bytes, offset)
393 if val.size() > 0:
394 offset += val.size()
395 val = unpack_varint(val)
396 if field.type == PB_DATA_TYPE.SINT32 || field.type == PB_DATA_TYPE.SINT64:
397 val = deconvert_signed(val)
398 elif field.type == PB_DATA_TYPE.BOOL:
399 if val:
400 val = true
401 else:
402 val = false
403 field.value.append(val)
404 else:
405 return PB_ERR.REPEATED_COUNT_MISMATCH
406 return offset
407 elif type == PB_TYPE.FIX32 || type == PB_TYPE.FIX64:
408 var type_size
409 if type == PB_TYPE.FIX32:
410 type_size = 4
411 else:
412 type_size = 8
413 var val
414 var counter = offset + count
415 while offset < counter:
416 if (offset + type_size) > bytes.size():
417 return PB_ERR.REPEATED_COUNT_MISMATCH
418 val = unpack_bytes(bytes, offset, type_size, field.type)
419 offset += type_size
420 field.value.append(val)
421 return offset
422 else:
423 return PB_ERR.REPEATED_COUNT_NOT_FOUND
424 else:
425 if type == PB_TYPE.VARINT:
426 var val = isolate_varint(bytes, offset)
427 if val.size() > 0:
428 offset += val.size()
429 val = unpack_varint(val)
430 if field.type == PB_DATA_TYPE.SINT32 || field.type == PB_DATA_TYPE.SINT64:
431 val = deconvert_signed(val)
432 elif field.type == PB_DATA_TYPE.BOOL:
433 if val:
434 val = true
435 else:
436 val = false
437 if field.rule == PB_RULE.REPEATED:
438 field.value.append(val)
439 else:
440 field.value = val
441 else:
442 return PB_ERR.VARINT_NOT_FOUND
443 return offset
444 elif type == PB_TYPE.FIX32 || type == PB_TYPE.FIX64:
445 var type_size
446 if type == PB_TYPE.FIX32:
447 type_size = 4
448 else:
449 type_size = 8
450 var val
451 if (offset + type_size) > bytes.size():
452 return PB_ERR.REPEATED_COUNT_MISMATCH
453 val = unpack_bytes(bytes, offset, type_size, field.type)
454 offset += type_size
455 if field.rule == PB_RULE.REPEATED:
456 field.value.append(val)
457 else:
458 field.value = val
459 return offset
460 elif type == PB_TYPE.LENGTHDEL:
461 var inner_size = isolate_varint(bytes, offset)
462 if inner_size.size() > 0:
463 offset += inner_size.size()
464 inner_size = unpack_varint(inner_size)
465 if inner_size >= 0:
466 if inner_size + offset > bytes.size():
467 return PB_ERR.LENGTHDEL_SIZE_MISMATCH
468 if message_func_ref != null:
469 var message = message_func_ref.call()
470 if inner_size > 0:
471 var sub_offset = message.from_bytes(bytes, offset, inner_size + offset)
472 if sub_offset > 0:
473 if sub_offset - offset >= inner_size:
474 offset = sub_offset
475 return offset
476 else:
477 return PB_ERR.LENGTHDEL_SIZE_MISMATCH
478 return sub_offset
479 else:
480 return offset
481 elif field.type == PB_DATA_TYPE.STRING:
482 var str_bytes : PackedByteArray = bytes.slice(offset, inner_size + offset)
483 if field.rule == PB_RULE.REPEATED:
484 field.value.append(str_bytes.get_string_from_utf8())
485 else:
486 field.value = str_bytes.get_string_from_utf8()
487 return offset + inner_size
488 elif field.type == PB_DATA_TYPE.BYTES:
489 var val_bytes : PackedByteArray = bytes.slice(offset, inner_size + offset)
490 if field.rule == PB_RULE.REPEATED:
491 field.value.append(val_bytes)
492 else:
493 field.value = val_bytes
494 return offset + inner_size
495 else:
496 return PB_ERR.LENGTHDEL_SIZE_NOT_FOUND
497 else:
498 return PB_ERR.LENGTHDEL_SIZE_NOT_FOUND
499 return PB_ERR.UNDEFINED_STATE
500
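	# Core decode loop: reads tag/wire-type pairs until `limit`, dispatching
	# known tags to unpack_field() and skipping unknown fields. Returns the
	# final offset, or a PB_ERR code on malformed input.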
501 static func unpack_message(data, bytes : PackedByteArray, offset : int, limit : int) -> int:
502 while true:
503 var tt : PBTypeTag = unpack_type_tag(bytes, offset)
504 if tt.ok:
505 offset += tt.offset
506 if data.has(tt.tag):
507 var service : PBServiceField = data[tt.tag]
508 var type : int = pb_type_from_data_type(service.field.type)
509 if type == tt.type || (tt.type == PB_TYPE.LENGTHDEL && service.field.rule == PB_RULE.REPEATED && service.field.option_packed):
510 var res : int = unpack_field(bytes, offset, service.field, type, service.func_ref)
511 if res > 0:
512 service.state = PB_SERVICE_STATE.FILLED
513 offset = res
514 if offset == limit:
515 return offset
516 elif offset > limit:
517 return PB_ERR.PACKAGE_SIZE_MISMATCH
518 elif res < 0:
519 return res
520 else:
521 break
522 else:
523 var res : int = skip_unknown_field(bytes, offset, tt.type)
524 if res > 0:
525 offset = res
526 if offset == limit:
527 return offset
528 elif offset > limit:
529 return PB_ERR.PACKAGE_SIZE_MISMATCH
530 elif res < 0:
531 return res
532 else:
533 break
534 else:
535 return offset
536 return PB_ERR.UNDEFINED_STATE
537
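	# Serializes all filled fields in tag order, skipping unfilled fields left
	# at their default value and empty repeated fields. If a required field is
	# unfilled, an error is printed and an empty PackedByteArray is returned.
	# Presumably invoked by the generated message classes' to_bytes().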
538 static func pack_message(data) -> PackedByteArray:
539 var DEFAULT_VALUES
540 if PROTO_VERSION == 2:
541 DEFAULT_VALUES = DEFAULT_VALUES_2
542 elif PROTO_VERSION == 3:
543 DEFAULT_VALUES = DEFAULT_VALUES_3
544 var result : PackedByteArray = PackedByteArray()
545 var keys : Array = data.keys()
546 keys.sort()
547 for i in keys:
548 if data[i].field.value != null:
549 if data[i].state == PB_SERVICE_STATE.UNFILLED \
550 && !data[i].field.is_map_field \
551 && typeof(data[i].field.value) == typeof(DEFAULT_VALUES[data[i].field.type]) \
552 && data[i].field.value == DEFAULT_VALUES[data[i].field.type]:
553 continue
554 elif data[i].field.rule == PB_RULE.REPEATED && data[i].field.value.size() == 0:
555 continue
556 result.append_array(pack_field(data[i].field))
557 elif data[i].field.rule == PB_RULE.REQUIRED:
558 print("Error: required field is not filled: Tag:", data[i].field.tag)
559 return PackedByteArray()
560 return result
561
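	# Returns false if any `required` field was never filled during decoding.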
562 static func check_required(data) -> bool:
563 var keys : Array = data.keys()
564 for i in keys:
565 if data[i].field.rule == PB_RULE.REQUIRED && data[i].state == PB_SERVICE_STATE.UNFILLED:
566 return false
567 return true
568
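	# Flattens the repeated key/value entries of a protobuf map field into a
	# plain Dictionary.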
569 static func construct_map(key_values):
570 var result = {}
571 for kv in key_values:
572 result[kv.get_key()] = kv.get_value()
573 return result
574
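	# The helpers below (tabulate, value_to_string, field_to_string,
	# message_to_string) render a message as an indented, human-readable
	# string for debugging.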
575 static func tabulate(text : String, nesting : int) -> String:
576 var tab : String = ""
577 for _i in range(nesting):
578 tab += DEBUG_TAB
579 return tab + text
580
581 static func value_to_string(value, field : PBField, nesting : int) -> String:
582 var result : String = ""
583 var text : String
584 if field.type == PB_DATA_TYPE.MESSAGE:
585 result += "{"
586 nesting += 1
587 text = message_to_string(value.data, nesting)
588 if text != "":
589 result += "\n" + text
590 nesting -= 1
591 result += tabulate("}", nesting)
592 else:
593 nesting -= 1
594 result += "}"
595 elif field.type == PB_DATA_TYPE.BYTES:
596 result += "<"
597 for i in range(value.size()):
598 result += str(value[i])
599 if i != (value.size() - 1):
600 result += ", "
601 result += ">"
602 elif field.type == PB_DATA_TYPE.STRING:
603 result += "\"" + value + "\""
604 elif field.type == PB_DATA_TYPE.ENUM:
605 result += "ENUM::" + str(value)
606 else:
607 result += str(value)
608 return result
609
610 static func field_to_string(field : PBField, nesting : int) -> String:
611 var result : String = tabulate(field.name + ": ", nesting)
612 if field.type == PB_DATA_TYPE.MAP:
613 if field.value.size() > 0:
614 result += "(\n"
615 nesting += 1
616 for i in range(field.value.size()):
617 var local_key_value = field.value[i].data[1].field
618 result += tabulate(value_to_string(local_key_value.value, local_key_value, nesting), nesting) + ": "
619 local_key_value = field.value[i].data[2].field
620 result += value_to_string(local_key_value.value, local_key_value, nesting)
621 if i != (field.value.size() - 1):
622 result += ","
623 result += "\n"
624 nesting -= 1
625 result += tabulate(")", nesting)
626 else:
627 result += "()"
628 elif field.rule == PB_RULE.REPEATED:
629 if field.value.size() > 0:
630 result += "[\n"
631 nesting += 1
632 for i in range(field.value.size()):
633 result += tabulate(str(i) + ": ", nesting)
634 result += value_to_string(field.value[i], field, nesting)
635 if i != (field.value.size() - 1):
636 result += ","
637 result += "\n"
638 nesting -= 1
639 result += tabulate("]", nesting)
640 else:
641 result += "[]"
642 else:
643 result += value_to_string(field.value, field, nesting)
644 result += ";\n"
645 return result
646
647 static func message_to_string(data, nesting : int = 0) -> String:
648 var DEFAULT_VALUES
649 if PROTO_VERSION == 2:
650 DEFAULT_VALUES = DEFAULT_VALUES_2
651 elif PROTO_VERSION == 3:
652 DEFAULT_VALUES = DEFAULT_VALUES_3
653 var result : String = ""
654 var keys : Array = data.keys()
655 keys.sort()
656 for i in keys:
657 if data[i].field.value != null:
658 if data[i].state == PB_SERVICE_STATE.UNFILLED \
659 && !data[i].field.is_map_field \
660 && typeof(data[i].field.value) == typeof(DEFAULT_VALUES[data[i].field.type]) \
661 && data[i].field.value == DEFAULT_VALUES[data[i].field.type]:
662 continue
663 elif data[i].field.rule == PB_RULE.REPEATED && data[i].field.value.size() == 0:
664 continue
665 result += field_to_string(data[i].field, nesting)
666 elif data[i].field.rule == PB_RULE.REQUIRED:
667 result += data[i].field.name + ": " + "error"
668 return result
diff --git a/vendor/godobuf/addons/protobuf/protobuf_util.gd b/vendor/godobuf/addons/protobuf/protobuf_util.gd new file mode 100644 index 0000000..5941cb8 --- /dev/null +++ b/vendor/godobuf/addons/protobuf/protobuf_util.gd
@@ -0,0 +1,46 @@
1#
2# BSD 3-Clause License
3#
4# Copyright (c) 2018, Oleg Malyavkin
5# All rights reserved.
6#
7# Redistribution and use in source and binary forms, with or without
8# modification, are permitted provided that the following conditions are met:
9#
10# * Redistributions of source code must retain the above copyright notice, this
11# list of conditions and the following disclaimer.
12#
13# * Redistributions in binary form must reproduce the above copyright notice,
14# this list of conditions and the following disclaimer in the documentation
15# and/or other materials provided with the distribution.
16#
17# * Neither the name of the copyright holder nor the names of its
18# contributors may be used to endorse or promote products derived from
19# this software without specific prior written permission.
20#
21# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
22# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
23# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
24# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
25# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
26# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
27# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
28# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
29# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
30# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
31
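# Returns the directory portion of a slash-separated path, e.g.
# extract_dir("some/dir/file.gd") -> "some/dir/". Empty segments are
# dropped, so doubled slashes are collapsed.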
32static func extract_dir(file_path):
33 var parts = file_path.split("/", false)
34 parts.remove_at(parts.size() - 1)
35 var path
36 if file_path.begins_with("/"):
37 path = "/"
38 else:
39 path = ""
40 for part in parts:
41 path += part + "/"
42 return path
43
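# Returns the last path segment, e.g.
# extract_filename("some/dir/file.gd") -> "file.gd".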
44static func extract_filename(file_path):
45 var parts = file_path.split("/", false)
46 return parts[parts.size() - 1]
diff --git a/vendor/godobuf/default_env.tres b/vendor/godobuf/default_env.tres new file mode 100644 index 0000000..20207a4 --- /dev/null +++ b/vendor/godobuf/default_env.tres
@@ -0,0 +1,7 @@
1[gd_resource type="Environment" load_steps=2 format=2]
2
3[sub_resource type="ProceduralSky" id=1]
4
5[resource]
6background_mode = 2
7background_sky = SubResource( 1 )
diff --git a/vendor/godobuf/logo.png b/vendor/godobuf/logo.png new file mode 100644 index 0000000..4ff9029 --- /dev/null +++ b/vendor/godobuf/logo.png
Binary files differ
diff --git a/vendor/godobuf/logo.png.import b/vendor/godobuf/logo.png.import new file mode 100644 index 0000000..43df7a6 --- /dev/null +++ b/vendor/godobuf/logo.png.import
@@ -0,0 +1,35 @@
1[remap]
2
3importer="texture"
4type="StreamTexture"
5path="res://.import/logo.png-cca8726399059c8d4f806e28e356b14d.stex"
6metadata={
7"vram_texture": false
8}
9
10[deps]
11
12source_file="res://logo.png"
13dest_files=[ "res://.import/logo.png-cca8726399059c8d4f806e28e356b14d.stex" ]
14
15[params]
16
17compress/mode=0
18compress/lossy_quality=0.7
19compress/hdr_mode=0
20compress/bptc_ldr=0
21compress/normal_map=0
22flags/repeat=0
23flags/filter=true
24flags/mipmaps=false
25flags/anisotropic=false
26flags/srgb=2
27process/fix_alpha_border=true
28process/premult_alpha=false
29process/HDR_as_SRGB=false
30process/invert_color=false
31process/normal_map_invert_y=false
32stream=false
33size_limit=0
34detect_3d=true
35svg/scale=1.0
diff --git a/vendor/godobuf/project.godot b/vendor/godobuf/project.godot new file mode 100644 index 0000000..8cef0a4 --- /dev/null +++ b/vendor/godobuf/project.godot
@@ -0,0 +1,26 @@
1; Engine configuration file.
2; It's best edited using the editor UI and not directly,
3; since the parameters that go here are not all obvious.
4;
5; Format:
6; [section] ; section goes between []
7; param=value ; assign values to parameters
8
9config_version=4
10
11_global_script_classes=[ ]
12_global_script_class_icons={
13}
14
15[application]
16
17config/name="Protobuf Plugin"
18config/icon="res://logo.png"
19
20[editor_plugins]
21
22enabled=PoolStringArray( "res://addons/protobuf/plugin.cfg" )
23
24[rendering]
25
26environment/default_environment="res://default_env.tres"