polyglot_piranha.pyi
# Copyright (c) 2023 Uber Technologies, Inc.
#
# <p>Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
# except in compliance with the License. You may obtain a copy of the License at
# <p>http://www.apache.org/licenses/LICENSE-2.0
#
# <p>Unless required by applicable law or agreed to in writing, software distributed under the
# License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import annotations
from typing import List, Optional, Literal
# Languages that Piranha supports (see ./src/models/language.rs)
PiranhaLanguage = Literal["java", "kt", "kotlin", "go", "python", "swift", "typescript", "tsx", "thrift", "strings", "scm", "scala", "ruby", "yaml", "yml"]
def execute_piranha(piranha_argument: PiranhaArguments) -> list[PiranhaOutputSummary]:
"""
Executes Piranha for the given `piranha_argument` and returns a `PiranhaOutputSummary` for each file analyzed by Piranha
Parameters
------------
piranha_argument: PiranhaArguments
Configurations for Piranha
Returns
------------
List of `PiranhaOutputSummary`
"""
...
class PiranhaArguments:
"""
A class to capture Piranha's configurations
"""
def __init__(
self,
language: PiranhaLanguage,
paths_to_codebase: Optional[List[str]] = None,
include: Optional[List[str]] = None,
exclude: Optional[List[str]] = None,
substitutions: Optional[dict[str, str]] = None,
path_to_configurations: Optional[str] = None,
rule_graph: Optional[RuleGraph] = None,
code_snippet: Optional[str] = None,
dry_run: Optional[bool] = None,
cleanup_comments: Optional[bool] = None,
cleanup_comments_buffer: Optional[int] = None,
number_of_ancestors_in_parent_scope: Optional[int] = None,
delete_consecutive_new_lines: Optional[bool] = None,
global_tag_prefix: Optional[str] = "GLOBAL_TAG",
delete_file_if_empty: Optional[bool] = None,
path_to_output: Optional[str] = None,
allow_dirty_ast: Optional[bool] = None,
should_validate: Optional[bool] = None,
experiment_dyn: Optional[bool] = None,
):
"""
Constructs `PiranhaArguments`
Parameters
------------
language: PiranhaLanguage
the target language
paths_to_codebase: List[str]
Paths to source code folder or file
keyword arguments: _
substitutions (dict): Substitutions to instantiate the initial set of rules
path_to_configurations (str): Directory containing the configuration files - `piranha_arguments.toml`, `rules.toml`, and `edges.toml`
rule_graph (RuleGraph): The rule graph constructed via RuleGraph DSL
code_snippet (str): The input code snippet to transform
dry_run (bool): Disables in-place rewriting of code
cleanup_comments (bool): Enables deletion of associated comments
cleanup_comments_buffer (int): The number of lines to consider for cleaning up the comments
number_of_ancestors_in_parent_scope (int): The number of ancestors considered when applying rules with PARENT scope
delete_consecutive_new_lines (bool): Replaces consecutive newlines with a single newline
global_tag_prefix (str): the prefix for global tags
delete_file_if_empty (bool): User option that determines whether an empty file will be deleted
path_to_output (str): Path to the output json file
allow_dirty_ast (bool): Allows syntax errors in the input source code
"""
...
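# Example (illustrative sketch, not part of the generated bindings): constructing
# `PiranhaArguments` and running Piranha without rewriting files on disk. The
# codebase path, flag name, and substitution keys below are hypothetical
# placeholders; the keys that actually matter depend on the rules being
# instantiated (built-in or loaded via `path_to_configurations`).
#
#   args = PiranhaArguments(
#       language="java",
#       paths_to_codebase=["/path/to/service"],      # hypothetical path
#       substitutions={
#           "stale_flag_name": "SAMPLE_STALE_FLAG",   # hypothetical flag name
#           "treated": "true",
#       },
#       dry_run=True,                                 # report edits, do not write them
#   )
#   summaries = execute_piranha(args)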
class PiranhaOutputSummary:
"""
A class to represent Piranha's output
Attributes
----------
path: path to the file
content: content of the file after all the rewrites
matches: All the occurrences of "match-only" rules
rewrites: All the applied edits
"""
path: str
"path to the file"
original_content: str
"Original content of the file before any rewrites"
content: str
"Final content of the file after all the rewrites"
matches: list[tuple[str, Match]]
'All the occurrences of "match-only" rules'
rewrites: list[Edit]
"All the applied edits"
class Edit:
"""
A class to represent an edit performed by Piranha
Attributes
----------
p_match: The match representing the target site of the edit
replacement_string: The string to replace the substring encompassed by the match
matched_rule: The rule used for creating this match-replace
"""
p_match: Match
"The match representing the target site of the edit"
matched_rule: str
"The rule used for creating this match-replace"
replacement_string: str
"The string to replace the substring encompassed by the match"
class Match:
"""
A class to represent a match
Attributes
----------
matched_string: Code snippet that matched
range: Range of the entire AST node captured by the match
matches: The mapping between tags and string representation of the AST captured
"""
matched_string: str
"Code snippet that matched"
range: Range
"Range of the entire AST node captured by the match"
matches: dict[str, str]
"The mapping between tags and string representation of the AST captured"
""
class Range:
"""A range of positions in a multi-line text document,
both in terms of bytes and of rows and columns.
"""
start_byte: int
end_byte: int
start_point: Point
end_point: Point
class Point:
row: int
column: int
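# Example (illustrative sketch): reading results of "match-only" rules. Each entry
# in `summary.matches` pairs a rule name with the `Match` it produced; the tag
# names inside `match.matches` depend entirely on that rule's Tree-sitter query.
#
#   for rule_name, match in summary.matches:
#       line = match.range.start_point.row
#       print(f"{rule_name} matched {match.matched_string!r} at row {line}")
#       for tag, captured in match.matches.items():
#           print(f"  @{tag} -> {captured!r}")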
class Filter:
"""A class to capture filters of a Piranha Rule"""
enclosing_node: TSQuery
"AST patterns that some ancestor node of the primary match should comply"
not_contains: list[TSQuery]
"AST patterns that SHOULD NOT match any subtree of node matching `enclosing_node` pattern"
contains: TSQuery
"AST pattern that SHOULD match subtrees of `enclosing_node`. " "Number of matches should be within the range of `at_least` and `at_most`."
at_least: int
"The minimum number of times the contains query should match in the enclosing node"
at_most: int
"The maximum number of times the contains query should match in the enclosing node"
child_count: int
"Number of named children under the primary matched node"
sibling_count: int
"Number of named siblings of the primary matched node"
def __init__(
self,
enclosing_node: Optional[str] = None,
not_enclosing_node: Optional[str] = None,
not_contains: list[str] = [],
contains: Optional[str] = None,
at_least: int = 1,
at_most: int = 4294967295, # u32::MAX
child_count: int = 4294967295, # u32::MAX
sibling_count: int = 4294967295, # u32::MAX
):
"""
Constructs `Filter`
Parameters
------------
enclosing_node: str
AST patterns that some ancestor node of the primary match should comply with
not_contains: list[str]
AST patterns that should not match any subtree of the node matching the `enclosing_node` pattern
"""
...
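# Example (illustrative sketch): a filter restricting a rule to fire only inside an
# enclosing method declaration that does not already contain a particular call.
# The Tree-sitter patterns are hypothetical and must match the target language's
# grammar (Java-style node names shown here).
#
#   inside_method_without_log = Filter(
#       enclosing_node="(method_declaration) @m",
#       not_contains=[
#           '(method_invocation name: (identifier) @n (#eq? @n "log")) @call',
#       ],
#   )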
class Rule:
"""A class to capture Piranha Rule"""
name: str
"Name of the rule"
query: TSQuery
"Tree-sitter query as string"
replace_node: str
"The tag corresponding to the node to be replaced"
replace_node_idx: str
"The i'th child of node corresponding to the replace_node tag will be replaced"
replace: str
"Replacement pattern"
groups: set[str]
"Group(s) to which the rule belongs"
holes: set[str]
"Holes that need to be filled, in order to instantiate a rule"
filters: set[Filter]
"Filters to test before applying a rule"
is_seed_rule: bool
"Marks a rule as a seed rule"
def __init__(
self,
name: str,
query: Optional[str] = None,
replace_node: Optional[str] = None,
replace: Optional[str] = None,
groups: set[str] = set(),
holes: set[str] = set(),
filters: set[Filter] = set(),
is_seed_rule: bool = True,
):
"""
Constructs `Rule`
Parameters
------------
name: str
Name of the rule
query: str
Tree-sitter query as string
replace_node: str
The tag corresponding to the node to be replaced
replace: str
Replacement pattern
groups: set[str]
Group(s) to which the rule belongs
holes: set[str]
Holes that need to be filled, in order to instantiate a rule
filters: set[Filter]
Filters to test before applying a rule
is_seed_rule: bool
Marks a rule as a seed rule
"""
...
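# Example (illustrative sketch): a minimal seed rule that rewrites calls to a
# hypothetical `isEnabled()` method to the literal `true`. The query uses Java
# Tree-sitter node names; real rules often also carry `holes`, `groups`, and
# `filters`.
#
#   replace_enabled_check = Rule(
#       name="replace_is_enabled",
#       query='(method_invocation name: (identifier) @name (#eq? @name "isEnabled")) @call',
#       replace_node="call",
#       replace="true",
#   )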
class OutgoingEdges:
frm: str
"The source rule or group of rules"
to: list[str]
"The target edges or groups of edges"
scope: str
"The scope label for the edge"
def __init__(
self,
frm: str,
to: list[str],
scope: str,
):
"""
Constructs `OutgoingEdges`
Parameters
------------
frm: str
The source rule or group of rules
to: list[str]
The target rules or groups of rules
scope: str
The scope label for the edge
"""
...
class RuleGraph:
rules: list[Rule]
"The rules in the graph"
edges: list[OutgoingEdges]
"The edges in the graph"
graph: dict[str, list[tuple[str, str]]]
"The graph itself (as an adjacency list)"
def __init__(
self,
rules: list[Rule],
edges: list[OutgoingEdges],
):
"""
Constructs `RuleGraph`
Parameters
------------
rules: list[Rule]
The rules in the graph
edges: list[OutgoingEdges]
The edges in the graph
"""
...
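# Example (illustrative sketch): chaining two hypothetical rules with a `RuleGraph`
# and applying them to an in-memory snippet instead of files on disk. `find_rule`
# and `cleanup_rule` stand in for real `Rule` instances, and the "Parent" scope
# label is assumed for the edge.
#
#   graph = RuleGraph(
#       rules=[find_rule, cleanup_rule],
#       edges=[OutgoingEdges(frm="find_rule", to=["cleanup_rule"], scope="Parent")],
#   )
#   args = PiranhaArguments(
#       language="java",
#       code_snippet="class A { void m() { } }",
#       rule_graph=graph,
#       dry_run=True,                      # keep the rewrite in memory
#   )
#   transformed = execute_piranha(args)[0].content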
class TSQuery:
"Captures a Tree sitter query"
def query(self):
"""The query"""
...