+// Imports from default node modules
const fs = require('fs');
const http = require("https");
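+// Note: this binds Node's built-in "https" module to a variable named "http"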

+// This is the URL to the deployed wiki
const url = "https://github.com/Slimefun/Slimefun4/wiki/";
+
+// The regular expression to check for links that lead to a wiki page
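+// (each match looks like "(https://github.com/Slimefun/Slimefun4/wiki/Some-Page)", where the page name consists of letters and dashes)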
const regex = /\(https:\/\/github\.com\/Slimefun\/Slimefun4\/wiki\/[A-Za-z-]+\)/g;

/**
@@ -14,13 +18,18 @@ const options = {
    path: "/repos/Slimefun/Wiki/issues/2",
    headers: {
        "User-Agent": "Slimefun Wiki Action",
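+        // The GitHub API token is read from the ACCESS_TOKEN environment variable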
- "authorization" : " token " + process . env . ACCESS_TOKEN ,
21
+ "authorization" : ` token ${ process . env . ACCESS_TOKEN } ` ,
18
22
"content-type" : "application/x-www-form-urlencoded"
19
23
}
20
24
}

+// The queue of file scan tasks
const queue = [];
+
+// All found pages
const pages = [];
+
+// All missing pages
const missing = [];

// This is our placeholder text for any page that is missing
@@ -36,25 +45,13 @@ https://github.com/Slimefun/Slimefun4/wiki/Expanding-the-Wiki

`;

-// Read the contents of our /pages/ directory.
+// Read the contents of our /pages/ directory as an async promise
fs.promises.readdir("pages").then(files => {
+
+    // Loop through all files in that directory
    for (let i in files) {
-        // Queue another task to find the file
-        queue.push(fs.promises.readFile("pages/" + files[i], "UTF-8").then(content => {
-            let match;
-
-            // Continue as long as a match can be found
-            do {
-                // Update our variable
-                match = regex.exec(content);
-
-                // If we found a match, handle it
-                if (match) {
-                    let page = match[0].substring(1 + url.length, match[0].length - 1);
-                    findFile(`${page}.md`);
-                }
-            } while (match);
-        }));
+        // Queue another task to find linked pages
+        queue.push(fs.promises.readFile(`pages/${files[i]}`, "UTF-8").then(scanFile));
    }

    // Finish working off the queue and evaluate afterwards
@@ -103,6 +100,31 @@ fs.promises.readdir("pages").then(files => {
    });
});

+/**
+ * This method scans the given input for links to wiki pages.
+ *
+ * @param {string} content The file content
+ */
+function scanFile(content) {
+    let match;
+
+    // This scans the document for any linked pages.
+    // This continues as long as there are matches for our regular expression
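+    // (the global "g" flag makes regex.exec() resume after the previous match and return null once no matches remain)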
+    do {
+        // Update our match variable
+        match = regex.exec(content);
+
+        // If we found a match, handle it
+        if (match) {
+            // We will crop out the url portion of the match
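+            // e.g. "(https://github.com/Slimefun/Slimefun4/wiki/Some-Page)" becomes "Some-Page"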
+            let page = match[0].substring(1 + url.length, match[0].length - 1);
+
+            // Start an attempt to find the .md file for the linked page
+            findFile(`${page}.md`);
+        }
+    } while (match);
+}
+
/**
 * This method attempts to find the given page.
 * If the page could not be found, it will push it to the missing pages array.