From 367321e331e7daa0b5a21e06c5b6a07d60ca3f4b Mon Sep 17 00:00:00 2001
From: Ankita Sahu <71656941+SAHU-01@users.noreply.github.com>
Date: Mon, 30 Sep 2024 12:38:19 +0530
Subject: [PATCH 01/11] [UI] Dynamically adding front matter to documentation pages based on pricing-list workflow

Signed-off-by: Ankita Sahu <71656941+SAHU-01@users.noreply.github.com>
---
 .github/workflows/pricing-list.yml      |  49 +++
 assets/scss/_styles_project.scss        | 141 +++++----
 layouts/shortcodes/feature-support.html |  22 ++
 update_frontmatter.go                   | 398 ++++++++++++++++++++++++
 4 files changed, 550 insertions(+), 60 deletions(-)
 create mode 100644 .github/workflows/pricing-list.yml
 create mode 100644 layouts/shortcodes/feature-support.html
 create mode 100644 update_frontmatter.go

diff --git a/.github/workflows/pricing-list.yml b/.github/workflows/pricing-list.yml
new file mode 100644
index 00000000..7ccea2f9
--- /dev/null
+++ b/.github/workflows/pricing-list.yml
@@ -0,0 +1,49 @@
+name: Update Pricing List and Front Matter
+
+on:
+  workflow_dispatch:
+    inputs:
+      spreadsheet_uri:
+        description: 'Link of the spreadsheet containing subscription details.'
+        type: string
+        required: true
+
+jobs:
+  update-pricing-and-frontmatter:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Trigger Pricing List workflow via GitHub API
+        run: |
+          curl -X POST \
+          -H "Accept: application/vnd.github+json" \
+          -H "Authorization: token ${{ secrets.GH_ACCESS_TOKEN }}" \
+          https://api.github.com/repos/layer5labs/meshery-extensions-packages/actions/workflows/generate-pricing-list.yml/dispatches \
+          -d '{"ref":"master", "inputs":{"spreadsheet_uri":"${{ inputs.spreadsheet_uri }}"}}'
+
+      - name: Checkout repository
+        uses: actions/checkout@v2
+
+      - name: Setup Go
+        uses: actions/setup-go@v2
+        with:
+          go-version: '1.17' # Specify the Go version you need
+
+      - name: Wait for Pricing List workflow to complete
+        run: |
+          # You may need to implement a polling mechanism here to check the status of the other workflow
+          # For simplicity, we'll just wait for a fixed amount of time
+          sleep 60 # Adjust this time based on how long the pricing list workflow typically takes
+
+      - name: Pull latest changes
+        run: |
+          git pull origin master
+
+      - name: Run update_frontmatter.go
+        run: go run update_frontmatter.go
+
+      - name: Commit and push if there are changes
+        run: |
+          git config --local user.email "action@github.com"
+          git config --local user.name "GitHub Action"
+          git add .
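+          # Commit and push only when something actually changed: `git diff --quiet`
+          # exits non-zero when differences exist, so the `||` branch (commit + push)
+          # on the next line runs only if one of the two checks detects changes.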
+ git diff --quiet && git diff --staged --quiet || (git commit -m "Update front matter based on new pricing data" && git push) \ No newline at end of file diff --git a/assets/scss/_styles_project.scss b/assets/scss/_styles_project.scss index a3727ec8..071759f3 100644 --- a/assets/scss/_styles_project.scss +++ b/assets/scss/_styles_project.scss @@ -37,13 +37,10 @@ .nav-link.active { border-width: 1px; border-style: solid; - border-image: linear-gradient( - to bottom, + border-image: linear-gradient(to bottom, rgba($dark, 0.2) 30%, rgba($primary, 0.3) 60%, - $primary 90% 100% - ) - 1; + $primary 90% 100%) 1; padding-bottom: 0.3rem; align-items: center; justify-content: center; @@ -83,7 +80,7 @@ body { // Inline code p code, - li > code, + li>code, table code { color: inherit; padding: 0.2em 0.4em; @@ -104,7 +101,7 @@ body { background-color: $gray-900; padding: $spacer; - > code { + >code { background-color: inherit !important; padding: 0; margin: 0; @@ -205,14 +202,12 @@ a:not([href]):not([class]):hover { } .td-sidebar { - background-image: linear-gradient( - to top, - #1e2117, - #1d1912, - #18120e, - #0f0a09, - #000000 - ); + background-image: linear-gradient(to top, + #1e2117, + #1d1912, + #18120e, + #0f0a09, + #000000); position: sticky; height: calc(100vh - 5.5rem); top: 5.5rem; @@ -282,20 +277,14 @@ a:not([href]):not([class]):hover { &.active:not(.tree-root) { border-width: 1px; border-style: solid; - border-image: linear-gradient( - to left, + border-image: linear-gradient(to left, rgba($dark, 0) 30%, rgba($primary, 0.3) 60%, - $primary 90% 100% - ) - 1; - background-image: linear-gradient( - to left, + $primary 90% 100%) 1; + background-image: linear-gradient(to left, rgba($dark, 0.2) 30%, rgba($primary, 0.3) 60%, - $primary 90% 100% - ) - 1; + $primary 90% 100%) 1; padding: 0.25rem; padding-left: 0.5rem !important; // background-image: linear-gradient(to left, rgba($dark,.33),rgba($dark,.5),rgba($dark,.75),#1e2117, #31412b, #3b6447, #378b6d, #00b39f); @@ -317,14 +306,12 @@ a:not([href]):not([class]):hover { .td-sidebar-toc { line-height: 1.25rem; border-left: 1px solid $border-color; - background-image: linear-gradient( - to top, - #1e2117, - #1d1912, - #18120e, - #0f0a09, - #000000 - ); + background-image: linear-gradient(to top, + #1e2117, + #1d1912, + #18120e, + #0f0a09, + #000000); @supports (position: sticky) { position: sticky; @@ -381,6 +368,50 @@ a:not([href]):not([class]):hover { border-color: #00b39f; } +// front-matter +.matterinfo { + font-weight: $font-weight-medium; + background: $black; + font-family: "Open Sans"; + border-style: solid; + margin: 2rem auto; + padding: 1rem; + border-color: #00b39f; + border-radius: 10px; +} + +.heading { + font-size: 1.2rem; + color: #00b39f; +} + +.matterheader { + font-size: 1.2rem; + color: #00b39f; +} + +.matterinfo .plan-support { + display: flex; + align-items: center; +} + +.matterinfo .plan-support .support-icon { + width: 20px; + height: 20px; + margin-right: 10px; +} + +.plan-icon { + display: inline-block; +} + +.tier { + color: #00b39f; + font-weight: bold; + padding: 0 5px; +} + + // Style alert boxes. 
.alert { @@ -407,12 +438,12 @@ a:not([href]):not([class]):hover { transition: color 0.8s; transition: background-color 0.8s; - > img { + >img { width: 2rem; margin-right: 0.5rem; } - > img:hover { + >img:hover { filter: brightness(0) invert(1); } @@ -426,21 +457,15 @@ a:not([href]):not([class]):hover { margin-bottom: 4rem; font-size: 5rem; text-align: left; - background: linear-gradient( - 217deg, + background: linear-gradient(217deg, rgba(249, 245, 13, 0.26), - rgba(223, 255, 219, 0.57) 9.03% - ), - linear-gradient( - 127deg, + rgba(223, 255, 219, 0.57) 9.03%), + linear-gradient(127deg, rgba(249, 245, 13, 0.7), - rgba(223, 225, 199, 0.1) 41.12% - ), - linear-gradient( - 336deg, + rgba(223, 225, 199, 0.1) 41.12%), + linear-gradient(336deg, rgba(249, 245, 13, 0.1), - rgba(239, 239, 251, 0.81) 0% - ); + rgba(239, 239, 251, 0.81) 0%); background-position: 0% 0%, 0% 0%; @@ -465,11 +490,9 @@ a:not([href]):not([class]):hover { transform: rotate(-55.68deg); flex-shrink: 0; overflow: hidden; - background-image: linear-gradient( - 180deg, - rgba(0, 179, 115, 0) 0%, - rgba(0, 179, 159, 0.3) 100% - ); + background-image: linear-gradient(180deg, + rgba(0, 179, 115, 0) 0%, + rgba(0, 179, 159, 0.3) 100%); position: absolute; top: -18rem; right: -32rem; @@ -498,15 +521,13 @@ a:not([href]):not([class]):hover { left: -24rem; overflow: hidden; - > .dash-ircle { + >.dash-ircle { width: 74.125rem; height: 74.125rem; flex-shrink: 0; - background: radial-gradient( - 50% 50% at 50% 50%, - rgba(0, 179, 159, 0.2) 0%, - rgba(0, 179, 159, 0) 100% - ); + background: radial-gradient(50% 50% at 50% 50%, + rgba(0, 179, 159, 0.2) 0%, + rgba(0, 179, 159, 0) 100%); position: absolute; overflow: hidden; background-clip: border-box; @@ -643,7 +664,7 @@ a:not([href]):not([class]):hover { gap: 3rem; } -.tab-content > .active { +.tab-content>.active { display: flex; } @@ -653,4 +674,4 @@ a:not([href]):not([class]):hover { .csvtable { width: 100%; -} +} \ No newline at end of file diff --git a/layouts/shortcodes/feature-support.html b/layouts/shortcodes/feature-support.html new file mode 100644 index 00000000..ea26dd15 --- /dev/null +++ b/layouts/shortcodes/feature-support.html @@ -0,0 +1,22 @@ +{{ $feature_name := .Get "feature_name" }} +{{ $pricingDataJson := getJSON "pricing_data.json" }} + +{{ if $pricingDataJson }} +{{ $feature := index (where $pricingDataJson "entire_row.Feature" $feature_name) 0 }} + +
Feature information not found
+ {{ end }} +Error: Could not load pricing data
+{{ end }} \ No newline at end of file diff --git a/update_frontmatter.go b/update_frontmatter.go new file mode 100644 index 00000000..3b83f5f3 --- /dev/null +++ b/update_frontmatter.go @@ -0,0 +1,398 @@ +// package main + +// import ( +// "encoding/json" +// "fmt" +// "io/ioutil" +// "os" +// "path/filepath" +// "strings" +// ) + +// type PricingData struct { +// Documentation string `json:"documentation"` +// EntireRow struct { +// Feature string `json:"Feature"` +// SubscriptionTier string `json:"Subscription Tier"` +// } `json:"entire_row"` +// } + +// func main() { +// // Read the JSON file +// jsonFile, err := ioutil.ReadFile("pricing_data.json") // Ensure correct filename +// if err != nil { +// fmt.Println("Error reading JSON file:", err) +// return +// } + +// var pricingData []PricingData +// err = json.Unmarshal(jsonFile, &pricingData) +// if err != nil { +// fmt.Println("Error parsing JSON:", err) +// return +// } + +// // Iterate over the JSON data +// for _, entry := range pricingData { +// if strings.HasPrefix(entry.Documentation, "https://docs.layer5.io/") { +// fmt.Println("\n--- Processing documentation URL ---") +// fmt.Println("Documentation URL:", entry.Documentation) + +// // Remove the base URL and split by "/" +// urlParts := strings.Split(strings.TrimPrefix(entry.Documentation, "https://docs.layer5.io/"), "/") + +// // Initialize folder path to "content/en/" +// folderPath := "content/en" +// fmt.Println("Starting folder path:", folderPath) + +// // Build the path step by step +// for _, part := range urlParts { +// if part == "" || strings.HasPrefix(part, "#") { +// // Stop processing at a "#" or empty part (end of path) +// fmt.Println("Encountered fragment or end of URL part:", part) +// break +// } +// // Append each valid part to the folder path +// folderPath = filepath.Join(folderPath, part) +// fmt.Println("Updated folder path:", folderPath) +// } + +// // Target the _index.md file inside the last folder +// filePath := filepath.Join(folderPath, "_index.md") +// fmt.Println("Targeting _index.md file:", filePath) + +// // Check if _index.md exists, otherwise create it +// if _, err := os.Stat(filePath); err == nil { +// // Read the existing file content +// fmt.Println("Found _index.md, reading file...") +// content, err := ioutil.ReadFile(filePath) +// if err != nil { +// fmt.Printf("Error reading file %s: %v\n", filePath, err) +// continue +// } + +// // Prepare the shortcode to be added +// shortcode := fmt.Sprintf(`{{< feature-support feature_name="%s" >}}`, entry.EntireRow.Feature) + +// // Find the position of the ending `---` after the front matter +// contentStr := string(content) +// separatorPos := strings.Index(contentStr, "---") +// if separatorPos != -1 { +// // Find the second `---` that closes the front matter +// separatorEndPos := strings.Index(contentStr[separatorPos+3:], "---") +// if separatorEndPos != -1 { +// separatorEndPos += separatorPos + 3 // Adjust the position +// } + +// // Insert only the shortcode after the closing `---` +// newContent := contentStr[:separatorEndPos+3] + "\n" + shortcode + "\n" + contentStr[separatorEndPos+3:] +// fmt.Println("Writing updated content with shortcode...") +// err = ioutil.WriteFile(filePath, []byte(newContent), 0644) +// if err != nil { +// fmt.Printf("Error writing to file %s: %v\n", filePath, err) +// } else { +// fmt.Printf("Updated file with shortcode for %s\n", filePath) +// } +// } else { +// fmt.Printf("No front matter found in file %s\n", filePath) +// } +// } else if os.IsNotExist(err) 
{ +// // File doesn't exist, create the _index.md file with only shortcode +// fmt.Printf("Creating %s with new shortcode...\n", filePath) +// shortcode := fmt.Sprintf(`{{< feature-support feature_name="%s" >}}`, entry.EntireRow.Feature) +// newContent := fmt.Sprintf("%s\n", shortcode) +// err = ioutil.WriteFile(filePath, []byte(newContent), 0644) +// if err != nil { +// fmt.Printf("Error creating file %s: %v\n", filePath, err) +// } else { +// fmt.Printf("Created and updated file with shortcode for %s\n", filePath) +// } +// } else { +// // If there's an error other than the file not existing, log it +// fmt.Printf("Error checking file %s: %v\n", filePath, err) +// } +// } +// } +// } + +// // Convert a map to JSON string format +// func toJSON(m map[string]string) string { +// d, err := json.MarshalIndent(m, "", " ") +// if err != nil { +// return "" +// } +// return string(d) +// } + + +// package main + +// import ( +// "encoding/json" +// "fmt" +// "io/ioutil" +// "os" +// "path/filepath" +// "regexp" +// "strings" +// ) + +// type PricingData struct { +// Documentation string `json:"documentation"` +// EntireRow struct { +// Feature string `json:"Feature"` +// SubscriptionTier string `json:"Subscription Tier"` +// } `json:"entire_row"` +// } + +// func main() { +// // Read the JSON file +// jsonFile, err := ioutil.ReadFile("pricing_data.json") +// if err != nil { +// fmt.Println("Error reading JSON file:", err) +// return +// } + +// var pricingData []PricingData +// err = json.Unmarshal(jsonFile, &pricingData) +// if err != nil { +// fmt.Println("Error parsing JSON:", err) +// return +// } + +// // Iterate over the JSON data +// for _, entry := range pricingData { +// if strings.HasPrefix(entry.Documentation, "https://docs.layer5.io/") { +// fmt.Println("\n--- Processing documentation URL ---") +// fmt.Println("Documentation URL:", entry.Documentation) + +// // Remove the base URL and split by "/" +// urlParts := strings.Split(strings.TrimPrefix(entry.Documentation, "https://docs.layer5.io/"), "/") + +// // Initialize folder path to "content/en/" +// folderPath := "content/en" +// fmt.Println("Starting folder path:", folderPath) + +// // Extract hash if present +// urlHash := "" +// for i, part := range urlParts { +// if strings.HasPrefix(part, "#") { +// urlHash = strings.TrimPrefix(part, "#") +// urlParts = urlParts[:i] +// break +// } +// } + +// // Build the path step by step +// for _, part := range urlParts { +// if part == "" { +// continue +// } +// folderPath = filepath.Join(folderPath, part) +// fmt.Println("Updated folder path:", folderPath) +// } + +// // Target the _index.md file inside the last folder +// filePath := filepath.Join(folderPath, "_index.md") +// fmt.Println("Targeting _index.md file:", filePath) + +// // Process the file +// processFile(filePath, entry, urlHash) +// } +// } +// } + +// func processFile(filePath string, entry PricingData, urlHash string) { +// content, err := ioutil.ReadFile(filePath) +// if err != nil { +// if os.IsNotExist(err) { +// fmt.Printf("File %s does not exist, creating new file\n", filePath) +// content = []byte{} +// } else { +// fmt.Printf("Error reading file %s: %v\n", filePath, err) +// return +// } +// } + +// shortcode := fmt.Sprintf(`{{< feature-support feature_name="%s" >}}`, entry.EntireRow.Feature) +// contentStr := string(content) +// var newContent string + +// if urlHash != "" { +// // Convert hash to a flexible regex pattern +// hashPattern := strings.ReplaceAll(urlHash, "-", "[-\\s]") +// hashRegex := 
regexp.MustCompile(`(?im)^#+\s*` + hashPattern) + +// // Find the position of the hash in the content +// hashMatch := hashRegex.FindStringIndex(contentStr) +// if hashMatch != nil { +// // Find the end of the line where the heading is +// endOfLine := strings.Index(contentStr[hashMatch[1]:], "\n") +// if endOfLine == -1 { +// endOfLine = len(contentStr) +// } else { +// endOfLine += hashMatch[1] +// } + +// // Insert the shortcode just below the matching heading +// newContent = contentStr[:endOfLine] + "\n\n" + shortcode + "\n" + contentStr[endOfLine:] +// fmt.Printf("Added shortcode below matching heading for %s\n", filePath) +// } else { +// fmt.Printf("Hash '%s' not found in file %s, adding shortcode at the end\n", urlHash, filePath) +// newContent = contentStr + "\n\n" + shortcode +// } +// } else { +// // If no hash, add to the end of the file +// newContent = contentStr + "\n\n" + shortcode +// fmt.Printf("No hash provided, added shortcode at the end of %s\n", filePath) +// } + +// err = ioutil.WriteFile(filePath, []byte(newContent), 0644) +// if err != nil { +// fmt.Printf("Error writing to file %s: %v\n", filePath, err) +// } else { +// fmt.Printf("Updated file with shortcode for %s\n", filePath) +// } +// } + +package main + +import ( + "encoding/json" + "fmt" + "io/ioutil" + "os" + "path/filepath" + "regexp" + "strings" +) + +type PricingData struct { + Documentation string `json:"documentation"` + EntireRow struct { + Feature string `json:"Feature"` + SubscriptionTier string `json:"Subscription Tier"` + } `json:"entire_row"` +} + +func main() { + // Read the JSON file + jsonFile, err := ioutil.ReadFile("pricing_data.json") + if err != nil { + fmt.Println("Error reading JSON file:", err) + return + } + + var pricingData []PricingData + err = json.Unmarshal(jsonFile, &pricingData) + if err != nil { + fmt.Println("Error parsing JSON:", err) + return + } + + // Iterate over the JSON data + for _, entry := range pricingData { + if strings.HasPrefix(entry.Documentation, "https://docs.layer5.io/") { + fmt.Println("\n--- Processing documentation URL ---") + fmt.Println("Documentation URL:", entry.Documentation) + + // Remove the base URL and split by "/" + urlParts := strings.Split(strings.TrimPrefix(entry.Documentation, "https://docs.layer5.io/"), "/") + + // Initialize folder path to "content/en/" + folderPath := "content/en" + fmt.Println("Starting folder path:", folderPath) + + // Extract hash if present + urlHash := "" + for i, part := range urlParts { + if strings.HasPrefix(part, "#") { + urlHash = strings.TrimPrefix(part, "#") + urlParts = urlParts[:i] + break + } + } + + // Build the path step by step + for _, part := range urlParts { + if part == "" { + continue + } + folderPath = filepath.Join(folderPath, part) + fmt.Println("Updated folder path:", folderPath) + } + + // Target the _index.md file inside the last folder + filePath := filepath.Join(folderPath, "_index.md") + fmt.Println("Targeting _index.md file:", filePath) + + // Process the file + processFile(filePath, entry, urlHash) + } + } +} + +func processFile(filePath string, entry PricingData, urlHash string) { + content, err := ioutil.ReadFile(filePath) + if err != nil { + if os.IsNotExist(err) { + fmt.Printf("File %s does not exist, creating new file\n", filePath) + content = []byte{} + } else { + fmt.Printf("Error reading file %s: %v\n", filePath, err) + return + } + } + + shortcode := fmt.Sprintf(`{{< feature-support feature_name="%s" >}}`, entry.EntireRow.Feature) + contentStr := string(content) + var newContent 
string + + if urlHash == "" { + // No hash: add shortcode after the front matter + frontMatterEnd := strings.Index(contentStr, "---") + if frontMatterEnd != -1 { + frontMatterEnd = strings.Index(contentStr[frontMatterEnd+3:], "---") + if frontMatterEnd != -1 { + frontMatterEnd += 6 // Adjust for the second "---" + newContent = contentStr[:frontMatterEnd] + "\n" + shortcode + "\n" + contentStr[frontMatterEnd:] + fmt.Printf("Added shortcode after front matter in %s\n", filePath) + } else { + // If second "---" not found, add to the end + newContent = contentStr + "\n\n" + shortcode + fmt.Printf("Front matter end not found, added shortcode at the end of %s\n", filePath) + } + } else { + // If no front matter found, add to the beginning + newContent = shortcode + "\n\n" + contentStr + fmt.Printf("No front matter found, added shortcode at the beginning of %s\n", filePath) + } + } else { + // Hash present: search for matching heading and add shortcode below + hashPattern := strings.ReplaceAll(urlHash, "-", "[-\\s]") + hashRegex := regexp.MustCompile(`(?im)^#+\s*` + hashPattern) + + hashMatch := hashRegex.FindStringIndex(contentStr) + if hashMatch != nil { + endOfLine := strings.Index(contentStr[hashMatch[1]:], "\n") + if endOfLine == -1 { + endOfLine = len(contentStr) + } else { + endOfLine += hashMatch[1] + } + + newContent = contentStr[:endOfLine] + "\n\n" + shortcode + "\n" + contentStr[endOfLine:] + fmt.Printf("Added shortcode below matching heading for %s\n", filePath) + } else { + fmt.Printf("Hash '%s' not found in file %s, adding shortcode at the end\n", urlHash, filePath) + newContent = contentStr + "\n\n" + shortcode + } + } + + err = ioutil.WriteFile(filePath, []byte(newContent), 0644) + if err != nil { + fmt.Printf("Error writing to file %s: %v\n", filePath, err) + } else { + fmt.Printf("Updated file with shortcode for %s\n", filePath) + } +} \ No newline at end of file From 3cc1bd85a1e8355dec8abb7163a9b05215b6060d Mon Sep 17 00:00:00 2001 From: Ankita Sahu <71656941+SAHU-01@users.noreply.github.com> Date: Mon, 30 Sep 2024 17:54:39 +0530 Subject: [PATCH 02/11] removing unwanted commented code Signed-off-by: Ankita Sahu <71656941+SAHU-01@users.noreply.github.com> --- update_frontmatter.go | 257 ------------------------------------------ 1 file changed, 257 deletions(-) diff --git a/update_frontmatter.go b/update_frontmatter.go index 3b83f5f3..53241b62 100644 --- a/update_frontmatter.go +++ b/update_frontmatter.go @@ -1,260 +1,3 @@ -// package main - -// import ( -// "encoding/json" -// "fmt" -// "io/ioutil" -// "os" -// "path/filepath" -// "strings" -// ) - -// type PricingData struct { -// Documentation string `json:"documentation"` -// EntireRow struct { -// Feature string `json:"Feature"` -// SubscriptionTier string `json:"Subscription Tier"` -// } `json:"entire_row"` -// } - -// func main() { -// // Read the JSON file -// jsonFile, err := ioutil.ReadFile("pricing_data.json") // Ensure correct filename -// if err != nil { -// fmt.Println("Error reading JSON file:", err) -// return -// } - -// var pricingData []PricingData -// err = json.Unmarshal(jsonFile, &pricingData) -// if err != nil { -// fmt.Println("Error parsing JSON:", err) -// return -// } - -// // Iterate over the JSON data -// for _, entry := range pricingData { -// if strings.HasPrefix(entry.Documentation, "https://docs.layer5.io/") { -// fmt.Println("\n--- Processing documentation URL ---") -// fmt.Println("Documentation URL:", entry.Documentation) - -// // Remove the base URL and split by "/" -// urlParts := 
strings.Split(strings.TrimPrefix(entry.Documentation, "https://docs.layer5.io/"), "/") - -// // Initialize folder path to "content/en/" -// folderPath := "content/en" -// fmt.Println("Starting folder path:", folderPath) - -// // Build the path step by step -// for _, part := range urlParts { -// if part == "" || strings.HasPrefix(part, "#") { -// // Stop processing at a "#" or empty part (end of path) -// fmt.Println("Encountered fragment or end of URL part:", part) -// break -// } -// // Append each valid part to the folder path -// folderPath = filepath.Join(folderPath, part) -// fmt.Println("Updated folder path:", folderPath) -// } - -// // Target the _index.md file inside the last folder -// filePath := filepath.Join(folderPath, "_index.md") -// fmt.Println("Targeting _index.md file:", filePath) - -// // Check if _index.md exists, otherwise create it -// if _, err := os.Stat(filePath); err == nil { -// // Read the existing file content -// fmt.Println("Found _index.md, reading file...") -// content, err := ioutil.ReadFile(filePath) -// if err != nil { -// fmt.Printf("Error reading file %s: %v\n", filePath, err) -// continue -// } - -// // Prepare the shortcode to be added -// shortcode := fmt.Sprintf(`{{< feature-support feature_name="%s" >}}`, entry.EntireRow.Feature) - -// // Find the position of the ending `---` after the front matter -// contentStr := string(content) -// separatorPos := strings.Index(contentStr, "---") -// if separatorPos != -1 { -// // Find the second `---` that closes the front matter -// separatorEndPos := strings.Index(contentStr[separatorPos+3:], "---") -// if separatorEndPos != -1 { -// separatorEndPos += separatorPos + 3 // Adjust the position -// } - -// // Insert only the shortcode after the closing `---` -// newContent := contentStr[:separatorEndPos+3] + "\n" + shortcode + "\n" + contentStr[separatorEndPos+3:] -// fmt.Println("Writing updated content with shortcode...") -// err = ioutil.WriteFile(filePath, []byte(newContent), 0644) -// if err != nil { -// fmt.Printf("Error writing to file %s: %v\n", filePath, err) -// } else { -// fmt.Printf("Updated file with shortcode for %s\n", filePath) -// } -// } else { -// fmt.Printf("No front matter found in file %s\n", filePath) -// } -// } else if os.IsNotExist(err) { -// // File doesn't exist, create the _index.md file with only shortcode -// fmt.Printf("Creating %s with new shortcode...\n", filePath) -// shortcode := fmt.Sprintf(`{{< feature-support feature_name="%s" >}}`, entry.EntireRow.Feature) -// newContent := fmt.Sprintf("%s\n", shortcode) -// err = ioutil.WriteFile(filePath, []byte(newContent), 0644) -// if err != nil { -// fmt.Printf("Error creating file %s: %v\n", filePath, err) -// } else { -// fmt.Printf("Created and updated file with shortcode for %s\n", filePath) -// } -// } else { -// // If there's an error other than the file not existing, log it -// fmt.Printf("Error checking file %s: %v\n", filePath, err) -// } -// } -// } -// } - -// // Convert a map to JSON string format -// func toJSON(m map[string]string) string { -// d, err := json.MarshalIndent(m, "", " ") -// if err != nil { -// return "" -// } -// return string(d) -// } - - -// package main - -// import ( -// "encoding/json" -// "fmt" -// "io/ioutil" -// "os" -// "path/filepath" -// "regexp" -// "strings" -// ) - -// type PricingData struct { -// Documentation string `json:"documentation"` -// EntireRow struct { -// Feature string `json:"Feature"` -// SubscriptionTier string `json:"Subscription Tier"` -// } `json:"entire_row"` -// } - 
-// func main() { -// // Read the JSON file -// jsonFile, err := ioutil.ReadFile("pricing_data.json") -// if err != nil { -// fmt.Println("Error reading JSON file:", err) -// return -// } - -// var pricingData []PricingData -// err = json.Unmarshal(jsonFile, &pricingData) -// if err != nil { -// fmt.Println("Error parsing JSON:", err) -// return -// } - -// // Iterate over the JSON data -// for _, entry := range pricingData { -// if strings.HasPrefix(entry.Documentation, "https://docs.layer5.io/") { -// fmt.Println("\n--- Processing documentation URL ---") -// fmt.Println("Documentation URL:", entry.Documentation) - -// // Remove the base URL and split by "/" -// urlParts := strings.Split(strings.TrimPrefix(entry.Documentation, "https://docs.layer5.io/"), "/") - -// // Initialize folder path to "content/en/" -// folderPath := "content/en" -// fmt.Println("Starting folder path:", folderPath) - -// // Extract hash if present -// urlHash := "" -// for i, part := range urlParts { -// if strings.HasPrefix(part, "#") { -// urlHash = strings.TrimPrefix(part, "#") -// urlParts = urlParts[:i] -// break -// } -// } - -// // Build the path step by step -// for _, part := range urlParts { -// if part == "" { -// continue -// } -// folderPath = filepath.Join(folderPath, part) -// fmt.Println("Updated folder path:", folderPath) -// } - -// // Target the _index.md file inside the last folder -// filePath := filepath.Join(folderPath, "_index.md") -// fmt.Println("Targeting _index.md file:", filePath) - -// // Process the file -// processFile(filePath, entry, urlHash) -// } -// } -// } - -// func processFile(filePath string, entry PricingData, urlHash string) { -// content, err := ioutil.ReadFile(filePath) -// if err != nil { -// if os.IsNotExist(err) { -// fmt.Printf("File %s does not exist, creating new file\n", filePath) -// content = []byte{} -// } else { -// fmt.Printf("Error reading file %s: %v\n", filePath, err) -// return -// } -// } - -// shortcode := fmt.Sprintf(`{{< feature-support feature_name="%s" >}}`, entry.EntireRow.Feature) -// contentStr := string(content) -// var newContent string - -// if urlHash != "" { -// // Convert hash to a flexible regex pattern -// hashPattern := strings.ReplaceAll(urlHash, "-", "[-\\s]") -// hashRegex := regexp.MustCompile(`(?im)^#+\s*` + hashPattern) - -// // Find the position of the hash in the content -// hashMatch := hashRegex.FindStringIndex(contentStr) -// if hashMatch != nil { -// // Find the end of the line where the heading is -// endOfLine := strings.Index(contentStr[hashMatch[1]:], "\n") -// if endOfLine == -1 { -// endOfLine = len(contentStr) -// } else { -// endOfLine += hashMatch[1] -// } - -// // Insert the shortcode just below the matching heading -// newContent = contentStr[:endOfLine] + "\n\n" + shortcode + "\n" + contentStr[endOfLine:] -// fmt.Printf("Added shortcode below matching heading for %s\n", filePath) -// } else { -// fmt.Printf("Hash '%s' not found in file %s, adding shortcode at the end\n", urlHash, filePath) -// newContent = contentStr + "\n\n" + shortcode -// } -// } else { -// // If no hash, add to the end of the file -// newContent = contentStr + "\n\n" + shortcode -// fmt.Printf("No hash provided, added shortcode at the end of %s\n", filePath) -// } - -// err = ioutil.WriteFile(filePath, []byte(newContent), 0644) -// if err != nil { -// fmt.Printf("Error writing to file %s: %v\n", filePath, err) -// } else { -// fmt.Printf("Updated file with shortcode for %s\n", filePath) -// } -// } - package main import ( From 
c4e560135602315486ba4d34c836f62585eb993e Mon Sep 17 00:00:00 2001 From: Lee CalcoteFeature information not found
- {{ end }} -Error: Could not load pricing data
-{{ end }} \ No newline at end of file diff --git a/update_frontmatter.go b/update_frontmatter.go deleted file mode 100644 index 0a74a5b9..00000000 --- a/update_frontmatter.go +++ /dev/null @@ -1,179 +0,0 @@ -package main - -import ( - "encoding/json" - "fmt" - "io/ioutil" - "os" - "path/filepath" - "regexp" - "strings" -) - -type PricingData struct { - Documentation string `json:"documentation"` - EntireRow struct { - Feature string `json:"Feature"` - SubscriptionTier string `json:"Subscription Tier"` - } `json:"entire_row"` -} - -func main() { - // Read the JSON file - jsonFile, err := ioutil.ReadFile("pricing_data.json") - if err != nil { - fmt.Println("Error reading JSON file:", err) - return - } - - var pricingData []PricingData - err = json.Unmarshal(jsonFile, &pricingData) - if err != nil { - fmt.Println("Error parsing JSON:", err) - return - } - - // Group entries by documentation URL - groupedEntries := make(map[string][]PricingData) - for _, entry := range pricingData { - if strings.HasPrefix(entry.Documentation, "https://docs.layer5.io/") { - groupedEntries[entry.Documentation] = append(groupedEntries[entry.Documentation], entry) - } - } - - // Process each group - for docURL, entries := range groupedEntries { - fmt.Println("\n--- Processing documentation URL ---") - fmt.Println("Documentation URL:", docURL) - - // Extract path and hash - urlParts, urlHash := extractHash(docURL) - - // Build the folder path - folderPath := buildPath("content/en", urlParts) - fmt.Println("Final folder path:", folderPath) - - // Target the _index.md file inside the last folder - filePath := filepath.Join(folderPath, "_index.md") - fmt.Println("Targeting _index.md file:", filePath) - - // Process the file - processFileWithMultipleEntries(filePath, entries, urlHash) - } -} - -func extractHash(url string) ([]string, string) { - urlParts := strings.Split(strings.TrimPrefix(url, "https://docs.layer5.io/"), "/") - var urlHash string - for i, part := range urlParts { - if strings.HasPrefix(part, "#") { - urlHash = strings.TrimPrefix(part, "#") - urlParts = urlParts[:i] - break - } - } - return urlParts, urlHash -} - -func buildPath(basePath string, parts []string) string { - for _, part := range parts { - if part == "" { - continue - } - basePath = filepath.Join(basePath, part) - } - return basePath -} - -func processFileWithMultipleEntries(filePath string, entries []PricingData, urlHash string) { - content, err := ioutil.ReadFile(filePath) - if err != nil { - if os.IsNotExist(err) { - fmt.Printf("File %s does not exist, creating new file\n", filePath) - content = []byte{} - } else { - fmt.Printf("Error reading file %s: %v\n", filePath, err) - return - } - } - - contentStr := string(content) - tierFeatures := make(map[string][]string) - - // Collect features for each tier - for _, entry := range entries { - tier := strings.ToLower(entry.EntireRow.SubscriptionTier) - tierFeatures[tier] = append(tierFeatures[tier], entry.EntireRow.Feature) - } - - // Process free tier - if freeFeatures, exists := tierFeatures["free"]; exists && len(freeFeatures) > 0 { - consolidatedFeature := strings.Join(freeFeatures, " ") - shortcode := fmt.Sprintf(`{{< feature-support feature_name="%s" >}}`, consolidatedFeature) - contentStr = insertConsolidatedShortcode(contentStr, shortcode, filePath) - } - - // Process non-free tiers - for tier, features := range tierFeatures { - if tier == "free" { - continue - } - var tierShortcodes []string - for _, feature := range features { - shortcode := fmt.Sprintf(`{{< feature-support 
feature_name="%s" >}}`, feature) - tierShortcodes = append(tierShortcodes, shortcode) - } - contentStr = insertShortcodesAfterHeadings(contentStr, tierShortcodes, urlHash, filePath) - } - - err = ioutil.WriteFile(filePath, []byte(contentStr), 0644) - if err != nil { - fmt.Printf("Error writing to file %s: %v\n", filePath, err) - } else { - fmt.Printf("Updated file with consolidated shortcodes for %s\n", filePath) - } -} - -func insertConsolidatedShortcode(contentStr, shortcode, filePath string) string { - // Remove any existing feature-support shortcodes - re := regexp.MustCompile(`(?s){{<\s*feature-support.*?>}}`) - contentStr = re.ReplaceAllString(contentStr, "") - - // Insert the consolidated shortcode after front matter - frontMatterEnd := strings.Index(contentStr, "---") - if frontMatterEnd != -1 { - frontMatterEnd = strings.Index(contentStr[frontMatterEnd+3:], "---") - if frontMatterEnd != -1 { - frontMatterEnd += 6 // Adjust for the second "---" - fmt.Printf("Added consolidated free shortcode after front matter in %s\n", filePath) - return contentStr[:frontMatterEnd] + "\n" + shortcode + "\n" + contentStr[frontMatterEnd:] - } - } - // If no front matter or second "---" found, insert at the beginning - fmt.Printf("Front matter end not found, added consolidated free shortcode at the beginning of %s\n", filePath) - return shortcode + "\n\n" + contentStr -} - -func insertShortcodesAfterHeadings(contentStr string, shortcodes []string, urlHash string, filePath string) string { - hashPattern := strings.ReplaceAll(urlHash, "-", "[-\\s]") - hashRegex := regexp.MustCompile(`(?im)^#+\s*` + hashPattern) - - hashMatch := hashRegex.FindStringIndex(contentStr) - if hashMatch != nil { - endOfLine := strings.Index(contentStr[hashMatch[1]:], "\n") - if endOfLine == -1 { - endOfLine = len(contentStr) - } else { - endOfLine += hashMatch[1] - } - shortcodesStr := strings.Join(shortcodes, "\n") - fmt.Printf("Added non-free shortcodes below matching heading for %s\n", filePath) - contentStr = contentStr[:endOfLine] + "\n\n" + shortcodesStr + "\n" + contentStr[endOfLine:] - } else { - // If hash not found, append at the end - fmt.Printf("Hash '%s' not found in file %s, adding non-free shortcodes at the end\n", urlHash, filePath) - shortcodesStr := strings.Join(shortcodes, "\n") - contentStr += "\n\n" + shortcodesStr - } - return contentStr -} \ No newline at end of file From 175d72ff6417ff36ab96e94ec6020a84f2d203b4 Mon Sep 17 00:00:00 2001 From: Ankita Sahu <71656941+SAHU-01@users.noreply.github.com> Date: Mon, 21 Oct 2024 17:00:26 +0530 Subject: [PATCH 09/11] requested changes updated Signed-off-by: Ankita Sahu <71656941+SAHU-01@users.noreply.github.com> --- .github/workflows/feature-list.yml | 7 ++++++- assets/scss/_styles_project.scss | 2 +- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/.github/workflows/feature-list.yml b/.github/workflows/feature-list.yml index 812fb4f8..def6cf73 100644 --- a/.github/workflows/feature-list.yml +++ b/.github/workflows/feature-list.yml @@ -84,4 +84,9 @@ jobs: uses: stefanzweifel/git-auto-commit-action@v5 with: commit_message: Updated feature data - file_pattern: ${{ env.FEATURES_FILE }} \ No newline at end of file + file_pattern: ${{ env.FEATURES_FILE }} + branch: master + commit_options: "--signoff" + commit_user_name: l5io + commit_user_email: ci@layer5.io + commit_author: ${{ github.actor }} <${{ github.actor }}@users.noreply.github.com> \ No newline at end of file diff --git a/assets/scss/_styles_project.scss b/assets/scss/_styles_project.scss index 
071759f3..279f6894 100644
--- a/assets/scss/_styles_project.scss
+++ b/assets/scss/_styles_project.scss
@@ -372,7 +372,7 @@ a:not([href]):not([class]):hover {
 .matterinfo {
   font-weight: $font-weight-medium;
   background: $black;
-  font-family: "Open Sans";
+  font-family: "Qanelas Soft";
   border-style: solid;
   margin: 2rem auto;
   padding: 1rem;

From 31e0f8276f58a0995cb81f707a7c40133d63b4e0 Mon Sep 17 00:00:00 2001
From: Ankita Sahu <71656941+SAHU-01@users.noreply.github.com>
Date: Sun, 27 Oct 2024 23:32:25 +0530
Subject: [PATCH 10/11] simplifying workflow logic

Signed-off-by: Ankita Sahu <71656941+SAHU-01@users.noreply.github.com>
---
 .github/workflows/feature-list.yml      | 109 +++++++++++------------
 .github/workflows/new-feature-check.yml |  71 ---------------
 2 files changed, 48 insertions(+), 132 deletions(-)
 delete mode 100644 .github/workflows/new-feature-check.yml

diff --git a/.github/workflows/feature-list.yml b/.github/workflows/feature-list.yml
index def6cf73..c1433cea 100644
--- a/.github/workflows/feature-list.yml
+++ b/.github/workflows/feature-list.yml
@@ -1,89 +1,76 @@
-name: Feature List
+name: Feature List Update
 
 on:
-  workflow_dispatch:
-    inputs:
-      spreadsheet_uri:
-        description: 'Link of the spreadsheet containing subscription details.'
-        type: string
-        required: true
+  schedule:
+    - cron: '0 0 * * *'  # Run every night at midnight UTC
+  workflow_dispatch:
 
 jobs:
-  update-feature-data:
+  check-and-update-features:
     runs-on: ubuntu-latest
-    env: 
-      FEATURES_FILE: 'data/features.json'
+    env:
+      FEATURES_FILE: 'data/features.json'
     steps:
-      - name: Trigger Feature List workflow and wait for completion
+      - name: Checkout current repository
+        uses: actions/checkout@v4
+
+      - name: Check for updates in source repository
+        id: check-updates
         uses: actions/github-script@v6
         with:
          github-token: ${{secrets.GH_ACCESS_TOKEN}}
          script: |
-            const result = await github.rest.actions.createWorkflowDispatch({
+            const { data: sourceFile } = await github.rest.repos.getContent({
               owner: 'layer5labs',
               repo: 'meshery-extensions-packages',
-              workflow_id: 'generate-feature-list.yml',
-              ref: 'master',
-              inputs: {
-                spreadsheet_uri: '${{ secrets.INPUT_SPREADSHEET_URI }}'
-              }
+              path: 'feature-data.json',
+              ref: 'master'
+            });
+
+            // Store the latest commit SHA
+            const latestSHA = sourceFile.sha;
+
+            // Try to get the previously stored SHA from cache
+            const cache = await github.rest.actions.getActionsCacheList({
+              owner: context.repo.owner,
+              repo: context.repo.repo,
             });
-            console.log("Triggered workflow, waiting for completion...");
+            let hasUpdates = true;
+            if (cache.data.actions_caches.length > 0) {
+              const lastSHA = cache.data.actions_caches[0].key.split('-').pop();
+              hasUpdates = lastSHA !== latestSHA;
+            }
 
-            while (true) {
-              const runs = await github.rest.actions.listWorkflowRuns({
-                owner: 'layer5labs',
-                repo: 'meshery-extensions-packages',
-                workflow_id: 'generate-feature-list.yml'
+            if (hasUpdates) {
+              // Update the cache with new SHA
+              await github.rest.actions.createActionsCacheEntry({
+                owner: context.repo.owner,
+                repo: context.repo.repo,
+                key: `feature-data-sha-${latestSHA}`,
+                ref: context.ref,
+                cache_data: latestSHA
               });
 
-              if (runs.data.workflow_runs[0].status === 'completed') {
-                console.log("Workflow completed");
-                break;
-              }
+              // Decode and save the content
+              const content = Buffer.from(sourceFile.content, 'base64').toString('utf-8');
+              const fs = require('fs');
+
+              // Create data directory if it doesn't exist
+              fs.mkdirSync('data', { recursive: true });
 
-              console.log("Waiting for workflow to
complete..."); - await new Promise(resolve => setTimeout(resolve, 30000)); + // Write the new content + fs.writeFileSync('${{ env.FEATURES_FILE }}', content); } - - - name: Checkout repository - uses: actions/checkout@v4 - - - name: Create data directory if it doesn't exist - run: mkdir -p data - - - name: Download new feature data - uses: actions/github-script@v6 - with: - github-token: ${{secrets.GH_ACCESS_TOKEN}} - script: | - const fs = require('fs'); - const path = require('path'); - const { data } = await github.rest.repos.getContent({ - owner: 'layer5labs', - repo: 'meshery-extensions-packages', - path: 'feature-data.json' - }); - - const newContent = Buffer.from(data.content, 'base64').toString('utf-8'); - fs.writeFileSync('${{ env.FEATURES_FILE }}', newContent); - - - name: Update or create features.json - run: | - if [ -f "${{ env.FEATURES_FILE }}" ]; then - echo "Updating existing features.json" - else - echo "Creating new features.json" - cp "${{ env.FEATURES_FILE }}" data/features.json - fi + return hasUpdates; - name: Commit changes + if: steps.check-updates.outputs.result == 'true' uses: stefanzweifel/git-auto-commit-action@v5 with: - commit_message: Updated feature data + commit_message: "Updated feature data from source repository" file_pattern: ${{ env.FEATURES_FILE }} branch: master commit_options: "--signoff" diff --git a/.github/workflows/new-feature-check.yml b/.github/workflows/new-feature-check.yml deleted file mode 100644 index b4660e14..00000000 --- a/.github/workflows/new-feature-check.yml +++ /dev/null @@ -1,71 +0,0 @@ -name: Nightly Google Sheet Check - -on: - schedule: - - cron: '0 0 * * *' # Run every day at midnight UTC - -jobs: - check-for-updates: - runs-on: ubuntu-latest - steps: - - name: Check for Google Sheet updates - id: sheet-check - uses: actions/github-script@v6 - with: - github-token: ${{secrets.GH_ACCESS_TOKEN}} - script: | - const { GoogleSpreadsheet } = require('google-spreadsheet'); - - async function checkForUpdates() { - const doc = new GoogleSpreadsheet('${{ secrets.INPUT_SPREADSHEET_URI }}'); - await doc.useServiceAccountAuth({ - client_email: process.env.GOOGLE_SERVICE_ACCOUNT_EMAIL, - private_key: process.env.GOOGLE_PRIVATE_KEY, - }); - - await doc.loadInfo(); - const sheet = doc.sheetsByIndex[0]; - const lastModified = sheet.lastModified; - - const cache = await github.rest.actions.getActionsCacheList({ - owner: context.repo.owner, - repo: context.repo.repo, - }); - - let lastKnownModified = null; - if (cache.data.actions_caches.length > 0) { - lastKnownModified = new Date(cache.data.actions_caches[0].last_accessed_at); - } - - if (!lastKnownModified || lastModified > lastKnownModified) { - await github.rest.actions.createActionsCacheEntry({ - owner: context.repo.owner, - repo: context.repo.repo, - key: 'last-modified-time', - ref: context.ref, - cache_data: lastModified.toISOString(), - }); - return true; - } - - return false; - } - - const hasUpdates = await checkForUpdates(); - core.setOutput('has-updates', hasUpdates); - - - name: Trigger Subscriptions Plan workflow - if: steps.sheet-check.outputs.has-updates == 'true' - uses: actions/github-script@v6 - with: - github-token: ${{secrets.GH_ACCESS_TOKEN}} - script: | - github.rest.actions.createWorkflowDispatch({ - owner: context.repo.owner, - repo: context.repo.repo, - workflow_id: 'feature-list.yml', - ref: 'main', - inputs: { - spreadsheet_uri: '${{ secrets.INPUT_SPREADSHEET_URI }}' - } - }); \ No newline at end of file From 3a0bf65dfdf04795a9cc7feceda975a419e549a1 Mon Sep 17 
00:00:00 2001 From: Ankita Sahu <71656941+SAHU-01@users.noreply.github.com> Date: Sun, 27 Oct 2024 23:35:15 +0530 Subject: [PATCH 11/11] debugging workflow Signed-off-by: Ankita Sahu <71656941+SAHU-01@users.noreply.github.com> --- .github/workflows/feature-list.yml | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/.github/workflows/feature-list.yml b/.github/workflows/feature-list.yml index c1433cea..0cfd9ab6 100644 --- a/.github/workflows/feature-list.yml +++ b/.github/workflows/feature-list.yml @@ -5,6 +5,10 @@ on: - cron: '0 0 * * *' # Run every night at midnight UTC workflow_dispatch: +permissions: + contents: write + actions: write + jobs: check-and-update-features: runs-on: ubuntu-latest @@ -17,9 +21,8 @@ jobs: - name: Check for updates in source repository id: check-updates - uses: actions/github-script@v6 + uses: actions/github-script@v7 with: - github-token: ${{secrets.GH_ACCESS_TOKEN}} script: | const { data: sourceFile } = await github.rest.repos.getContent({ owner: 'layer5labs', @@ -62,12 +65,14 @@ jobs: // Write the new content fs.writeFileSync('${{ env.FEATURES_FILE }}', content); + + core.setOutput('has-updates', 'true'); + } else { + core.setOutput('has-updates', 'false'); } - - return hasUpdates; - name: Commit changes - if: steps.check-updates.outputs.result == 'true' + if: steps.check-updates.outputs.has-updates == 'true' uses: stefanzweifel/git-auto-commit-action@v5 with: commit_message: "Updated feature data from source repository"
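
For reference, a minimal sketch of the pricing_data.json records that update_frontmatter.go (PATCH 01) consumes. The field names follow the PricingData struct tags (documentation, entire_row, Feature, Subscription Tier); the URL and values below are hypothetical examples only:

[
  {
    "documentation": "https://docs.layer5.io/cloud/spaces/#sharing-designs",
    "entire_row": {
      "Feature": "Sharing Designs",
      "Subscription Tier": "Team"
    }
  }
]

An entry like this resolves to content/en/cloud/spaces/_index.md, and the "#sharing-designs" fragment causes the tool to insert the feature-support shortcode below the heading matching "Sharing Designs"; without a fragment, the shortcode is placed immediately after the closing front matter delimiter.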