diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index feec341..ab811ff 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -31,7 +31,7 @@ on: - main env: - VERSION_NUMBER: 'v1.8.3' + VERSION_NUMBER: 'v1.8.4' DOCKERHUB_REGISTRY_NAME: 'digitalghostdev/poke-cli' AWS_REGION: 'us-west-2' @@ -55,7 +55,7 @@ jobs: args: '-no-fail -fmt sarif -out results.sarif ./...' - name: Upload SARIF Report - uses: github/codeql-action/upload-sarif@v3 + uses: github/codeql-action/upload-sarif@v4 with: sarif_file: results.sarif @@ -67,15 +67,19 @@ jobs: steps: - name: Checkout uses: actions/checkout@v6 - with: - fetch-depth: 0 - - name: Gitleaks - uses: gitleaks/gitleaks-action@v2 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - GITLEAKS_CONFIG: ./gitleaks.toml - GITLEAKS_VERSION: 8.29.0 + - name: Run Gitleaks + run: | + docker run --rm -v ${{ github.workspace }}:/path \ + ghcr.io/gitleaks/gitleaks:v8.29.0 \ + dir /path -c /path/gitleaks.toml --redact -v \ + --report-format sarif --report-path /path/gitleaks-results.sarif + + - name: Upload SARIF Report + if: always() + uses: github/codeql-action/upload-sarif@v4 + with: + sarif_file: gitleaks-results.sarif build-linux-packages: runs-on: ubuntu-22.04 @@ -240,16 +244,39 @@ jobs: if [ "${{ needs.upload-deb-packages.result }}" != "success" ] || \ [ "${{ needs.upload-rpm-packages.result }}" != "success" ] || \ [ "${{ needs.upload-apk-packages.result }}" != "success" ]; then - echo "⚠️ Some uploads failed! ⚠️" + echo "⚠️ Some uploads failed" exit 1 fi - echo "✅ All packages uploaded successfully! ✅" + echo "✅ All packages uploaded successfully" - build-docs-docker-image: + validate-links: runs-on: ubuntu-22.04 needs: [gitleaks] if: needs.gitleaks.result == 'success' + steps: + - name: Checkout + uses: actions/checkout@v6 + + - name: Check Links + uses: lycheeverse/lychee-action@v2 + with: + args: --verbose --config lychee.toml ./docs/**/*.md + fail: false + output: ./lychee-report.md + + - name: Upload Report + if: always() + uses: actions/upload-artifact@v4 + with: + name: lychee-report + path: ./lychee-report.md + + build-docs-docker-image: + runs-on: ubuntu-22.04 + needs: [validate-links] + if: needs.validate-links.result == 'success' + steps: - name: Checkout uses: actions/checkout@v6 diff --git a/.gitignore b/.gitignore index b46475e..0fb4852 100644 --- a/.gitignore +++ b/.gitignore @@ -18,7 +18,6 @@ poke-cli # Output of the go coverage tool, specifically when used with LiteIDE *.out -codecov* # Dependency directories (remove the comment below to include it) # vendor/ diff --git a/.goreleaser.yml b/.goreleaser.yml index bc74fb2..fa34e4d 100644 --- a/.goreleaser.yml +++ b/.goreleaser.yml @@ -14,7 +14,7 @@ builds: - windows - darwin ldflags: - - -s -w -X main.version=v1.8.3 + - -s -w -X main.version=v1.8.4 archives: - formats: [ 'zip' ] diff --git a/Dockerfile b/Dockerfile index 9d10a20..7d449de 100644 --- a/Dockerfile +++ b/Dockerfile @@ -8,7 +8,7 @@ RUN go mod download COPY . . -RUN go build -ldflags "-X main.version=v1.8.3" -o poke-cli . +RUN go build -ldflags "-X main.version=v1.8.4" -o poke-cli . # build 2 FROM --platform=$BUILDPLATFORM alpine:3.23 diff --git a/README.md b/README.md index b86226b..d4f9175 100644 --- a/README.md +++ b/README.md @@ -2,7 +2,7 @@ pokemon-logo

  version-label
- docker-image-size
+ docker-image-size
  ci-status-badge
@@ -96,11 +96,11 @@ Cloudsmith is a fully cloud-based service that lets you easily create, store, an 3. Choose how to interact with the container: * Run a single command and exit: ```bash - docker run --rm -it digitalghostdev/poke-cli:v1.8.3 [subcommand] [flag] + docker run --rm -it digitalghostdev/poke-cli:v1.8.4 [subcommand] [flag] ``` * Enter the container and use its shell: ```bash - docker run --rm -it --name poke-cli --entrypoint /bin/sh digitalghostdev/poke-cli:v1.8.3 -c "cd /app && exec sh" + docker run --rm -it --name poke-cli --entrypoint /bin/sh digitalghostdev/poke-cli:v1.8.4 -c "cd /app && exec sh" # placed into the /app directory, run the program with './poke-cli' # example: ./poke-cli ability swift-swim ``` diff --git a/card_data/pipelines/poke_cli_dbt/dbt_project.yml b/card_data/pipelines/poke_cli_dbt/dbt_project.yml index 1e444bf..d208ca8 100644 --- a/card_data/pipelines/poke_cli_dbt/dbt_project.yml +++ b/card_data/pipelines/poke_cli_dbt/dbt_project.yml @@ -1,5 +1,5 @@ name: 'poke_cli_dbt' -version: '1.8.3' +version: '1.8.4' profile: 'poke_cli_dbt' diff --git a/card_data/pipelines/tests/extract_sets_test.py b/card_data/pipelines/tests/extract_sets_test.py new file mode 100644 index 0000000..b2de8ab --- /dev/null +++ b/card_data/pipelines/tests/extract_sets_test.py @@ -0,0 +1,168 @@ +import sys +from pathlib import Path + +sys.path.insert(0, str(Path(__file__).parent.parent.parent)) + +import pytest +import polars as pl +import responses +from pipelines.defs.extract.tcgdex.extract_sets import extract_sets_data + + +@pytest.fixture +def mock_api_response(): + """Sample API responses matching tcgdex series format with sets""" + return { + "https://api.tcgdex.net/v2/en/series/me": { + "id": "me", + "name": "Mega Evolution", + "sets": [ + { + "id": "me01", + "name": "Mega Evolution", + "cardCount": {"official": 12, "total": 12}, + "logo": "https://example.com/me01.png", + "symbol": "https://example.com/me01-symbol.png", + }, + { + "id": "me02", + "name": "Phantasmal Flames", + "cardCount": {"official": 25, "total": 25}, + "logo": "https://example.com/me02.png", + "symbol": "https://example.com/me02-symbol.png", + }, + ], + }, + "https://api.tcgdex.net/v2/en/series/sv": { + "id": "sv", + "name": "Scarlet & Violet", + "sets": [ + { + "id": "sv01", + "name": "Scarlet & Violet", + "cardCount": {"official": 198, "total": 258}, + "logo": "https://example.com/sv01.png", + "symbol": "https://example.com/sv01-symbol.png", + }, + { + "id": "sv02", + "name": "Paldea Evolved", + "cardCount": {"official": 193, "total": 279}, + "logo": "https://example.com/sv02.png", + "symbol": None, + }, + ], + }, + "https://api.tcgdex.net/v2/en/series/swsh": { + "id": "swsh", + "name": "Sword & Shield", + "sets": [ + { + "id": "swsh1", + "name": "Sword & Shield", + "cardCount": {"official": 202, "total": 216}, + "logo": None, + "symbol": "https://example.com/swsh1-symbol.png", + }, + ], + }, + } + + +@pytest.mark.benchmark +@responses.activate +def test_extract_sets_data_success(mock_api_response): + """Test successful extraction of sets from multiple series""" + # Mock all API calls + for url, response_data in mock_api_response.items(): + responses.add( + responses.GET, + url, + json=response_data, + status=200, + ) + + result = extract_sets_data() + + # Assertions + assert isinstance(result, pl.DataFrame) # nosec + assert len(result) == 5 # nosec (2 + 2 + 1 sets) + assert set(result.columns) == { # nosec + "series_id", + "set_id", + "set_name", + "official_card_count", + "total_card_count", + "logo", + 
"symbol", + } + assert set(result["series_id"].to_list()) == {"me", "sv", "swsh"} # nosec + assert set(result["set_id"].to_list()) == {"me01", "me02", "sv01", "sv02", "swsh1"} # nosec + + +@pytest.mark.benchmark +@responses.activate +def test_extract_sets_data_empty_sets(mock_api_response): + """Test extraction when a series has no sets""" + # Modify one response to have empty sets + mock_api_response["https://api.tcgdex.net/v2/en/series/me"]["sets"] = [] + + for url, response_data in mock_api_response.items(): + responses.add( + responses.GET, + url, + json=response_data, + status=200, + ) + + result = extract_sets_data() + + assert isinstance(result, pl.DataFrame) # nosec + assert len(result) == 3 # nosec (0 + 2 + 1 sets) + assert "me" not in result["series_id"].to_list() # nosec + + +@pytest.mark.benchmark +@responses.activate +def test_extract_sets_data_null_card_counts(): + """Test extraction with null card counts""" + mock_responses = { + "https://api.tcgdex.net/v2/en/series/me": { + "id": "me", + "name": "Mega Evolution", + "sets": [], + }, + "https://api.tcgdex.net/v2/en/series/sv": { + "id": "sv", + "name": "Scarlet & Violet", + "sets": [ + { + "id": "sv01", + "name": "Scarlet & Violet", + "cardCount": {}, + "logo": None, + "symbol": None, + }, + ], + }, + "https://api.tcgdex.net/v2/en/series/swsh": { + "id": "swsh", + "name": "Sword & Shield", + "sets": [], + }, + } + + for url, response_data in mock_responses.items(): + responses.add( + responses.GET, + url, + json=response_data, + status=200, + ) + + result = extract_sets_data() + + assert isinstance(result, pl.DataFrame) # nosec + assert len(result) == 1 # nosec + assert result["official_card_count"].to_list()[0] is None # nosec + assert result["total_card_count"].to_list()[0] is None # nosec diff --git a/card_data/uv.lock b/card_data/uv.lock index ed44b61..dcc428a 100644 --- a/card_data/uv.lock +++ b/card_data/uv.lock @@ -156,7 +156,6 @@ dependencies = [ { name = "psycopg2-binary" }, { name = "pyarrow" }, { name = "pydantic" }, - { name = "pytest-codspeed" }, { name = "requests" }, { name = "soda-core-postgres" }, { name = "sqlalchemy" }, @@ -170,6 +169,7 @@ dev = [ { name = "dagster-postgres" }, { name = "dagster-webserver" }, { name = "pytest" }, + { name = "pytest-codspeed" }, { name = "responses" }, ] @@ -189,7 +189,6 @@ requires-dist = [ { name = "psycopg2-binary", specifier = ">=2.9.10" }, { name = "pyarrow", specifier = ">=20.0.0" }, { name = "pydantic", specifier = ">=2.11.7" }, - { name = "pytest-codspeed", specifier = ">=4.2.0" }, { name = "requests", specifier = ">=2.32.4" }, { name = "soda-core-postgres", specifier = ">=3.5.5" }, { name = "sqlalchemy", specifier = ">=2.0.41" }, @@ -203,6 +202,7 @@ dev = [ { name = "dagster-postgres", specifier = ">=0.27.3" }, { name = "dagster-webserver" }, { name = "pytest", specifier = ">=9.0.2" }, + { name = "pytest-codspeed", specifier = ">=4.2.0" }, { name = "responses", specifier = ">=0.25.8" }, ] diff --git a/cmd/card/cardinfo.go b/cmd/card/cardinfo.go index 18d4705..19d8ed4 100644 --- a/cmd/card/cardinfo.go +++ b/cmd/card/cardinfo.go @@ -22,7 +22,7 @@ func resizeImage(img image.Image, width, height int) image.Image { func CardImage(imageURL string) (string, error) { client := &http.Client{ - Timeout: time.Second * 15, + Timeout: time.Second * 60, } parsedURL, err := url.Parse(imageURL) if err != nil || (parsedURL.Scheme != "http" && parsedURL.Scheme != "https") { @@ -38,8 +38,14 @@ func CardImage(imageURL string) (string, error) { return "", fmt.Errorf("non-200 response: 
%d", resp.StatusCode) } + // Read body into memory first to avoid timeout during decode limitedBody := io.LimitReader(resp.Body, 10*1024*1024) - img, _, err := image.Decode(limitedBody) + bodyBytes, err := io.ReadAll(limitedBody) + if err != nil { + return "", fmt.Errorf("failed to read image data: %w", err) + } + + img, _, err := image.Decode(bytes.NewReader(bodyBytes)) if err != nil { return "", fmt.Errorf("failed to decode image: %w", err) } diff --git a/cmd/card/cardlist.go b/cmd/card/cardlist.go index 7fc38ff..ba511e5 100644 --- a/cmd/card/cardlist.go +++ b/cmd/card/cardlist.go @@ -172,7 +172,7 @@ func CallCardData(url string) ([]byte, error) { req.Header.Add("Authorization", "Bearer sb_publishable_oondaaAIQC-wafhEiNgpSQ_reRiEp7j") req.Header.Add("Content-Type", "application/json") - client := &http.Client{Timeout: 15 * time.Second} + client := &http.Client{Timeout: 60 * time.Second} resp, err := client.Do(req) if err != nil { return nil, fmt.Errorf("error making GET request: %w", err) diff --git a/cmd/card/setslist.go b/cmd/card/setslist.go index 4c55e70..df75aef 100644 --- a/cmd/card/setslist.go +++ b/cmd/card/setslist.go @@ -71,8 +71,11 @@ type setData struct { Symbol string `json:"symbol"` } +// creating a function variable to swap the implementation in tests +var getSetsData = callSetsData + func SetsList(seriesID string) (SetsModel, error) { - body, err := callSetsData("https://uoddayfnfkebrijlpfbh.supabase.co/rest/v1/sets") + body, err := getSetsData("https://uoddayfnfkebrijlpfbh.supabase.co/rest/v1/sets") if err != nil { return SetsModel{}, fmt.Errorf("error getting sets data: %v", err) } @@ -122,7 +125,7 @@ func callSetsData(url string) ([]byte, error) { req.Header.Add("Authorization", "Bearer sb_publishable_oondaaAIQC-wafhEiNgpSQ_reRiEp7j") req.Header.Add("Content-Type", "application/json") - client := &http.Client{Timeout: 15 * time.Second} + client := &http.Client{Timeout: 60 * time.Second} resp, err := client.Do(req) if err != nil { return nil, fmt.Errorf("error making GET request: %w", err) diff --git a/cmd/card/setslist_test.go b/cmd/card/setslist_test.go index e01befd..c1b0118 100644 --- a/cmd/card/setslist_test.go +++ b/cmd/card/setslist_test.go @@ -1,6 +1,9 @@ package card import ( + "errors" + "net/http" + "net/http/httptest" "strings" "testing" @@ -186,3 +189,170 @@ func TestSetsModel_Update_EnterKey(t *testing.T) { t.Error("Update with Enter should return tea.Quit command") } } + +func TestCallSetsData_SendsHeadersAndReturnsBody(t *testing.T) { + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if got := r.Header.Get("apikey"); got != "sb_publishable_oondaaAIQC-wafhEiNgpSQ_reRiEp7j" { + t.Fatalf("missing or wrong apikey header: %q", got) + } + if got := r.Header.Get("Authorization"); got != "Bearer sb_publishable_oondaaAIQC-wafhEiNgpSQ_reRiEp7j" { + t.Fatalf("missing or wrong Authorization header: %q", got) + } + if got := r.Header.Get("Content-Type"); got != "application/json" { + t.Fatalf("missing or wrong Content-Type header: %q", got) + } + + w.WriteHeader(http.StatusOK) + _, _ = w.Write([]byte(`{"ok":true}`)) + })) + defer srv.Close() + + body, err := callSetsData(srv.URL) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if string(body) != `{"ok":true}` { + t.Fatalf("unexpected body: %s", string(body)) + } +} + +func TestCallSetsData_Non200Error(t *testing.T) { + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + http.Error(w, "boom", 
http.StatusInternalServerError) + })) + defer srv.Close() + + _, err := callSetsData(srv.URL) + if err == nil { + t.Fatal("expected error for non-200 status") + } + if !strings.Contains(err.Error(), "unexpected status code: 500") { + t.Fatalf("error should mention status code, got: %v", err) + } +} + +func TestCallSetsData_BadURL(t *testing.T) { + _, err := callSetsData("http://%41:80/") // invalid URL host + if err == nil { + t.Fatal("expected error for bad URL") + } +} + +func TestSetsList_Success(t *testing.T) { + original := getSetsData + defer func() { getSetsData = original }() + + getSetsData = func(url string) ([]byte, error) { + json := `[ + {"series_id":"sv","set_id":"sv01","set_name":"Scarlet & Violet","official_card_count":198,"total_card_count":258,"logo":"https://example.com/sv01.png","symbol":"https://example.com/sv01-symbol.png"}, + {"series_id":"sv","set_id":"sv02","set_name":"Paldea Evolved","official_card_count":193,"total_card_count":279,"logo":"https://example.com/sv02.png","symbol":"https://example.com/sv02-symbol.png"}, + {"series_id":"swsh","set_id":"swsh01","set_name":"Sword & Shield","official_card_count":202,"total_card_count":216,"logo":"https://example.com/swsh01.png","symbol":"https://example.com/swsh01-symbol.png"} + ]` + return []byte(json), nil + } + + model, err := SetsList("sv") + if err != nil { + t.Fatalf("SetsList returned error: %v", err) + } + + // Should only have 2 sets (filtered by series_id "sv") + if model.SeriesName != "sv" { + t.Errorf("expected SeriesName 'sv', got %s", model.SeriesName) + } + + // Check setsIDMap has correct mappings + if model.setsIDMap["Scarlet & Violet"] != "sv01" { + t.Errorf("expected setsIDMap['Scarlet & Violet'] = 'sv01', got %s", model.setsIDMap["Scarlet & Violet"]) + } + if model.setsIDMap["Paldea Evolved"] != "sv02" { + t.Errorf("expected setsIDMap['Paldea Evolved'] = 'sv02', got %s", model.setsIDMap["Paldea Evolved"]) + } + + // swsh set should not be in the map + if _, exists := model.setsIDMap["Sword & Shield"]; exists { + t.Error("Sword & Shield should not be in setsIDMap (different series)") + } + + if model.View() == "" { + t.Error("model view should render") + } +} + +func TestSetsList_FetchError(t *testing.T) { + original := getSetsData + defer func() { getSetsData = original }() + + getSetsData = func(url string) ([]byte, error) { + return nil, errors.New("network error") + } + + _, err := SetsList("sv") + if err == nil { + t.Fatal("expected error when fetch fails") + } + if !strings.Contains(err.Error(), "error getting sets data") { + t.Errorf("error should mention 'error getting sets data', got: %v", err) + } +} + +func TestSetsList_BadJSON(t *testing.T) { + original := getSetsData + defer func() { getSetsData = original }() + + getSetsData = func(url string) ([]byte, error) { + return []byte("not-json"), nil + } + + _, err := SetsList("sv") + if err == nil { + t.Fatal("expected error for bad JSON payload") + } + if !strings.Contains(err.Error(), "error parsing sets data") { + t.Errorf("error should mention 'error parsing sets data', got: %v", err) + } +} + +func TestSetsList_EmptyResult(t *testing.T) { + original := getSetsData + defer func() { getSetsData = original }() + + getSetsData = func(url string) ([]byte, error) { + return []byte("[]"), nil + } + + model, err := SetsList("sv") + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + + if len(model.setsIDMap) != 0 { + t.Errorf("expected empty setsIDMap, got %d entries", len(model.setsIDMap)) + } + + if model.View() == "" { + 
t.Error("expected view to render with empty data") + } +} + +func TestSetsList_NoMatchingSeries(t *testing.T) { + original := getSetsData + defer func() { getSetsData = original }() + + getSetsData = func(url string) ([]byte, error) { + json := `[ + {"series_id":"swsh","set_id":"swsh01","set_name":"Sword & Shield","official_card_count":202,"total_card_count":216,"logo":"","symbol":""} + ]` + return []byte(json), nil + } + + model, err := SetsList("sv") + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + + // No sets match "sv" series + if len(model.setsIDMap) != 0 { + t.Errorf("expected empty setsIDMap when no series match, got %d entries", len(model.setsIDMap)) + } +} diff --git a/codecov.yml b/codecov.yml new file mode 100644 index 0000000..160b1d1 --- /dev/null +++ b/codecov.yml @@ -0,0 +1,9 @@ +coverage: + status: + project: + default: + target: 70% + threshold: 5% + patch: + default: + target: 50% \ No newline at end of file diff --git a/docs/Infrastructure_Guide/grafana.md b/docs/Infrastructure_Guide/grafana.md new file mode 100644 index 0000000..a3108bd --- /dev/null +++ b/docs/Infrastructure_Guide/grafana.md @@ -0,0 +1,71 @@ +--- +weight: 7 +--- + +# 7 // Grafana Cloud + +!!! question "What is Grafana?" + + Grafana is an open-source observability platform that visualizes metrics, logs, and traces through customizable dashboards. + It connects to various data sources like Prometheus, PostgreSQL, and others to provide real-time monitoring with charts, + graphs, and alerts. Grafana Cloud is the managed, hosted version that includes pre-configured Prometheus for metrics storage, + eliminating the need to run your own infrastructure. + +## Overview +Grafana is used to monitor the Supabase PostgreSQL instance. This service is not required for the project, +but a nice to have to view more details about the database's performance. The free tier on Grafana Cloud is +more than enough. + +Create an account on [Grafana Cloud](https://grafana.com/auth/sign-up/create-user). + +## Create Scrape Job +_A scrape job is responsible for calling the Supabase metrics endpoint._ + +1. On the homepage, on the left side menu, click on the drop-down for connections and +choose to [add a new connection](https://digitalghostdev.grafana.net/connections/add-new-connection). +2. Search for Supabase. +3. Give the Scrape Job a name. +4. Supabase recommends to leave the scrape interval to every minute. +5. Input the Supabase project ID. This can be found under Settings > General in Supabase. +6. Input a Supabase secret key. This can be found under Settings > API Keys > Secret Keys in Supabase. +Create a new one for Grafana. +7. Click **Test Connection**. +8. If test connection is successful, click on **Save Scrape Job**. +9. Finally click **Install**. + +## Import Supabase Dashboard +_Supabase has a premade dashboard ready for use in a `.json` format that can be imported via the Grafana UI._ + +1. On the left side menu, click on **Dashboards**. +2. In the upper-right, click on the **New** dropdown, then choose **Import.** +3. Copy or download this `.json` file from +[supabase/supabase-grafana](https://raw.githubusercontent.com/supabase/supabase-grafana/refs/heads/main/grafana/dashboard.json). +4. Upload or paste the `.json` file or code into the respective section in the Grafana UI. +5. Click **Load**. A prompt should appear to choose a data source. Choose the Prometheus instance that receives the Supabase metrics. +6. The dashboard should be ready to view. 
+
+![Example Grafana Dashboard](../assets/grafana-dashboard-example.png)
+
+!!! warning
+
+    The `uid` will need to be updated manually if the prompt for choosing a data source does not appear. If so, the dashboard
+    will have errors on each visual. To fix this, perform the steps below.
+
+
+1. Grab the `uid` for the Prometheus data source.
+    * On the left side menu, click on the **Connections** dropdown. Then, choose **Data Sources**.
+    * Search for "prom" and then click on the default Prometheus data source.
+    * In the URL of that page, the last section is the `uid`: `/datasources/edit/`.
+
+2. Edit the JSON Model in the Dashboard.
+    * Visit the dashboard and then click on **Edit** in the upper-right.
+    * Click on **Settings**.
+    * Click on the **JSON Model** tab.
+    * Grab the JSON code and using a text editor like VS Code, paste the code and search for `"uid": "prometheus"`.
+    * Choose to change all occurrences of `"uid": "prometheus"` to `"uid": ""`.
+    * Copy the updated JSON code and paste it into the Grafana UI.
+    * Save changes and head back to the dashboard.
+
+---
+
+Related Supabase [documentation](https://supabase.com/docs/guides/telemetry/metrics/grafana-cloud).
\ No newline at end of file
diff --git a/docs/assets/grafana-dashboard-example.png b/docs/assets/grafana-dashboard-example.png
new file mode 100644
index 0000000..1e4d368
Binary files /dev/null and b/docs/assets/grafana-dashboard-example.png differ
diff --git a/lychee.toml b/lychee.toml
new file mode 100644
index 0000000..e0019c0
--- /dev/null
+++ b/lychee.toml
@@ -0,0 +1,9 @@
+timeout = 30
+threads = 2
+retry_wait_time = 2
+
+verbose = "info"
+
+mode = "color"
+
+accept = ["200", "429"]
\ No newline at end of file
diff --git a/nfpm.yaml b/nfpm.yaml
index 0ee0aba..102d64c 100644
--- a/nfpm.yaml
+++ b/nfpm.yaml
@@ -1,7 +1,7 @@
 name: "poke-cli"
 arch: "arm64"
 platform: "linux"
-version: "v1.8.3"
+version: "v1.8.4"
 section: "default"
 version_schema: semver
 maintainer: "Christian S"
diff --git a/testdata/main_latest_flag.golden b/testdata/main_latest_flag.golden
index 9a0e4f4..2de4c1c 100644
--- a/testdata/main_latest_flag.golden
+++ b/testdata/main_latest_flag.golden
@@ -2,6 +2,6 @@
 ┃                              ┃
 ┃  Latest available release    ┃
 ┃  on GitHub:                  ┃
-┃  • v1.8.2                    ┃
+┃  • v1.8.3                    ┃
 ┃                              ┃
 ┗━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┛
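A note on the `CardImage` change in `cmd/card/cardinfo.go`: buffering the response with `io.ReadAll` before calling `image.Decode` keeps the 60-second client deadline confined to the download, since a decoder reading straight from `resp.Body` would still be subject to that deadline. Below is a minimal sketch of the pattern, assuming the file also gains a `bytes` import (that hunk is not shown in the diff); the `fetchAndDecode` name is illustrative, not the real function, which additionally validates the URL scheme and resizes the result.

```go
package card

import (
	"bytes"
	"fmt"
	"image"
	_ "image/jpeg" // register decoders so image.Decode recognizes common formats
	_ "image/png"
	"io"
	"net/http"
	"time"
)

// fetchAndDecode downloads an image fully into memory, then decodes it.
func fetchAndDecode(url string) (image.Image, error) {
	client := &http.Client{Timeout: 60 * time.Second}

	resp, err := client.Get(url)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		return nil, fmt.Errorf("non-200 response: %d", resp.StatusCode)
	}

	// Read the body first (capped at 10 MiB) so a slow decode cannot trip the client timeout.
	bodyBytes, err := io.ReadAll(io.LimitReader(resp.Body, 10*1024*1024))
	if err != nil {
		return nil, fmt.Errorf("failed to read image data: %w", err)
	}

	// Decode from the in-memory copy; no network deadline applies here.
	img, _, err := image.Decode(bytes.NewReader(bodyBytes))
	if err != nil {
		return nil, fmt.Errorf("failed to decode image: %w", err)
	}
	return img, nil
}
```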
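The `var getSetsData = callSetsData` seam added in `cmd/card/setslist.go` is what the new `SetsList` tests rely on: a test overwrites the package-level variable with a stub and restores the original afterwards. A condensed sketch of that restore-with-defer idiom follows; the test name and canned failure are illustrative, while `getSetsData`, `callSetsData`, and `SetsList` come from the diff above.

```go
package card

import (
	"errors"
	"testing"
)

func TestSetsList_WithStubbedFetch(t *testing.T) {
	// Swap the seam for a stub, and always restore the real callSetsData
	// so later tests in the package are unaffected.
	original := getSetsData
	defer func() { getSetsData = original }()

	getSetsData = func(url string) ([]byte, error) {
		return nil, errors.New("network error") // canned failure, no HTTP call made
	}

	if _, err := SetsList("sv"); err == nil {
		t.Fatal("expected SetsList to surface the fetch error")
	}
}
```

Using `t.Cleanup(func() { getSetsData = original })` instead of `defer` would work just as well here.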
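Not part of this diff, but a possible follow-up to the `CardImage` buffering change: an `httptest`-based test that serves a small generated PNG, mirroring how `callSetsData` is already exercised in `setslist_test.go`. Everything below is hypothetical; it assumes `CardImage` succeeds for any decodable image served over plain HTTP.

```go
package card

import (
	"bytes"
	"image"
	"image/color"
	"image/png"
	"net/http"
	"net/http/httptest"
	"testing"
)

func TestCardImage_DecodesServedPNG(t *testing.T) {
	// Encode a tiny 2x2 image in memory so no fixture file is needed.
	src := image.NewRGBA(image.Rect(0, 0, 2, 2))
	src.Set(0, 0, color.RGBA{R: 255, A: 255})

	var buf bytes.Buffer
	if err := png.Encode(&buf, src); err != nil {
		t.Fatalf("failed to encode test image: %v", err)
	}

	// Serve the PNG from a local test server; the URL scheme is http, which CardImage accepts.
	srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "image/png")
		_, _ = w.Write(buf.Bytes())
	}))
	defer srv.Close()

	if _, err := CardImage(srv.URL); err != nil {
		t.Fatalf("CardImage returned error: %v", err)
	}
}
```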