
New automated testing for releases #22

Workflow file for this run

name: Build & Test
on:
  push:
    branches: [main]
  pull_request:
    branches: [main]
  schedule:
    - cron: "0 0 * * *" # Daily at midnight UTC
env:
  TEST_IPV6: false
jobs:
  # First we need to compile the mmdbmeld project so we can build the DBs
  build-tools:
    runs-on: ubuntu-latest
    env:
      GO_VERSION: 1.21.4
      MMDBMELD_TAG: v0.2.0
    steps:
      - name: Check for Cached Tools Build
        uses: actions/cache@v3
        id: tools-cache
        with:
          path: ./tools
          key: go-v${{ env.GO_VERSION }}-mmdbmeld-${{ env.MMDBMELD_TAG }}
      - name: Set up Go
        uses: actions/setup-go@v4
        if: steps.tools-cache.outputs.cache-hit != 'true'
        with:
          go-version: ${{ env.GO_VERSION }}
      - name: Clone mmdbmeld
        if: steps.tools-cache.outputs.cache-hit != 'true'
        run: git clone -b ${{ env.MMDBMELD_TAG }} https://github.com/safing/mmdbmeld.git
      - name: Build mmdbmeld
        if: steps.tools-cache.outputs.cache-hit != 'true'
        working-directory: ./mmdbmeld
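        # go build -C (Go 1.20+) changes into the given directory before building,
        # so each binary lands in its own cmd/<tool> subdirectory.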
        run: |
          go build -C cmd/mmdbmeld
          go build -C cmd/mmdbcheck
          go build -C cmd/mmdbquery
      - name: Move the Tools
        if: steps.tools-cache.outputs.cache-hit != 'true'
        run: |
          mkdir ./tools
          cp ./mmdbmeld/cmd/mmdbmeld/mmdbmeld ./mmdbmeld/cmd/mmdbcheck/mmdbcheck ./mmdbmeld/cmd/mmdbquery/mmdbquery ./tools
      - name: Upload Tools To Artifacts
        uses: actions/upload-artifact@v3
        with:
          name: tools
          path: ./tools
  # Now we can fetch the DBs and build them
  build-database:
    runs-on: ubuntu-latest
    needs: [build-tools]
    steps:
      - uses: actions/checkout@v4
      - name: Download Tools
        uses: actions/download-artifact@v3
        with:
          name: tools
      - run: |
          mkdir DBs
          mkdir build
          chmod +x mmdbmeld
      # TODO: We are saving ETags, which can be used to properly cache these databases. Ideally, we should implement that so any DB that isn't updated doesn't need to be re-downloaded daily.
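      # One possible sketch (an assumption, not wired into this workflow): persist the
      # DBs directory with actions/cache so curl's --etag-compare can skip unchanged
      # downloads. The CSVs would need to be cached alongside the .etag files, since
      # a 304 response carries no body and may leave the output file empty.
      # - name: Cache Downloaded DBs
      #   uses: actions/cache@v3
      #   with:
      #     path: ./DBs
      #     key: dbs-${{ github.run_id }}
      #     restore-keys: |
      #       dbs-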
      - name: Download DBs
        working-directory: ./DBs
        run: |
          curl --show-error \
            --etag-save geo-whois-asn-country-ipv4.csv.etag \
            --etag-compare geo-whois-asn-country-ipv4.csv.etag \
            --output geo-whois-asn-country-ipv4.csv \
            "https://cdn.jsdelivr.net/npm/@ip-location-db/geo-whois-asn-country/geo-whois-asn-country-ipv4.csv"
          curl --show-error \
            --etag-save geo-whois-asn-country-ipv6.csv.etag \
            --etag-compare geo-whois-asn-country-ipv6.csv.etag \
            --output geo-whois-asn-country-ipv6.csv \
            "https://cdn.jsdelivr.net/npm/@ip-location-db/geo-whois-asn-country/geo-whois-asn-country-ipv6.csv"
          curl --show-error \
            --etag-save iptoasn-asn-ipv4.csv.etag \
            --etag-compare iptoasn-asn-ipv4.csv.etag \
            --output iptoasn-asn-ipv4.csv \
            "https://cdn.jsdelivr.net/npm/@ip-location-db/iptoasn-asn/iptoasn-asn-ipv4.csv"
          curl --show-error \
            --etag-save iptoasn-asn-ipv6.csv.etag \
            --etag-compare iptoasn-asn-ipv6.csv.etag \
            --output iptoasn-asn-ipv6.csv \
            "https://cdn.jsdelivr.net/npm/@ip-location-db/iptoasn-asn/iptoasn-asn-ipv6.csv"
          curl --show-error \
            --etag-save iptoasn-country-ipv4.csv.etag \
            --etag-compare iptoasn-country-ipv4.csv.etag \
            --output iptoasn-country-ipv4.csv \
            "https://cdn.jsdelivr.net/npm/@ip-location-db/iptoasn-country/iptoasn-country-ipv4.csv"
          curl --show-error \
            --etag-save iptoasn-country-ipv6.csv.etag \
            --etag-compare iptoasn-country-ipv6.csv.etag \
            --output iptoasn-country-ipv6.csv \
            "https://cdn.jsdelivr.net/npm/@ip-location-db/iptoasn-country/iptoasn-country-ipv6.csv"
      - name: Build DBs
        run: ./mmdbmeld build-config.yml
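      # For reference, mmdbmeld consumes a YAML config roughly of this shape
      # (an illustrative sketch based on the mmdbmeld README; the real
      # build-config.yml lives in this repo, and the values below are assumptions):
      # databases:
      #   - name: "geolocation-ipv4"
      #     mmdb:
      #       ipVersion: 4
      #       recordSize: 24
      #     inputs:
      #       - file: "DBs/geo-whois-asn-country-ipv4.csv"
      #         fields: ["from", "to", "country"]
      #     output: "build/geolocation-ipv4.mmdb"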
      - name: Upload Build To Artifacts
        uses: actions/upload-artifact@v3
        with:
          name: build
          path: ./build
  run-tests:
    runs-on: ubuntu-latest
    needs: build-database
    steps:
      - name: Download Databases
        uses: actions/download-artifact@v3
        with:
          name: build
      - name: Download Tools
        uses: actions/download-artifact@v3
        with:
          name: tools
      - run: |
          chmod +x mmdbcheck
      - name: Test IPv4 DBs
        run: |
          find . -type f -name "*v4*.mmdb" -print | while read -r file; do
            ./mmdbcheck all "$file"
          done
      - name: Test IPv6 DBs
        if: ${{ env.TEST_IPV6 == 'true' }}
        run: |
          find . -type f -name "*v6*.mmdb" -print | while read -r file; do
            ./mmdbcheck all "$file"
          done
  # This test pulls in Pingdom's probe server feed, which includes each probe's IP address and location, and uses that data to validate the databases' results.
  run-test-pingdom-probes:
    runs-on: ubuntu-latest
    needs: build-database
    steps:
      - uses: actions/checkout@v4
      - name: Download Databases
        uses: actions/download-artifact@v3
        with:
          name: build
      - name: Download Tools
        uses: actions/download-artifact@v3
        with:
          name: tools
      - run: |
          chmod +x mmdbcheck
          echo "NOW=$(date '+%D')" >> "${GITHUB_ENV}"
          # Values written to GITHUB_ENV only become visible to the env context in
          # later steps, so use a plain shell variable for the timestamp here.
          RELEASE_DATE="$(date '+%D %T')"
          echo "## Built on: ${RELEASE_DATE}" > results.md
          echo "Check the project's [readme](https://github.com/HostByBelle/IP-Geolocation-DB#variants) for information about the variants available." >> results.md
          echo "### Testing Results" >> results.md
      - name: Cache Pingdom Probe Data
        uses: actions/cache@v3
        id: probe-cache
        with:
          path: ./tests/feed.xml
          key: pingdom-probe-${{ env.NOW }}
      - name: Download Pingdom Probe Data
        if: steps.probe-cache.outputs.cache-hit != 'true'
        working-directory: ./tests
        run: |
          curl --show-error \
            --output feed.xml \
            --location \
            "https://www.pingdom.com/rss/probe_servers.xml"
      - name: Setup Python
        uses: actions/setup-python@v4
        with:
          python-version: "3.12"
      - name: Install Python Requirements
        working-directory: ./tests
        run: pip install -r requirements.txt
      - name: Test IPv4 DBs
        run: |
          echo "#### IPv4 Pingdom Tests:" >> results.md
          find . -type f -name "*v4*.mmdb" -print | while read -r file; do
            echo "**$file**" >> results.md
            python ./tests/pingdom-probe-test.py ./tests/feed.xml "$file" ip >> results.md
          done
      - name: Test IPv6 DBs
        run: |
          echo "#### IPv6 Pingdom Tests:" >> results.md
          find . -type f -name "*v6*.mmdb" -print | while read -r file; do
            echo "**$file**" >> results.md
            python ./tests/pingdom-probe-test.py ./tests/feed.xml "$file" ipv6 >> results.md
          done
      - name: Upload Results
        uses: actions/upload-artifact@v3
        with:
          name: results
          path: ./results.md