
Add workflow for dispensing Predictoor USDC rewards (#840) #1519

Workflow file for this run

name: Test with df-sql
on:
  push:
    branches: [main]
  pull_request:
    branches: [main]
  workflow_call:
env:
  GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
  BINANCE_API_URL: ${{secrets.BINANCE_API_URL}}
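# The job below generates DF CSVs with dftool, hands them to a local df-sql
# instance, and then checks df-sql's HTTP endpoints for the expected fields.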
jobs:
  build:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: [3.8]
    steps:
      - uses: actions/checkout@v2
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}
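      # Check out df-sql (pinned to v0.1.1) into ./df-sql; it is started below via
      # docker-compose and queried over HTTP at the end of the job.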
      - uses: actions/checkout@v2
        name: Checkout df-sql
        with:
          repository: "oceanprotocol/df-sql"
          path: "df-sql"
          ref: v0.1.1
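      # ~/.dfcsv is the directory the generated CSVs are moved into for df-sql to pick
      # up (see "Move csvs to df-sql dir" below); the sample env file is used unchanged.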
      - name: Create CSV dir
        run: |
          mkdir ~/.dfcsv
      - name: Setup df-sql env file
        run: |
          mv df-sql/.env.sample df-sql/.env
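      # Start df-sql in the background (trailing &) so the job can keep generating
      # CSVs while the service boots.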
      - name: Run df-sql
        working-directory: ${{ github.workspace }}/df-sql
        run: |
          docker-compose up &
      - name: Install dependencies
        working-directory: ${{ github.workspace }}
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt --quiet
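      # Assumption: df-py's contract tooling expects @openzeppelin/contracts to be
      # available locally, so it is installed as its own step.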
      - name: Set up Openzeppelin
        run: npm install @openzeppelin/contracts
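      # TEST_START_DATE/TEST_END_DATE bound the test window, CSV_DIR is where dftool
      # writes its CSVs, RETRY_TIMES feeds the --RETRIES flag, and the Infura RPC URLs
      # plus INFURA_NETWORKS configure Ethereum mainnet and Polygon access.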
      - name: Set env variables
        run: |
          echo "DFTOOL_KEY=${{ secrets.DFTOOL_KEY_MULTISIG }}" >> $GITHUB_ENV
          echo "ADDRESS_FILE=.github/workflows/data/address.json" >> $GITHUB_ENV
          echo "WEB3_INFURA_PROJECT_ID=${{ secrets.WEB3_INFURA_PROJECT_ID }}" >> $GITHUB_ENV
          echo "SECRET_SEED=${{ secrets.SECRET_SEED }}" >> $GITHUB_ENV
          echo "TEST_START_DATE=2023-01-20" >> $GITHUB_ENV
          echo "TEST_END_DATE=2023-01-27" >> $GITHUB_ENV
          echo "CSV_DIR=/tmp/csv" >> $GITHUB_ENV
          echo "RETRY_TIMES=5" >> $GITHUB_ENV
          echo "POLYGON_RPC_URL=https://polygon-mainnet.infura.io/v3/" >> $GITHUB_ENV
          echo "MAINNET_RPC_URL=https://mainnet.infura.io/v3/" >> $GITHUB_ENV
          echo "INFURA_NETWORKS=polygon,mainnet" >> $GITHUB_ENV
      - name: Run dftool get_rate
        run: |
          ./dftool get_rate OCEAN $TEST_START_DATE $TEST_END_DATE $CSV_DIR --RETRIES $RETRY_TIMES
      - name: Run dftool volsym
        run: |
          ./dftool volsym $TEST_START_DATE $TEST_END_DATE 2 $CSV_DIR 1 --RETRIES $RETRY_TIMES
          ./dftool volsym $TEST_START_DATE $TEST_END_DATE 2 $CSV_DIR 137 --RETRIES $RETRY_TIMES
      - name: Run dftool vebals
        run: |
          ./dftool vebals $TEST_START_DATE $TEST_END_DATE 2 $CSV_DIR 1 --RETRIES $RETRY_TIMES
      - name: Run dftool allocations
        run: |
          ./dftool allocations $TEST_START_DATE $TEST_END_DATE 2 $CSV_DIR 1 --RETRIES $RETRY_TIMES
      - name: Run dftool nftinfo
        run: |
          ./dftool nftinfo $CSV_DIR 1 --FIN $TEST_END_DATE
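      # Copy the OCEAN rate CSV to rate-MOCEAN.csv so a rate file also exists for
      # MOCEAN (OCEAN bridged to Polygon).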
      - name: Run sed
        run: |
          cp $CSV_DIR/rate-OCEAN.csv $CSV_DIR/rate-MOCEAN.csv
          sed -i -e 's/MOCEAN/OCEAN/g' $CSV_DIR/rate-MOCEAN.csv
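      # Compute volume DF rewards from the CSVs in $CSV_DIR; the 100000000 positional
      # argument is taken to be the total reward budget (assumption about dftool calc usage).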
      - name: Run dftool calc rewards
        run: |
          ./dftool calc volume $CSV_DIR 100000000 --START_DATE $TEST_START_DATE
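      # Hand the generated CSVs to df-sql (it reads from ~/.dfcsv) and give it
      # roughly a minute to sync them before querying.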
      - name: Move csvs to df-sql dir
        run: |
          mv /tmp/csv/* ~/.dfcsv
      - name: Wait a minute for df-sql to sync
        run: |
          sleep $(($(date -d "+ 1 minute + 5 seconds" +%s) - $(date +%s)))
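      # Each curl asserts that the corresponding df-sql endpoint returns the expected
      # field; with set -e, any missing field fails the job.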
      - name: Test queries
        run: |
          set -e
          curl -X POST http://localhost:6234/vebals | grep -q "\"locked_amt\":"
          curl -X POST http://localhost:6234/volume | grep -q "\"nft_addr\":\"0x"
          curl -X GET http://localhost:6234/apy/active | grep -q "\"apy\":"
          curl -X GET http://localhost:6234/apy/addr/0x | grep -q "\"apy\":"
          curl -X POST http://localhost:6234/nftinfo | grep -q "\"ocean_allocated_realtime\":0,"
          curl -X POST http://localhost:6234/allocations | grep -q "\"ocean_amt\":"
          curl -X POST http://localhost:6234/rewards | grep -q "\"LP_addr\":\"0x"
          curl -X POST http://localhost:6234/rewardsSummary | grep -q "\"curating_amt\":"