name: Scrape transfer and update file
on:
  # Scheduled runs are currently disabled; trigger manually via workflow_dispatch.
  # schedule:
  #   - cron: '15 * * * *'
  workflow_dispatch:
    inputs:
      timeout:
        description: "Scraper timeout"
        required: true
        type: number
        default: 120

concurrency:
  group: transfer-scraper

jobs:
  scrape-data:
    name: Scrape transfer guide
    runs-on: ubuntu-latest
    steps:
      - name: Checkout scraping repo
        uses: actions/checkout@v4
        with:
          path: quatalog-scraping

      - name: Checkout data repo
        uses: actions/checkout@v4
        with:
          repository: quatalog/data  # assumed: the same data repo the push job clones below
          path: data

      - name: Set up python
        uses: actions/setup-python@v4
        with:
          python-version: '3.11'
          cache: 'pip'

      - name: Install dependencies
        working-directory: quatalog-scraping/transfer_scraper
        run: |
          python -m pip install --upgrade pip
          pip install -r 'requirements.txt'

      - name: Log IP
        run: |
          echo "Public IP: $(curl -s 'https://ipinfo.io/ip')"

      - name: Scrape transfer guide
        # Arguments: <output file> <state file> <timeout>
        run: |
          python3 quatalog-scraping/transfer_scraper/main.py \
              data/transfer.json \
              data/transfer_state.json \
              ${{ github.event.inputs.timeout }}

      - name: Upload data to artifact
        uses: actions/upload-artifact@v4
        with:
          name: transfer-data
          path: data/

  push-new-data:
    name: Push new data to data repo
    runs-on: ubuntu-latest
    needs: [scrape-data]
    steps:
      - name: Clone Quatalog data
        uses: actions/checkout@v4
        with:
          repository: quatalog/data
          path: quatalog-data
          token: ${{ secrets.PUSH_TOKEN }}

      - name: Download data from artifact
        uses: actions/download-artifact@v4
        with:
          name: transfer-data
          path: data/

      - name: Copy data to repo directory
        run: |
          rsync -avz data/ quatalog-data/

      - name: Push new data
        working-directory: quatalog-data
        run: |
          git config user.name "Quatalog Updater"
          git config user.email "github_actions@quatalog.com"
          git add transfer.json transfer_state.json
          git commit -m "$(date)" || exit 0  # exit 0: nothing to commit when the data is unchanged
          git push