schedule_spider_by_date.yaml
name: Schedule All Enabled Spiders Crawl

on:
  workflow_dispatch:
    inputs:
      start_date:
        description: 'Start date (YYYY-MM-DD)'
        required: true

jobs:
  schedule:
    runs-on: ubuntu-latest
    env:
      SHUB_APIKEY: ${{ secrets.SHUB_APIKEY }}
      SCRAPY_CLOUD_PROJECT_ID: ${{ secrets.SCRAPY_CLOUD_PROJECT_ID }}
      FILES_STORE: ${{ secrets.FILES_STORE }}
      QUERIDODIARIO_DATABASE_URL: ${{ secrets.QUERIDODIARIO_DATABASE_URL }}
      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
      AWS_ENDPOINT_URL: ${{ secrets.AWS_ENDPOINT_URL }}
      AWS_REGION_NAME: ${{ secrets.AWS_REGION_NAME }}
      SPIDERMON_DISCORD_FAKE: ${{ secrets.SPIDERMON_DISCORD_FAKE }}
      SPIDERMON_DISCORD_WEBHOOK_URL: ${{ secrets.SPIDERMON_DISCORD_WEBHOOK_URL }}
      ZYTE_SMARTPROXY_APIKEY: ${{ secrets.ZYTE_SMARTPROXY_APIKEY }}
    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-python@v2
        with:
          python-version: "3.10"
      - name: Prepare environment
        run: |
          python -m pip install --upgrade pip
          pip install click python-decouple scrapinghub SQLAlchemy psycopg2
      - name: Schedule partial crawl
        if: ${{ github.event.inputs.start_date }}
        run: |
          cd data_collection/
          python scheduler.py schedule-all-spiders-by-date --start_date ${{ github.event.inputs.start_date }}
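
The last step calls a project-specific scheduler.py in data_collection/ that is not shown on this page. As a rough sketch of what that command could look like, the snippet below uses the scrapinghub client and the click and python-decouple packages (all installed in the Prepare environment step) to schedule every spider in the Scrapy Cloud project with the given start date. The command name and --start_date option mirror the workflow invocation above; the job_args key and the omission of enabled-spider filtering (which the real script presumably handles via SQLAlchemy and QUERIDODIARIO_DATABASE_URL, given the installed dependencies) are assumptions for illustration, not the repository's actual implementation.

# scheduler.py -- illustrative sketch only, not the repository's real code.
import click
from decouple import config
from scrapinghub import ScrapinghubClient


@click.command()  # click derives the CLI name schedule-all-spiders-by-date
@click.option("--start_date", required=True, help="Start date (YYYY-MM-DD)")
def schedule_all_spiders_by_date(start_date):
    """Schedule every spider in the Scrapy Cloud project from start_date onward."""
    # Credentials come from the env vars the workflow sets from repo secrets.
    client = ScrapinghubClient(config("SHUB_APIKEY"))
    project = client.get_project(config("SCRAPY_CLOUD_PROJECT_ID", cast=int))
    for spider in project.spiders.list():
        # job_args are forwarded to the spider as -a arguments on Scrapy Cloud;
        # the "start_date" argument name is an assumption here.
        project.jobs.run(spider["id"], job_args={"start_date": start_date})


if __name__ == "__main__":
    schedule_all_spiders_by_date()

With the workflow on the default branch, the crawl can be dispatched from the repository's Actions tab or with the GitHub CLI, e.g. gh workflow run schedule_spider_by_date.yaml -f start_date=YYYY-MM-DD; the `if` guard on the final step simply skips scheduling when no start_date input is present.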