Update robots.txt to disallow all crawlers #71
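
The diff itself is not shown in this view, but a robots.txt that disallows every crawler from the entire site conventionally consists of two lines:

# Block all user agents from all paths.
User-agent: *
Disallow: /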

name: Build
on:
  push:
env:
  BUILD_NUMBER: ${{ github.run_number }}
jobs:
  build:
    # Run only on the upstream repository, not on forks.
    if: github.repository == 'vrchost/rooibos'
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
        with:
          # Fetch the full history rather than a shallow clone.
          fetch-depth: 0
      - name: Set up Python 3.7
        uses: actions/setup-python@v2
        with:
          python-version: 3.7
      - name: Install requirements
        run: |
          python -m pip install -U -q pip wheel twine
          python -m pip install -r requirements-dev.txt
      - name: Build
        run: |
          # Clear any stale artifacts before building the wheel.
          rm -f -- dist/*
          python setup.py bdist_wheel
      - name: Archive build artifacts
        uses: actions/upload-artifact@v2
        with:
          name: artifacts
          path: dist/*
      - name: Publish to testpypi
        run: |
          python3 -m twine upload dist/*
        env:
          TWINE_USERNAME: __token__
          TWINE_PASSWORD: ${{ secrets.TWINE_TEST_TOKEN }}
          TWINE_REPOSITORY: testpypi
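
The workflow exports BUILD_NUMBER to every step, which suggests the project's setup.py folds the CI run number into the wheel version. The actual setup.py is not shown here; a minimal sketch of that pattern, with the package name and version scheme as assumptions:

import os
from setuptools import setup

# Hypothetical excerpt: fold the CI run number into the version.
# "rooibos" and the 1.0.x scheme are assumptions, not the project's
# actual metadata.
build_number = os.environ.get("BUILD_NUMBER", "0")
setup(
    name="rooibos",
    version="1.0.%s" % build_number,
)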
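
Because the upload targets TestPyPI rather than the real index (TWINE_REPOSITORY: testpypi, with the token read from the TWINE_TEST_TOKEN secret), the published wheel can be smoke-tested by installing from the test index. The distribution name below is an assumption based on the repository name:

# Hypothetical check; "rooibos" as the distribution name is an assumption.
python3 -m pip install --index-url https://test.pypi.org/simple/ rooibos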