Skip to content

Commit

Permalink
chore: format robots.txt.yml with prettier (#21)
Browse files Browse the repository at this point in the history
  • Loading branch information
hedyhli authored May 8, 2024
1 parent 16a33cb commit 9843063
Showing 1 changed file with 24 additions and 25 deletions.
49 changes: 24 additions & 25 deletions .github/workflows/robots.txt.yml
Original file line number Diff line number Diff line change
@@ -1,34 +1,33 @@
# Weekly refresh of public/robots.txt from the Dark Visitors API,
# committed back to the repository by the github-actions bot.
name: "Update robots.txt"
on:
  schedule:
    - cron: "0 0 * * 6" # At 00:00 on Saturday
  workflow_dispatch: # allow manual runs from the Actions tab

jobs:
  update:
    name: "Update robots.txt"

    runs-on: ubuntu-latest

    # contents: write is required so the commit step can push to the repo
    permissions:
      contents: write

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      # Fetch a robots.txt covering the listed AI agent categories and
      # overwrite the copy served from ./public.
      - name: Pull latest robots
        run: |
          curl -X POST https://api.darkvisitors.com/robots-txts \
            -H "Authorization: Bearer ${{ secrets.DarkVisitorsBearer }}" \
            -H "Content-Type: application/json" \
            --data-raw '{"agent_types": ["AI Assistant", "AI Data Scraper", "AI Search Crawler", "Undocumented AI Agent"]}' \
            --output ./public/robots.txt

      # Commit the refreshed file (no-op when robots.txt is unchanged).
      - name: Commit changes
        uses: EndBug/add-and-commit@v9
        with:
          message: "chore: generate robots.txt"
          default_author: github_actions
          push: true

0 comments on commit 9843063

Please sign in to comment.