| Crates.io | sqlite-robotstxt |
| lib.rs | sqlite-robotstxt |
| version | 0.0.1-alpha.3 |
| created_at | 2023-08-27 20:31:58.572967+00 |
| updated_at | 2023-08-28 02:50:50.668216+00 |
| description | A SQLite extension for parsing robots.txt files |
| homepage | https://github.com/asg017/sqlite-robotstxt |
| repository | https://github.com/asg017/sqlite-robotstxt |
| max_upload_size | |
| id | 956375 |
| size | 195,065 |
A SQLite extension for parsing robots.txt files. Based on sqlite-loadable-rs and the robotstxt crate.
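To follow the examples below in the SQLite CLI, load the compiled extension first. The shared-library filename here is an assumption; it depends on your platform and how the extension was built.
.load ./libsqlite_robotstxt
-- or, from SQL (filename is an assumption):
select load_extension('./libsqlite_robotstxt');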
Check whether a specified User-Agent can access a specific path, based on the rules of a robots.txt file.
select robotstxt_matches(
readfile('robots.txt'),
'My-Agent',
'/path'
); -- 0 or 1
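As a minimal sketch, several paths can be checked in one query. This assumes the extension is loaded, a robots.txt file exists in the working directory, and the readfile() and json_each() functions are available (both ship with the SQLite CLI and the built-in JSON functions).
select
  value as path,
  robotstxt_matches(
    readfile('robots.txt'),
    'My-Agent',
    value
  ) as result
from json_each('["/", "/search", "/private"]'); -- one 0/1 result per path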
Find all individual rules specified in a robots.txt file.
select *
from robotstxt_rules(
readfile('tests/examples/en.wikipedia.org.robots.txt')
)
limit 10;
/*
┌────────────────────────────┬────────┬───────────┬──────┐
│ user_agent │ source │ rule_type │ path │
├────────────────────────────┼────────┼───────────┼──────┤
│ MJ12bot │ 12 │ disallow │ / │
│ Mediapartners-Google* │ 16 │ disallow │ / │
│ IsraBot │ 20 │ disallow │ │
│ Orthogaffe │ 23 │ disallow │ │
│ UbiCrawler │ 28 │ disallow │ / │
│ DOC │ 31 │ disallow │ / │
│ Zao │ 34 │ disallow │ / │
│ sitecheck.internetseer.com │ 39 │ disallow │ / │
│ Zealbot │ 42 │ disallow │ / │
│ MSIECrawler │ 45 │ disallow │ / │
└────────────────────────────┴────────┴───────────┴──────┘
*/
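Because robotstxt_rules() is a table-valued function, its output composes with ordinary SQL. A sketch, using the same example file, that counts disallow rules per user agent:
select
  user_agent,
  count(*) as disallow_rules
from robotstxt_rules(
  readfile('tests/examples/en.wikipedia.org.robots.txt')
)
where rule_type = 'disallow'
group by user_agent
order by disallow_rules desc
limit 5;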
Use with sqlite-http to request robots.txt files on the fly.
select *
from robotstxt_rules(
http_get_body('https://www.reddit.com/robots.txt')
)
limit 10;
/*
┌────────────┬────────┬───────────┬─────────────────────┐
│ user_agent │ source │ rule_type │ path │
├────────────┼────────┼───────────┼─────────────────────┤
│ 008 │ 3 │ disallow │ / │
│ voltron │ 7 │ disallow │ / │
│ bender │ 10 │ disallow │ /my_shiny_metal_ass │
│ Gort │ 13 │ disallow │ /earth │
│ MJ12bot │ 16 │ disallow │ / │
│ PiplBot │ 19 │ disallow │ / │
│ * │ 22 │ disallow │ /*.json │
│ * │ 23 │ disallow │ /*.json-compact │
│ * │ 24 │ disallow │ /*.json-html │
│ * │ 25 │ disallow │ /*.xml │
└────────────┴────────┴───────────┴─────────────────────┘
*/
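The same combination works with robotstxt_matches(). A sketch, assuming sqlite-http is loaded and the site is reachable; the user agent is taken from the rules listed above:
select robotstxt_matches(
  http_get_body('https://www.reddit.com/robots.txt'),
  'MJ12bot',
  '/'
); -- 0 or 1, per the fetched rules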
robotstxt_allowed(rules, path), an overload on robotstxt_user_agents