generate-robotstxt
Version:
An awesome robots.txt generator
101 lines (82 loc) • 2.34 kB
Markdown
# generate-robotstxt
[![NPM version](https://img.shields.io/npm/v/generate-robotstxt.svg)](https://www.npmjs.org/package/generate-robotstxt)
[![Build Status](https://travis-ci.org/itgalaxy/generate-robotstxt.svg?branch=master)](https://travis-ci.org/itgalaxy/generate-robotstxt)
[![Dependency Status](https://david-dm.org/itgalaxy/generate-robotstxt.svg)](https://david-dm.org/itgalaxy/generate-robotstxt)
[![devDependency Status](https://david-dm.org/itgalaxy/generate-robotstxt/dev-status.svg)](https://david-dm.org/itgalaxy/generate-robotstxt?type=dev)
[![Greenkeeper badge](https://badges.greenkeeper.io/itgalaxy/generate-robotstxt.svg)](https://greenkeeper.io)
An awesome robots.txt generator.
## Installation
```shell
npm install --save-dev generate-robotstxt
```
## Usage
```js
import robotstxt from "generate-robotstxt";
robotstxt({
policy: [
{
userAgent: "Googlebot",
allow: "/",
disallow: "/search",
crawlDelay: 2
},
{
userAgent: "OtherBot",
allow: ["/allow-for-all-bots", "/allow-only-for-other-bot"],
disallow: ["/admin", "/login"],
crawlDelay: 2
},
{
userAgent: "*",
allow: "/",
disallow: "/search",
crawlDelay: 10,
cleanParam: "ref /articles/"
}
],
sitemap: "http://example.com/sitemap.xml",
host: "http://example.com"
}).then(content => {
console.log(content);
});
```
## File based configuration
**robots-txt.config.js**
```js
module.exports = {
policy: [
{
userAgent: "Googlebot",
allow: "/",
disallow: ["/search"],
crawlDelay: 2
},
{
userAgent: "OtherBot",
allow: ["/allow-for-all-bots", "/allow-only-for-other-bot"],
disallow: ["/admin", "/login"],
crawlDelay: 2
},
{
userAgent: "*",
allow: "/",
disallow: "/search",
crawlDelay: 10,
cleanParam: "ref /articles/"
}
],
sitemap: "http://example.com/sitemap.xml",
host: "http://example.com"
};
```
## CLI
```shell
Awesome generator robots.txt
Usage: generate-robotstxt [options] <dest>
Options:
--config Path to a specific configuration file.
```
## Contribution
Feel free to push your code if you agree with publishing it under the MIT license.
## [Changelog](CHANGELOG.md)
## [License](LICENSE)