Mirror of https://github.com/crawler-commons/crawler-commons (synced 2024-05-04 14:36:04 +02:00)
BaseRobotRules: Document that Crawl-delay is stored in milliseconds
This commit is contained in:
parent
a3900425f3
commit
d710c85871
@@ -47,10 +47,20 @@ public abstract class BaseRobotRules implements Serializable {
         _sitemaps = new LinkedHashSet<>();
     }
 
+    /**
+     * Get Crawl-delay (in milliseconds)
+     *
+     * @return Crawl-delay defined in the robots.txt for the given agent name,
+     *         or {@link #UNSET_CRAWL_DELAY} if not defined.
+     */
     public long getCrawlDelay() {
         return _crawlDelay;
     }
 
+    /**
+     * @param crawlDelay
+     *            Crawl-Delay in milliseconds
+     */
     public void setCrawlDelay(long crawlDelay) {
         _crawlDelay = crawlDelay;
     }
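For reference, a minimal sketch of how a caller might consume the millisecond value documented above. It assumes SimpleRobotRules (the concrete BaseRobotRules subclass in crawler-commons) and its no-argument constructor, and sets the delay directly instead of having it filled in by a robots.txt parser; the only API relied on is the getter/setter pair and the UNSET_CRAWL_DELAY constant referenced in this diff.

    import crawlercommons.robots.BaseRobotRules;
    import crawlercommons.robots.SimpleRobotRules;

    public class CrawlDelayExample {
        public static void main(String[] args) throws InterruptedException {
            // SimpleRobotRules extends BaseRobotRules; the delay is set directly here
            // rather than being derived from a parsed robots.txt file.
            BaseRobotRules rules = new SimpleRobotRules();
            rules.setCrawlDelay(5_000L); // "Crawl-delay: 5" (seconds) stored as 5000 ms

            long delay = rules.getCrawlDelay(); // milliseconds, per the Javadoc above
            if (delay != BaseRobotRules.UNSET_CRAWL_DELAY) {
                // Pause between successive fetches to the same host.
                Thread.sleep(delay);
            }
        }
    }

Because the stored value is already in milliseconds, it can be passed to Thread.sleep (or a scheduler) without further conversion.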