Versions in this module

v0 — v0.0.1 (Sep 21, 2023)

Changes in this version:
+ const DefaultTimeout
+ var ErrorRobotstxtRestriction = errors.New("Page not accessible due to robots.txt restriction")
+ func New(cookieJar ...http.CookieJar) (*colibri.Colibri, error)
+ type Client struct
+ Jar http.CookieJar
+ func NewClient(cookieJar ...http.CookieJar) (*Client, error)
+ func (client *Client) Clear()
+ func (client *Client) Do(c *colibri.Colibri, rules *colibri.Rules) (colibri.Response, error)
+ type ReqDelay struct
+ func NewReqDelay() *ReqDelay
+ func (rd *ReqDelay) Clear()
+ func (rd *ReqDelay) Done(u *url.URL)
+ func (rd *ReqDelay) Stamp(u *url.URL)
+ func (rd *ReqDelay) Wait(u *url.URL, duration time.Duration)
+ type Response struct
+ HTTP *http.Response
+ func (resp *Response) Body() io.ReadCloser
+ func (resp *Response) Do(rules *colibri.Rules) (colibri.Response, error)
+ func (resp *Response) Extract(rules *colibri.Rules) (colibri.Response, map[string]any, error)
+ func (resp *Response) Header() http.Header
+ func (resp *Response) StatusCode() int
+ func (resp *Response) URL() *url.URL
+ type RobotsData struct
+ func NewRobotsData() *RobotsData
+ func (robots *RobotsData) Clear()
+ func (robots *RobotsData) IsAllowed(c *colibri.Colibri, rules *colibri.Rules) error