colibri

package module
v0.0.1 Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Sep 21, 2023 License: MIT Imports: 11 Imported by: 0

README

Colibri

Colibri is an extensible web crawling and scraping framework for Go, used to crawl and extract structured data on the web.

Installation

 $ go get github.com/eduardogxnzalez/colibri

Quick Starts

type (
	// HTTPClient represents an HTTP client.
	HTTPClient interface {
		// Do makes HTTP requests.
		Do(c *Colibri, rules *Rules) (Response, error)

		// Clear cleans the fields of the structure.
		Clear()
	}

	// Delay manages the delay between each HTTP request.
	Delay interface {
		// Wait waits for the previous HTTP request to the same URL and stores
		// the timestamp, then starts the calculated delay with the timestamp
		// and the specified duration of the delay.
		Wait(u *url.URL, duration time.Duration)

		// Done warns that an HTTP request has been made to the URL.
		Done(u *url.URL)

		// Stamp records the time at which the HTTP request to the URL was made.
		Stamp(u *url.URL)

		// Clear cleans the fields of the structure.
		Clear()
	}

	// RobotsTxt represents a robots.txt parser.
	RobotsTxt interface {
		// IsAllowed verifies that the User-Agent can access the URL.
		IsAllowed(c *Colibri, rules *Rules) error

		// Clear cleans the fields of the structure.
		Clear()
	}

	// Parser represents a parser of the response content.
	Parser interface {
		// Match returns true if the Content-Type is compatible with the Parser.
		Match(contentType string) bool

		// Parse parses the response based on the rules.
		Parse(rules *Rules, resp Response) (map[string]any, error)

		// Clear cleans the fields of the structure.
		Clear()
	}
)

// Colibri performs HTTP requests and parses
// the content of the response based on rules.
type Colibri struct {
	Client    HTTPClient
	Delay     Delay
	RobotsTxt RobotsTxt
	Parser    Parser
}

Do

// Do performs an HTTP request according to the rules.
func (c *Colibri) Do(rules *Rules) (resp Response, err error)
c := colibri.New()
c.Client = ...    // Required
c.Delay = ...     // Optional
c.RobotsTxt = ... // Optional
c.Parser = ...    // Optional

rules, err := colibri.NewRules(map[string]any{...})
if err != nil {
	panic(err)
}

resp, err := c.Do(rules)
if err != nil {
	panic(err)
}

fmt.Println("URL:", resp.URL())
fmt.Println("Status code:", resp.StatusCode())
fmt.Println("Content-Type:", resp.Header().Get("Content-Type"))

Extract

// Extract performs the HTTP request and parses the content of the response following the rules.
// It returns the response of the request, the data extracted with the selectors
// and an error (if any).
func (c *Colibri) Extract(rules *Rules) (resp Response, output map[string]any, err error)
var rawRules = []byte(`{...}`) // Raw Rules ~ JSON 

c := colibri.New()
c.Client = ...    // Required
c.Delay = ...     // Optional
c.RobotsTxt = ... // Optional
c.Parser = ...    // Required

var rules colibri.Rules
err := json.Unmarshal(rawRules, &rules)
if err != nil {
	panic(err)
} 

resp, data, err := c.Extract(&rules)
if err != nil {
	panic(err)
}

fmt.Println("URL:", resp.URL())
fmt.Println("Status code:", resp.StatusCode())
fmt.Println("Content-Type:", resp.Header().Get("Content-Type"))
fmt.Println("Data:", data)

Raw Rules ~ JSON

{
	"Method": "string",
	"URL": "string",
	"Proxy": "string",
	"Header": {
		"string": "string",
		"string": ["string", "string", ...]
	},
	"Timeout": "string_or_number",
	"UseCookies": "bool_string_or_number",
	"IgnoreRobotsTxt": "bool_string_or_number",
	"Delay": "string_or_number",
	"Selectors": {...}
}

Selectors

{
	"Selectors": {
		"key_name": "expression"
	}
}
{
	"Selectors": {
		"title": "//head/title"
	}
}
{
	"Selectors": {
		"key_name":  {
			"Expr": "expression",
			"Type": "expression_type",
			"All": "bool_or_string",
			"Follow": "bool_or_string",
			"Selectors": {...}
		}
	}
}
{
	"Selectors": {
		"title":  {
			"Expr": "//head/title",
			"Type": "xpath"
		}
	}
}
Nested selectors
{
	"Selectors": {
		"body":  {
			"Expr": "//body",
			"Type": "xpath",
			"Selectors": {
				"p": "//p"
			}
		}
	}
}
Find all
{
	"Selectors": {
		"a":  {
			"Expr": "//body/a",
			"Type": "xpath",
			"All": true
		}
	}
}
Follow URLs
{
	"Selectors": {
		"a":  {
			"Expr": "//body/a",
			"Type": "xpath",
			"All": true,
			"Follow": true,
			"Selectors": {
				"title": "//head/title"
			}
		}
	}
}
{
	"Selectors": {
		"a":  {
			"Expr": "//body/a",
			"Type": "xpath",
			"All": true,
			"Follow": true,
			"Proxy": "http://proxy-url.com:8080",
			"UseCookies": true,
			"Selectors": {
				"title": "//head/title"
			}
		}
	}
}
Custom fields
{
	"Selectors": {
		"title":  {
			"Expr": "//head/title",
			"Type": "xpath",
			"required": true
		}
	}
}

Example

{
	"Method": "GET",
	"URL": "https://example.com",
	"Header": {
		"User-Agent": "test/0.0.1"
	},
	"Timeout": 5,
	"Selectors": {
		"a":  {
			"Expr": "//body/a",
			"Type": "xpath",
			"All": true,
			"Follow": true,
			"Selectors": {
				"title": "//head/title"
			}
		}
	}
}

Documentation

Overview

Colibri is an extensible web crawling and scraping framework for Go, used to crawl and extract structured data on the web.

Index

Constants

View Source
const (
	KeyDelay = "Delay"

	KeyFields = "Fields"

	KeyHeader = "Header"

	KeyIgnoreRobotsTxt = "IgnoreRobotsTxt"

	KeyMethod = "Method"

	KeyProxy = "Proxy"

	KeySelectors = "Selectors"

	KeyTimeout = "Timeout"

	KeyUseCookies = "UseCookies"

	KeyURL = "URL"
)
View Source
const (
	KeyAll = "All"

	KeyExpr = "Expr"

	KeyFollow = "Follow"

	KeyName = "Name"

	KeyType = "Type"
)
View Source
const DefaultUserAgent = "colibri/0.1"

DefaultUserAgent is the default User-Agent used for requests.

Variables

View Source
var (
	// ErrClientIsNil returned when Client is nil.
	ErrClientIsNil = errors.New("Client is nil")

	// ErrParserIsNil returned when Parser is nil.
	ErrParserIsNil = errors.New("Parser is nil")

	// ErrRulesIsNil returned when rules are nil.
	ErrRulesIsNil = errors.New("Rules is nil")
)
View Source
var (
	// ErrMustBeConvBool is returned when the value is not convertible to bool.
	ErrMustBeConvBool = errors.New("must be a bool, string or number")

	// ErrMustBeConvDuration is returned when the value is not convertible to time.Duration.
	ErrMustBeConvDuration = errors.New("must be a string or number")

	// ErrMustBeString is returned when the value must be a string.
	ErrMustBeString = errors.New("must be a string")

	// ErrInvalidHeader is returned when the header is invalid.
	ErrInvalidHeader = errors.New("invalid header")
)
View Source
var (
	// ErrInvalidSelector is returned when the selector is invalid.
	ErrInvalidSelector = errors.New("invalid selector")

	// ErrInvalidSelectors is returned when the selectors are invalid.
	ErrInvalidSelectors = errors.New("invalid selectors")
)
View Source
var ErrNotAssignable = errors.New("value is not assignable to field")

ErrNotAssignable is returned when the value of RawRules cannot be assigned to the structure field.

Functions

func AddError

func AddError(errs error, key string, err error) error

AddError adds an error to the existing error set. If errs or err is nil or the key is empty, no operation is performed. If errs is not of type *Errs, a new error of type *Errs is returned and the original error is stored with the key "#".

func DefaultConvFunc

func DefaultConvFunc(key string, rawValue any) (any, error)

DefaultConvFunc is the ConvFunc used by default by the NewRules function.

func ReleaseRules

func ReleaseRules(rules *Rules)

ReleaseRules clears and sends the rules to the rules pool.

func ReleaseSelector

func ReleaseSelector(selector *Selector)

ReleaseSelector clears and sends the selector to the selector pool.

func ToURL

func ToURL(value any) (*url.URL, error)

ToURL converts a value to a *url.URL.

Types

type Colibri

type Colibri struct {
	Client    HTTPClient
	Delay     Delay
	RobotsTxt RobotsTxt
	Parser    Parser
}

Colibri performs HTTP requests and parses the content of the response based on rules.

func New

func New() *Colibri

New returns a new empty Colibri structure.

func (*Colibri) Clear

func (c *Colibri) Clear()

Clear cleans the fields of the structure.

func (*Colibri) Do

func (c *Colibri) Do(rules *Rules) (resp Response, err error)

Do performs an HTTP request according to the rules.

func (*Colibri) Extract

func (c *Colibri) Extract(rules *Rules) (resp Response, output map[string]any, err error)

Extract performs the HTTP request and parses the content of the response following the rules. It returns the response of the request, the data extracted with the selectors and an error (if any).

type ConvFunc

type ConvFunc func(key string, rawValue any) (any, error)

ConvFunc processes the value based on the key.

type Delay

type Delay interface {
	// Wait waits for the previous HTTP request to the same URL and stores
	// the timestamp, then starts the calculated delay with the timestamp
	// and the specified duration of the delay.
	Wait(u *url.URL, duration time.Duration)

	// Done warns that an HTTP request has been made to the URL.
	Done(u *url.URL)

	// Stamp records the time at which the HTTP request to the URL was made.
	Stamp(u *url.URL)

	// Clear cleans the fields of the structure.
	Clear()
}

Delay manages the delay between each HTTP request.

type Errs

type Errs struct {
	// contains filtered or unexported fields
}

Errs is a structure that stores and manages errors.

func (*Errs) Add

func (errs *Errs) Add(key string, err error) *Errs

Add adds an error to the error set. If the key or error is nil, no operation is performed. If there is already an error stored with the same key, the error is stored with the key + # + key number. Returns a pointer to the updated error structure.

func (*Errs) Error

func (errs *Errs) Error() string

Error returns a string representation of errors stored in JSON format.

func (*Errs) Get

func (errs *Errs) Get(key string) (err error, ok bool)

Get returns the error associated with a key and a boolean indicating whether the key exists. If the key does not exist, a nil error and false are returned.

func (*Errs) MarshalJSON

func (errs *Errs) MarshalJSON() ([]byte, error)

MarshalJSON returns the JSON representation of the stored errors.

type HTTPClient

type HTTPClient interface {
	// Do makes HTTP requests.
	Do(c *Colibri, rules *Rules) (Response, error)

	// Clear cleans the fields of the structure.
	Clear()
}

HTTPClient represents an HTTP client.

type Parser

type Parser interface {
	// Match returns true if the Content-Type is compatible with the Parser.
	Match(contentType string) bool

	// Parse parses the response based on the rules.
	Parse(rules *Rules, resp Response) (map[string]any, error)

	// Clear cleans the fields of the structure.
	Clear()
}

Parser represents a parser of the response content.

type RawRules

type RawRules map[string]any

RawRules represents the raw rules.

type Response

type Response interface {
	// URL returns the URI of the request used to obtain the response.
	URL() *url.URL

	// StatusCode returns the status code.
	StatusCode() int

	// Header returns the HTTP header of the response.
	Header() http.Header

	// Body returns the response body.
	Body() io.ReadCloser

	// Do Colibri Do method wrapper.
	// Wraps the Colibri used to obtain the HTTP response.
	Do(rules *Rules) (Response, error)

	// Extract Colibri Extract method wrapper.
	// Wraps the Colibri used to obtain the HTTP response.
	Extract(rules *Rules) (Response, map[string]any, error)
}

Response represents an HTTP response.

type RobotsTxt

type RobotsTxt interface {
	// IsAllowed verifies that the User-Agent can access the URL.
	IsAllowed(c *Colibri, rules *Rules) error

	// Clear cleans the fields of the structure.
	Clear()
}

RobotsTxt represents a robots.txt parser.

type Rules

type Rules struct {
	// Method specifies the HTTP method (GET, POST, PUT, ...).
	Method string

	// URL specifies the requested URI.
	URL *url.URL

	// Proxy specifies the proxy URI.
	Proxy *url.URL

	// Header contains the HTTP header.
	Header http.Header

	// Timeout specifies the time limit for the HTTP request.
	Timeout time.Duration

	// UseCookies specifies whether the client should send and store Cookies.
	UseCookies bool

	// IgnoreRobotsTxt specifies whether robots.txt should be ignored.
	IgnoreRobotsTxt bool

	// Delay specifies the delay time between requests.
	Delay time.Duration

	// Selectors
	Selectors []*Selector

	// Fields stores additional data.
	Fields map[string]any
}

func NewRules

func NewRules(rawRules RawRules) (*Rules, error)

NewRules returns the rules processed using DefaultConvFunc.

func NewRulesWithConvFunc

func NewRulesWithConvFunc(rawRules RawRules, convFunc ConvFunc) (*Rules, error)

NewRulesWithConvFunc returns the processed rules.

func (*Rules) Clear

func (rules *Rules) Clear()

Clear clears all fields of the rules. Selectors are released, see ReleaseSelector.

func (*Rules) Clone

func (rules *Rules) Clone() *Rules

Clone returns a copy of the original rules. Cloning the Fields field may produce errors; avoid storing pointers.

func (*Rules) UnmarshalJSON

func (rules *Rules) UnmarshalJSON(b []byte) error

type Selector

type Selector struct {
	// Name selector name.
	Name string

	// Expr stores the selector expression.
	Expr string

	// Type stores the type of the selector expression.
	Type string

	// All specifies whether all elements are to be found.
	All bool

	// Follow specifies whether the URLs found by the selector should be followed.
	Follow bool

	// Selectors nested selectors.
	Selectors []*Selector

	// Fields stores additional data.
	Fields map[string]any
}

func CloneSelectors

func CloneSelectors(selectors []*Selector) []*Selector

CloneSelectors clones the selectors.

func (*Selector) Clear

func (selector *Selector) Clear()

Clear clears all fields of the selector. Selectors are released, see ReleaseSelector.

func (*Selector) Clone

func (selector *Selector) Clone() *Selector

Clone returns a copy of the original selector. Cloning the Fields field may produce errors; avoid storing pointers.

func (*Selector) Rules

func (selector *Selector) Rules(src *Rules) *Rules

Rules returns a Rules with the Selector data. Copies the nested selectors from the Selector and gets the rest of the data from Fields, if they are not in Fields it uses the data from the source Rules.

Directories

Path Synopsis
parsers are interfaces that Colibri can use to parse the content of the responses.
parsers are interfaces that Colibri can use to parse the content of the responses.

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL