Documentation ¶
Overview ¶
Package parser defines the Parser interface and implementations for the different test types: NDT, Paris Traceroute, and SideStream.
Index ¶
- Constants
- Variables
- func CopyStructToMap(sourceStruct interface{}, destinationMap map[string]bigquery.Value)
- func CreateTestId(fn string, bn string) string
- func ExtractLogtimeFromFilename(fileName string) (time.Time, error)
- func GetLogtime(filename PTFileName) (time.Time, error)
- func NewAnnotationParser(sink row.Sink, label, suffix string, ann v2as.Annotator) etl.Parser
- func NewDiscoParser(ins etl.Inserter) etl.Parser
- func NewNDT5ResultParser(sink row.Sink, label, suffix string, ann v2as.Annotator) etl.Parser
- func NewNDT7ResultParser(sink row.Sink, table, suffix string, ann v2as.Annotator) etl.Parser
- func NewParser(dt etl.DataType, ins etl.Inserter) etl.Parser
- func NewSinkParser(dt etl.DataType, sink row.Sink, table string, ann api.Annotator) etl.Parser
- func PackDataIntoSchema(ssValue map[string]string, logTime time.Time, testName string) (schema.SS, error)
- func Parse(meta map[string]bigquery.Value, testName string, testId string, ...) (cachedPTData, error)
- func ParseFirstLine(oneLine string) (protocol string, destIP string, serverIP string, err error)
- func ParseJSON(testName string, rawContent []byte, tableName string, taskFilename string) (schema.PTTest, error)
- func ParseKHeader(header string) ([]string, error)
- func ParseOneLine(snapshot string, varNames []string) (map[string]string, error)
- func ParsePT(testName string, rawContent []byte, tableName string, taskFilename string) (schema.PTTest, error)
- func PopulateSnap(ssValue map[string]string) (schema.Web100Snap, error)
- func ProcessAllNodes(allNodes []Node, server_IP, protocol string, tableName string) []schema.ScamperHop
- func ProcessOneTuple(parts []string, protocol string, currentLeaves []Node, ...) error
- func Unique(oneNode Node, list []Node) bool
- func Version() string
- type AnnotationParser
- func (ap *AnnotationParser) Accepted() int
- func (ap *AnnotationParser) Committed() int
- func (ap *AnnotationParser) Failed() int
- func (ap *AnnotationParser) Flush() error
- func (ap *AnnotationParser) FullTableName() string
- func (ap *AnnotationParser) IsParsable(testName string, data []byte) (string, bool)
- func (ap *AnnotationParser) ParseAndInsert(meta map[string]bigquery.Value, testName string, test []byte) error
- func (ap *AnnotationParser) RowsInBuffer() int
- func (ap *AnnotationParser) TableName() string
- func (ap *AnnotationParser) TaskError() error
- type Base
- type CyclestartLine
- type CyclestopLine
- type DiscoParser
- func (dp *DiscoParser) Flush() error
- func (dp *DiscoParser) FullTableName() string
- func (dp *DiscoParser) IsParsable(testName string, data []byte) (string, bool)
- func (dp *DiscoParser) ParseAndInsert(meta map[string]bigquery.Value, testName string, test []byte) error
- func (dp *DiscoParser) TableName() string
- func (dp *DiscoParser) TaskError() error
- type FakeRowStats
- type MetaFileData
- type Metadata
- type NDT5ResultParser
- func (dp *NDT5ResultParser) Accepted() int
- func (dp *NDT5ResultParser) Committed() int
- func (dp *NDT5ResultParser) Failed() int
- func (dp *NDT5ResultParser) Flush() error
- func (dp *NDT5ResultParser) FullTableName() string
- func (dp *NDT5ResultParser) IsParsable(testName string, data []byte) (string, bool)
- func (dp *NDT5ResultParser) ParseAndInsert(meta map[string]bigquery.Value, testName string, test []byte) error
- func (dp *NDT5ResultParser) RowsInBuffer() int
- func (dp *NDT5ResultParser) TableName() string
- func (dp *NDT5ResultParser) TaskError() error
- type NDT7ResultParser
- func (dp *NDT7ResultParser) Accepted() int
- func (dp *NDT7ResultParser) Committed() int
- func (dp *NDT7ResultParser) Failed() int
- func (dp *NDT7ResultParser) Flush() error
- func (dp *NDT7ResultParser) FullTableName() string
- func (dp *NDT7ResultParser) IsParsable(testName string, data []byte) (string, bool)
- func (dp *NDT7ResultParser) ParseAndInsert(meta map[string]bigquery.Value, testName string, test []byte) error
- func (dp *NDT7ResultParser) RowsInBuffer() int
- func (dp *NDT7ResultParser) TableName() string
- func (dp *NDT7ResultParser) TaskError() error
- type NDTParser
- type NDTTest
- type Node
- type NullParser
- type PTFileName
- type PTParser
- func (pt *PTParser) Flush() error
- func (pt *PTParser) InsertOneTest(oneTest cachedPTData)
- func (pt *PTParser) IsParsable(testName string, data []byte) (string, bool)
- func (pt *PTParser) NumBufferedTests() int
- func (pt *PTParser) ParseAndInsert(meta map[string]bigquery.Value, testName string, rawContent []byte) error
- func (pt *PTParser) ProcessLastTests() error
- func (pt *PTParser) TableName() string
- func (pt *PTParser) TaskError() error
- type Probe
- type Reply
- type RowBuffer
- type SSParser
- type ScamperLink
- type ScamperNode
- type TCPInfoParser
- func (p *TCPInfoParser) Accepted() int
- func (p *TCPInfoParser) Committed() int
- func (p *TCPInfoParser) Failed() int
- func (p *TCPInfoParser) Flush() error
- func (p *TCPInfoParser) FullTableName() string
- func (p *TCPInfoParser) IsParsable(testName string, data []byte) (string, bool)
- func (p *TCPInfoParser) ParseAndInsert(fileMetadata map[string]bigquery.Value, testName string, rawContent []byte) error
- func (p *TCPInfoParser) RowsInBuffer() int
- func (p *TCPInfoParser) TableName() string
- func (p *TCPInfoParser) TaskError() error
- type TS
- type TestInfo
- type TracelbLine
Constants ¶
const ( // These are all caps to reflect the linux constant names. WC_ADDRTYPE_IPV4 = 1 WC_ADDRTYPE_IPV6 = 2 LOCAL_AF_IPV4 = 0 LOCAL_AF_IPV6 = 1 )
const ( CLIENT_TO_SERVER = 0 SERVER_TO_CLIENT = 1 )
const IPv4_AF int32 = 2
const IPv6_AF int32 = 10
const PTBufferSize int = 2
Variables ¶
var ( ErrAnnotationError = errors.New("Annotation error") ErrNotAnnotatable = errors.New("object does not implement Annotatable") ErrRowNotPointer = errors.New("Row should be a pointer type") )
Errors that may be returned by BaseRowBuffer functions.
var (
// NDTOmitDeltas flag indicates if deltas should be suppressed.
NDTOmitDeltas, _ = strconv.ParseBool(os.Getenv("NDT_OMIT_DELTAS"))
// NDTEstimateBW flag indicates if we should run BW estimation code
// and annotate rows.
NDTEstimateBW, _ = strconv.ParseBool(os.Getenv("NDT_ESTIMATE_BW"))
)
Functions ¶
func CopyStructToMap ¶
CopyStructToMap takes a POINTER to an arbitrary SIMPLE struct and copies its fields into a value map. It will also make fields entirely lower case, for convenience when working with exported structs. Also, NEVER pass in something that is not a pointer to a struct, as this will cause a panic.
func CreateTestId ¶
func ExtractLogtimeFromFilename ¶
ExtractLogtimeFromFilename extracts the log time. The legacy filename is like "20170203T00:00:00Z_ALL0.web100". The current filename is like "20170315T01:00:00Z_173.205.3.39_0.web100". It returns the timestamp if the filename is in the right format.
func GetLogtime ¶
func GetLogtime(filename PTFileName) (time.Time, error)
Return timestamp parsed from file name.
func NewAnnotationParser ¶
NewAnnotationParser creates a new parser for annotation data.
func NewNDT5ResultParser ¶
NewNDT5ResultParser returns a parser for NDT5Result archives.
func NewNDT7ResultParser ¶
NewNDT7ResultParser returns a parser for NDT7Result archives.
func NewParser ¶
NewParser creates an appropriate parser for a given data type. DEPRECATED - parsers should migrate to use NewSinkParser.
func NewSinkParser ¶
NewSinkParser creates an appropriate parser for a given data type. Eventually all datatypes will use this instead of NewParser.
func PackDataIntoSchema ¶
func PackDataIntoSchema(ssValue map[string]string, logTime time.Time, testName string) (schema.SS, error)
PackDataIntoSchema packs data into the sidestream BigQuery schema and buffers it.
func Parse ¶
func Parse(meta map[string]bigquery.Value, testName string, testId string, rawContent []byte, tableName string) (cachedPTData, error)
Parse the raw test file into hops ParisTracerouteHop. TODO(dev): dedup the hops that are identical.
func ParseFirstLine ¶
Handle the first line, like "traceroute [(64.86.132.76:33461) -> (98.162.212.214:53849)], protocol icmp, algo exhaustive, duration 19 s"
func ParseJSON ¶
func ParseJSON(testName string, rawContent []byte, tableName string, taskFilename string) (schema.PTTest, error)
ParseJSON parses the raw jsonl test file into schema.PTTest.
func ParseKHeader ¶
ParseKHeader parses the first line of SS file, in format "K: cid PollTime LocalAddress LocalPort ... other_web100_variables_separated_by_space"
func ParseOneLine ¶
ParseOneLine parses a single line of sidestream data.
func ParsePT ¶
func ParsePT(testName string, rawContent []byte, tableName string, taskFilename string) (schema.PTTest, error)
ParsePT parses the json test file into schema.PTTest.
func PopulateSnap ¶
func PopulateSnap(ssValue map[string]string) (schema.Web100Snap, error)
PopulateSnap fills in the snapshot data.
func ProcessAllNodes ¶
func ProcessAllNodes(allNodes []Node, server_IP, protocol string, tableName string) []schema.ScamperHop
ProcessAllNodes take the array of the Nodes, and generate one ScamperHop entry from each node.
func ProcessOneTuple ¶
func ProcessOneTuple(parts []string, protocol string, currentLeaves []Node, allNodes, newLeaves *[]Node) error
For each 4 tuples, it is like: parts[0] is the hostname, like "if-ae-10-3.tcore2.DT8-Dallas.as6453.net". parts[1] is IP address like "(66.110.57.41)" or "(72.14.218.190):0,2,3,4,6,8,10" parts[2] are rtt in numbers like "0.298/0.318/0.340/0.016" parts[3] should always be "ms"
Types ¶
type AnnotationParser ¶
AnnotationParser parses the annotation datatype from the uuid-annotator.
func (*AnnotationParser) Accepted ¶
func (ap *AnnotationParser) Accepted() int
Accepted returns the count of all rows received through InsertRow(s)
func (*AnnotationParser) Committed ¶
func (ap *AnnotationParser) Committed() int
Committed returns the count of rows successfully committed to BQ.
func (*AnnotationParser) Failed ¶
func (ap *AnnotationParser) Failed() int
Failed returns the count of all rows that could not be committed.
func (*AnnotationParser) Flush ¶
func (ap *AnnotationParser) Flush() error
func (*AnnotationParser) FullTableName ¶
func (ap *AnnotationParser) FullTableName() string
func (*AnnotationParser) IsParsable ¶
func (ap *AnnotationParser) IsParsable(testName string, data []byte) (string, bool)
IsParsable returns the canonical test type and whether to parse data.
func (*AnnotationParser) ParseAndInsert ¶
func (ap *AnnotationParser) ParseAndInsert(meta map[string]bigquery.Value, testName string, test []byte) error
ParseAndInsert decodes the data.Annotation JSON and inserts it into BQ.
func (*AnnotationParser) RowsInBuffer ¶
func (ap *AnnotationParser) RowsInBuffer() int
RowsInBuffer returns the count of rows currently in the buffer.
func (*AnnotationParser) TableName ¶
func (ap *AnnotationParser) TableName() string
func (*AnnotationParser) TaskError ¶
func (ap *AnnotationParser) TaskError() error
TaskError returns non-nil if the task had enough failures to justify recording the entire task as in error. For now, this is any failure rate exceeding 10%.
type Base ¶
Base provides common parser functionality.
func NewBase ¶
NewBase creates a new parser.Base. This will generally be embedded in a type specific parser.
func (*Base) AnnotateAndFlush ¶
AnnotateAndFlush annotates the rows in the buffer, and synchronously pushes them through Inserter.
func (*Base) AnnotateAndPutAsync ¶
AnnotateAndPutAsync annotates the rows in the buffer (synchronously), and asynchronously pushes them to the Inserter.
type CyclestartLine ¶
type CyclestopLine ¶
type DiscoParser ¶
type DiscoParser struct { etl.RowStats // RowStats implemented for DiscoParser with an embedded struct. // contains filtered or unexported fields }
TODO(dev) add tests
func (*DiscoParser) Flush ¶
func (dp *DiscoParser) Flush() error
These functions are also required to complete the etl.Parser interface. For Disco, we just forward the calls to the Inserter.
func (*DiscoParser) FullTableName ¶
func (dp *DiscoParser) FullTableName() string
func (*DiscoParser) IsParsable ¶
func (dp *DiscoParser) IsParsable(testName string, data []byte) (string, bool)
IsParsable returns the canonical test type and whether to parse data.
func (*DiscoParser) ParseAndInsert ¶
func (dp *DiscoParser) ParseAndInsert(meta map[string]bigquery.Value, testName string, test []byte) error
Disco data is a JSON representation that should be pushed directly into BigQuery. For now, though, we parse into a struct, for compatibility with the current inserter backend.
Returns:
error on Decode error error on InsertRows error nil on success
TODO - optimize this to use the JSON directly, if possible.
func (*DiscoParser) TableName ¶
func (dp *DiscoParser) TableName() string
func (*DiscoParser) TaskError ¶
func (dp *DiscoParser) TaskError() error
type FakeRowStats ¶
type FakeRowStats struct { }
FakeRowStats provides trivial implementation of RowStats interface.
func (*FakeRowStats) Accepted ¶
func (s *FakeRowStats) Accepted() int
func (*FakeRowStats) Committed ¶
func (s *FakeRowStats) Committed() int
func (*FakeRowStats) Failed ¶
func (s *FakeRowStats) Failed() int
func (*FakeRowStats) RowsInBuffer ¶
func (s *FakeRowStats) RowsInBuffer() int
type MetaFileData ¶
type MetaFileData struct { TestName string DateTime time.Time SummaryData []int32 // Note: this is ignored in the legacy pipeline. Tls bool Websockets bool Fields map[string]string // All of the string fields. }
MetaFileData is the parsed info from the .meta file.
func ProcessMetaFile ¶
func ProcessMetaFile(tableName string, suffix string, testName string, content []byte) *MetaFileData
ProcessMetaFile parses the .meta file. TODO(dev) - add unit tests TODO(prod) - For tests that include a meta file, should respect the test filenames. See ndt_meta_log_parser_lib.cc
func (*MetaFileData) PopulateConnSpec ¶
func (mfd *MetaFileData) PopulateConnSpec(connSpec schema.Web100ValueMap)
type NDT5ResultParser ¶
NDT5ResultParser handles parsing of NDT5Result archives.
func (*NDT5ResultParser) Accepted ¶
func (dp *NDT5ResultParser) Accepted() int
Accepted returns the count of all rows received through InsertRow(s)
func (*NDT5ResultParser) Committed ¶
func (dp *NDT5ResultParser) Committed() int
Committed returns the count of rows successfully committed to BQ.
func (*NDT5ResultParser) Failed ¶
func (dp *NDT5ResultParser) Failed() int
Failed returns the count of all rows that could not be committed.
func (*NDT5ResultParser) Flush ¶
func (dp *NDT5ResultParser) Flush() error
func (*NDT5ResultParser) FullTableName ¶
func (dp *NDT5ResultParser) FullTableName() string
func (*NDT5ResultParser) IsParsable ¶
func (dp *NDT5ResultParser) IsParsable(testName string, data []byte) (string, bool)
IsParsable returns the canonical test type and whether to parse data.
func (*NDT5ResultParser) ParseAndInsert ¶
func (dp *NDT5ResultParser) ParseAndInsert(meta map[string]bigquery.Value, testName string, test []byte) error
ParseAndInsert decodes the data.NDT5Result JSON and inserts it into BQ.
func (*NDT5ResultParser) RowsInBuffer ¶
func (dp *NDT5ResultParser) RowsInBuffer() int
RowsInBuffer returns the count of rows currently in the buffer.
func (*NDT5ResultParser) TableName ¶
func (dp *NDT5ResultParser) TableName() string
func (*NDT5ResultParser) TaskError ¶
func (dp *NDT5ResultParser) TaskError() error
TaskError returns non-nil if the task had enough failures to justify recording the entire task as in error. For now, this is any failure rate exceeding 10%.
type NDT7ResultParser ¶
NDT7ResultParser handles parsing of NDT7Result archives.
func (*NDT7ResultParser) Accepted ¶
func (dp *NDT7ResultParser) Accepted() int
Accepted returns the count of all rows received through InsertRow(s)
func (*NDT7ResultParser) Committed ¶
func (dp *NDT7ResultParser) Committed() int
Committed returns the count of rows successfully committed to BQ.
func (*NDT7ResultParser) Failed ¶
func (dp *NDT7ResultParser) Failed() int
Failed returns the count of all rows that could not be committed.
func (*NDT7ResultParser) Flush ¶
func (dp *NDT7ResultParser) Flush() error
func (*NDT7ResultParser) FullTableName ¶
func (dp *NDT7ResultParser) FullTableName() string
func (*NDT7ResultParser) IsParsable ¶
func (dp *NDT7ResultParser) IsParsable(testName string, data []byte) (string, bool)
IsParsable returns the canonical test type and whether to parse data.
func (*NDT7ResultParser) ParseAndInsert ¶
func (dp *NDT7ResultParser) ParseAndInsert(meta map[string]bigquery.Value, testName string, test []byte) error
ParseAndInsert decodes the data.NDT7Result JSON and inserts it into BQ.
func (*NDT7ResultParser) RowsInBuffer ¶
func (dp *NDT7ResultParser) RowsInBuffer() int
RowsInBuffer returns the count of rows currently in the buffer.
func (*NDT7ResultParser) TableName ¶
func (dp *NDT7ResultParser) TableName() string
func (*NDT7ResultParser) TaskError ¶
func (dp *NDT7ResultParser) TaskError() error
TaskError returns non-nil if the task had enough failures to justify recording the entire task as in error. For now, this is any failure rate exceeding 10%.
type NDTParser ¶
type NDTParser struct { Base // contains filtered or unexported fields }
NDTParser implements the Parser interface for NDT.
func NewNDTParser ¶
NewNDTParser returns a new NDT parser. Caller may include an annotator. If not provided, the default annotator is used. TODO - clean up the vararg annotator hack once it is standard in all parsers.
func (*NDTParser) Flush ¶
Flush completes processing of final task group, if any, and flushes buffer to BigQuery.
func (*NDTParser) IsParsable ¶
IsParsable returns the canonical test type and whether to parse data.
func (*NDTParser) ParseAndInsert ¶
func (n *NDTParser) ParseAndInsert(taskInfo map[string]bigquery.Value, testName string, content []byte) error
ParseAndInsert extracts the last snaplog from the given raw snap log.
type NDTTest ¶
type NDTTest struct {
schema.Web100ValueMap
}
NDTTest is a wrapper for Web100ValueMap that implements Annotatable.
func (NDTTest) AnnotateClients ¶
func (ndt NDTTest) AnnotateClients(annMap map[string]*api.Annotations) error
AnnotateClients adds the client annotations. See parser.Annotatable This is a bit ugly because of the use of bigquery Value maps.
func (NDTTest) AnnotateServer ¶
func (ndt NDTTest) AnnotateServer(local *api.Annotations) error
AnnotateServer adds the server annotations. See parser.Annotatable This is a bit ugly because of the use of bigquery Value maps.
func (NDTTest) GetClientIPs ¶
GetClientIPs returns the client (remote) IP for annotation. See parser.Annotatable This is a bit ugly because of the use of bigquery Value maps.
func (NDTTest) GetLogTime ¶
GetLogTime returns the timestamp that should be used for annotation.
func (NDTTest) GetServerIP ¶
GetServerIP returns the server (local) IP for annotation. See parser.Annotatable This is a bit ugly because of the use of bigquery Value maps.
type NullParser ¶
type NullParser struct {
FakeRowStats
}
func (*NullParser) ParseAndInsert ¶
func (*NullParser) TableName ¶
func (np *NullParser) TableName() string
func (*NullParser) TaskError ¶
func (np *NullParser) TaskError() error
type PTFileName ¶
type PTFileName struct {
Name string
}
------------------------------------------------- The following are struct and funcs shared by legacy parsing and Json parsing. -------------------------------------------------
func (*PTFileName) GetDate ¶
func (f *PTFileName) GetDate() (string, bool)
type PTParser ¶
type PTParser struct { Base // contains filtered or unexported fields }
func (*PTParser) InsertOneTest ¶
func (pt *PTParser) InsertOneTest(oneTest cachedPTData)
func (*PTParser) IsParsable ¶
IsParsable returns the canonical test type and whether to parse data.
func (*PTParser) NumBufferedTests ¶
func (*PTParser) ParseAndInsert ¶
func (pt *PTParser) ParseAndInsert(meta map[string]bigquery.Value, testName string, rawContent []byte) error
ParseAndInsert parses a paris-traceroute log file and inserts results into a single row.
func (*PTParser) ProcessLastTests ¶
ProcessLastTests inserts the last several tests remaining in previousTests.
type RowBuffer ¶
type RowBuffer struct {
// contains filtered or unexported fields
}
RowBuffer provides all basic functionality generally needed for buffering, annotating, and inserting rows that implement Annotatable.
func (*RowBuffer) AddRow ¶
AddRow simply inserts a row into the buffer. Returns error if buffer is full. Not thread-safe. Should only be called by owning thread.
func (*RowBuffer) Annotate ¶
Annotate fetches annotations for all rows in the buffer. Not thread-safe. Should only be called by owning thread. TODO should convert this to operate on the rows, instead of the buffer. Then we can do it after TakeRows().
func (*RowBuffer) NumRowsForTest ¶
NumRowsForTest allows tests to find number of rows in buffer.
type SSParser ¶
type SSParser struct {
Base
}
SSParser provides a parser implementation for SideStream data.
func NewDefaultSSParser ¶
TODO get rid of this hack.
func NewSSParser ¶
NewSSParser creates a new sidestream parser.
func (*SSParser) IsParsable ¶
IsParsable returns the canonical test type and whether to parse data.
type ScamperLink ¶
type ScamperNode ¶
type ScamperNode struct { Addr string `json:"addr"` Name string `json:"name"` Q_ttl int `json:"q_ttl"` Linkc int64 `json:"linkc"` Links [][]ScamperLink `json:"links"` }
type TCPInfoParser ¶
TCPInfoParser handles parsing for TCPINFO datatype.
func NewTCPInfoParser ¶
NewTCPInfoParser creates a new TCPInfoParser. Duh. Annotator may be optionally passed in, or will be created if nil.
func (*TCPInfoParser) Accepted ¶
func (p *TCPInfoParser) Accepted() int
Accepted returns the count of all rows received through InsertRow(s)
func (*TCPInfoParser) Committed ¶
func (p *TCPInfoParser) Committed() int
Committed returns the count of rows successfully committed to BQ.
func (*TCPInfoParser) Failed ¶
func (p *TCPInfoParser) Failed() int
Failed returns the count of all rows that could not be committed.
func (*TCPInfoParser) Flush ¶
func (p *TCPInfoParser) Flush() error
Flush synchronously flushes any pending rows.
func (*TCPInfoParser) FullTableName ¶
func (p *TCPInfoParser) FullTableName() string
FullTableName implements etl.Parser.FullTableName
func (*TCPInfoParser) IsParsable ¶
func (p *TCPInfoParser) IsParsable(testName string, data []byte) (string, bool)
IsParsable returns the canonical test type and whether to parse data.
func (*TCPInfoParser) ParseAndInsert ¶
func (p *TCPInfoParser) ParseAndInsert(fileMetadata map[string]bigquery.Value, testName string, rawContent []byte) error
ParseAndInsert extracts all ArchivalRecords from the rawContent and inserts into a single row. Approximately 15 usec/snapshot.
func (*TCPInfoParser) RowsInBuffer ¶
func (p *TCPInfoParser) RowsInBuffer() int
RowsInBuffer returns the count of rows currently in the buffer.
func (*TCPInfoParser) TableName ¶
func (p *TCPInfoParser) TableName() string
TableName implements etl.Parser.TableName
func (*TCPInfoParser) TaskError ¶
func (p *TCPInfoParser) TaskError() error
TaskError return the task level error, based on failed rows, or any other criteria. TaskError returns non-nil if more than 10% of row commits failed.
type TestInfo ¶
type TestInfo struct { DateDir string // Optional leading date yyyy/mm/dd/ Date string // The date field from the test file name Time string // The time field Address string // The remote address field Suffix string // The filename suffix Timestamp time.Time // The parsed timestamp, with microsecond resolution }
TestInfo contains all the fields from a valid NDT test file name.
func ParseNDTFileName ¶
ParseNDTFileName parses the name of a tar or tgz file containing NDT test data.
type TracelbLine ¶
type TracelbLine struct { Type string `json:"type"` Version string `json:"version"` Userid float64 `json:"userid"` Method string `json:"method"` Src string `json:"src"` Dst string `json:"dst"` Start TS `json:"start"` Probe_size float64 `json:"probe_size"` Firsthop float64 `json:"firsthop"` Attempts float64 `json:"attempts"` Confidence float64 `json:"confidence"` Tos float64 `json:"tos"` Gaplint float64 `json:"gaplint"` Wait_timeout float64 `json:"wait_timeout"` Wait_probe float64 `json:"wait_probe"` Probec float64 `json:"probec"` Probec_max float64 `json:"probec_max"` Nodec float64 `json:"nodec"` Linkc float64 `json:"linkc"` Nodes []ScamperNode `json:"nodes"` }