doddish

package
v0.1.10 Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: May 14, 2026 License: MIT Imports: 14 Imported by: 0

Documentation

Index

Constants

View Source
const (
	OpOr            = Op(',')
	OpAnd           = Op(' ')
	OpGroupOpen     = Op('[')
	OpGroupClose    = Op(']')
	OpNegation      = Op('^')
	OpExact         = Op('=')
	OpNewline       = Op('\n')
	OpSigilLatest   = Op(':')
	OpSigilHistory  = Op('+')
	OpSigilExternal = Op('.')
	OpSigilHidden   = Op('?')
	OpPathSeparator = Op('/')
	OpType          = Op('!')
	OpVirtual       = Op('%')
	OpMarklId       = Op('@')
	OpTagSeparator  = Op('-')
	OpDescription   = Op('#') // not really an operator?
	OpReference     = Op('<')
)
View Source
const (
	TokenTypeIncomplete = TokenType(iota)
	TokenTypeOperator   // " =,.:+?^[]"
	TokenTypeIdentifier // ["one", "uno", "tag", "one", "type", "/browser/bookmark-1", "sha"...]
	TokenTypeLiteral    // ["\"some text\"", "\"some text \\\" with escape\""]
)

Variables

View Source
var (
	ErrEmptySeq       = newPkgError("empty seq")
	ErrMoreThanOneSeq = newPkgError("more than one seq")
)
View Source
var (
	// @abcd
	TokenMatcherBlobDigest = TokensMatcher{
		TokenMatcherOp('@'),
		TokenTypeIdentifier,
	}

	// !key
	TokenMatcherType = TokensMatcher{
		TokenMatcherOp('!'),
		TokenTypeIdentifier,
	}

	// !key@abcd
	TokenMatcherTypeLock = TokensMatcher{
		TokenMatcherOp('!'),
		TokenTypeIdentifier,
		TokenMatcherOp('@'),
		TokenTypeIdentifier,
	}

	// key@abcd
	TokenMatcherDodderTag = TokensMatcher{
		TokenTypeIdentifier,
		TokenMatcherOp('@'),
		TokenTypeIdentifier,
	}

	// key=value
	TokenMatcherKeyValue = TokensMatcher{
		TokenTypeIdentifier,
		TokenMatcherOp(OpExact),
	}

	// key="value"
	TokenMatcherKeyValueLiteral = TokensMatcher{
		TokenTypeIdentifier,
		TokenMatcherOp(OpExact),
		TokenTypeLiteral,
	}

	TokenMatcherTai = TokensMatcher{
		TokenTypeIdentifier,
		TokenMatcherOp('.'),
		TokenTypeIdentifier,
	}

	// <ref@sig
	TokenMatcherReferencedObject = TokensMatcher{
		TokenMatcherOp('<'),
		TokenTypeIdentifier,
		TokenMatcherOp('@'),
		TokenTypeIdentifier,
	}

	// alias<ref@sig
	TokenMatcherReferencedObjectAlias = TokensMatcher{
		TokenTypeIdentifier,
		TokenMatcherOp('<'),
		TokenTypeIdentifier,
		TokenMatcherOp('@'),
		TokenTypeIdentifier,
	}

	// <@digest
	TokenMatcherBlobReference = TokensMatcher{
		TokenMatcherOp('<'),
		TokenMatcherOp('@'),
		TokenTypeIdentifier,
	}

	// alias<@digest
	TokenMatcherBlobReferenceAlias = TokensMatcher{
		TokenTypeIdentifier,
		TokenMatcherOp('<'),
		TokenMatcherOp('@'),
		TokenTypeIdentifier,
	}
)

Functions

func IsErrEmptySeq

func IsErrEmptySeq(err error) bool

func IsErrUnsupportedSeq

func IsErrUnsupportedSeq(err error) bool

func ScanExactlyOneSeqWithDotAllowedInIdenfierFromString

func ScanExactlyOneSeqWithDotAllowedInIdenfierFromString(
	value string,
) (seq Seq, err Error)

func SeqCompare

func SeqCompare(left, right Seq) cmp.Result

func SeqComparePartial

func SeqComparePartial(left, right Seq) cmp.Result

func SeqsCompare

func SeqsCompare(left, right []Seq) cmp.Result

func TokenMatcherOr

func TokenMatcherOr(tm ...TokenMatcher) tokenMatcherOr

Types

type ComparableSeq

type ComparableSeq struct {
	Tokens         []Token
	TokenIndex     int
	TokenByteIndex int
	ByteCount      int
}

func (ComparableSeq) DecodeRune

func (seq ComparableSeq) DecodeRune() (char rune, width int)

func (ComparableSeq) Len

func (seq ComparableSeq) Len() int

func (ComparableSeq) Shift

func (seq ComparableSeq) Shift(amount int) ComparableSeq

type ComparableSeqs

type ComparableSeqs struct {
	Seqs       []Seq
	SeqIndex   int
	CurrentSeq ComparableSeq
	ByteCount  int
}

func GetComparableSeqs

func GetComparableSeqs(seqs []Seq) ComparableSeqs

func (ComparableSeqs) DecodeRune

func (seqs ComparableSeqs) DecodeRune() (char rune, width int)

func (ComparableSeqs) Len

func (seqs ComparableSeqs) Len() int

func (ComparableSeqs) Shift

func (seqs ComparableSeqs) Shift(amount int) ComparableSeqs

type ErrUnsupportedSeq

type ErrUnsupportedSeq struct {
	For string
	Seq
}

func (ErrUnsupportedSeq) Error

func (err ErrUnsupportedSeq) Error() string

func (ErrUnsupportedSeq) GetErrorType

func (err ErrUnsupportedSeq) GetErrorType() pkgErrDisamb

func (ErrUnsupportedSeq) Is

func (err ErrUnsupportedSeq) Is(target error) bool

type Error

type Error = errors.Typed[pkgErrDisamb]

type Op

type Op byte

func MakeOp

func MakeOp(char rune) (Op, operatorType)

func (Op) GetType

func (op Op) GetType() operatorType

func (Op) ToByte

func (op Op) ToByte() byte

func (Op) ToBytes

func (op Op) ToBytes() []byte

func (Op) ToRune

func (op Op) ToRune() rune

type Scanner

type Scanner struct {
	RuneScanner io.RuneScanner
	// contains filtered or unexported fields
}

func MakeScanner

func MakeScanner(runeScanner io.RuneScanner) *Scanner

func (*Scanner) CanScan

func (scanner *Scanner) CanScan() (ok bool)

func (*Scanner) ConsumeSpacesOrErrorOnFalse

func (scanner *Scanner) ConsumeSpacesOrErrorOnFalse() (ok bool)

Consumes any spaces currently available in the underlying RuneReader. If this returns false, it means that a read error has occurred, not that no spaces were consumed.

func (*Scanner) Error

func (scanner *Scanner) Error() error

func (*Scanner) GetSeq

func (scanner *Scanner) GetSeq() Seq

Valid only until the next call to any scan method. To keep the sequence, make a clone of it by calling Seq.Clone()

func (*Scanner) N

func (scanner *Scanner) N() int64

func (*Scanner) ReadRune

func (scanner *Scanner) ReadRune() (char rune, n int, err error)

func (*Scanner) Reset

func (scanner *Scanner) Reset(runeScanner io.RuneScanner)

func (*Scanner) Scan

func (scanner *Scanner) Scan() (ok bool)

TODO https://github.com/amarbel-llc/dodder/issues/31 Create sequence-returning methods for Scan family

func (*Scanner) ScanDotAllowedInIdentifiers

func (scanner *Scanner) ScanDotAllowedInIdentifiers() (ok bool)

func (*Scanner) ScanDotAllowedInIdentifiersOrError

func (scanner *Scanner) ScanDotAllowedInIdentifiersOrError() (Seq, Error)

func (*Scanner) ScanSkipSpace

func (scanner *Scanner) ScanSkipSpace() (ok bool)

func (*Scanner) UnreadRune

func (scanner *Scanner) UnreadRune() (err error)

TODO https://github.com/amarbel-llc/dodder/issues/31 Remove UnreadRune entirely, replace with different buffering strategy

func (*Scanner) Unscan

func (scanner *Scanner) Unscan()

type Seq

func (*Seq) Add

func (seq *Seq) Add(tokenType TokenType, contents []byte)

func (Seq) At

func (seq Seq) At(idx int) Token

func (Seq) Clone

func (seq Seq) Clone() (dst Seq)

func (Seq) GetBinaryMarshaler

func (seq Seq) GetBinaryMarshaler() SeqBinaryCoding

func (*Seq) GetBinaryUnmarshaler

func (seq *Seq) GetBinaryUnmarshaler() *SeqBinaryCoding

func (Seq) GetComparable

func (seq Seq) GetComparable() ComparableSeq

func (*Seq) GetSlice

func (seq *Seq) GetSlice() collections_slice.Slice[Token]

func (*Seq) GetSliceMutable

func (seq *Seq) GetSliceMutable() *collections_slice.Slice[Token]

func (Seq) GetTokenTypes

func (seq Seq) GetTokenTypes() TokenTypes

func (Seq) Len

func (seq Seq) Len() int

func (Seq) MatchAll

func (seq Seq) MatchAll(tokens ...TokenMatcher) bool

func (Seq) MatchEnd

func (seq Seq) MatchEnd(tokens ...TokenMatcher) (ok bool, left, right Seq)

func (Seq) MatchStart

func (seq Seq) MatchStart(tokens ...TokenMatcher) bool

func (Seq) PartitionFavoringLeft

func (seq Seq) PartitionFavoringLeft(
	m TokenMatcher,
) (ok bool, left, right Seq, partition Token)

func (Seq) PartitionFavoringRight

func (seq Seq) PartitionFavoringRight(
	m TokenMatcher,
) (ok bool, left, right Seq, partition Token)

func (*Seq) Reset

func (seq *Seq) Reset()

func (Seq) String

func (seq Seq) String() string

func (Seq) StringDebug

func (seq Seq) StringDebug() string

type SeqBinaryCoding

type SeqBinaryCoding Seq

func (SeqBinaryCoding) AppendBinary

func (marshaler SeqBinaryCoding) AppendBinary(bites []byte) ([]byte, error)

func (SeqBinaryCoding) MarshalBinary

func (marshaler SeqBinaryCoding) MarshalBinary() (bites []byte, err error)

func (SeqBinaryCoding) ToSeq

func (marshaler SeqBinaryCoding) ToSeq() Seq

func (*SeqBinaryCoding) ToSeqMutable

func (marshaler *SeqBinaryCoding) ToSeqMutable() *Seq

func (*SeqBinaryCoding) UnmarshalBinary

func (marshaler *SeqBinaryCoding) UnmarshalBinary(bites []byte) (err error)

type SeqRuneScanner

type SeqRuneScanner struct {
	Seq
	// contains filtered or unexported fields
}

func (*SeqRuneScanner) IsFull

func (scanner *SeqRuneScanner) IsFull() bool

func (*SeqRuneScanner) ReadRune

func (scanner *SeqRuneScanner) ReadRune() (r rune, size int, err error)

func (*SeqRuneScanner) Reset

func (scanner *SeqRuneScanner) Reset()

func (*SeqRuneScanner) UnreadRune

func (scanner *SeqRuneScanner) UnreadRune() (err error)

type Token

type Token struct {
	Type     TokenType
	Contents []byte
}

func (Token) AppendBinary

func (token Token) AppendBinary(bites []byte) ([]byte, error)

TODO remove support for empty tokens

func (Token) Clone

func (token Token) Clone() (dst Token)

func (Token) GetBinaryByteCount

func (token Token) GetBinaryByteCount() int

func (Token) MarshalBinary

func (token Token) MarshalBinary() ([]byte, error)

func (Token) String

func (token Token) String() string

func (*Token) UnmarshalBinary

func (token *Token) UnmarshalBinary(bites []byte) (err error)

TODO remove support for empty tokens

type TokenMatcher

type TokenMatcher interface {
	Match(Token) bool
}

type TokenMatcherOp

type TokenMatcherOp byte

func (TokenMatcherOp) Match

func (tokenMatcher TokenMatcherOp) Match(token Token) bool

type TokenType

type TokenType byte

func (TokenType) Match

func (expected TokenType) Match(actual Token) bool

func (TokenType) String

func (i TokenType) String() string

type TokenTypes

type TokenTypes []