models

package
v0.0.18
Published: Mar 20, 2024 License: AGPL-3.0 Imports: 21 Imported by: 0

Documentation

Index

Constants

This section is empty.

Variables

This section is empty.

Functions

func ArchiveFromHashDoc

func ArchiveFromHashDoc(doc *HashDoc, rs RemoteStorage) (*exporter.MyArchive, error)

ArchiveFromHashDoc reads an archive for the given HashDoc from remote storage
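
As a hedged sketch of how this fits together (assuming the package is imported as models and rs is some RemoteStorage implementation; the helper function and document id below are hypothetical):

// exportDoc is a hypothetical helper: build the tree from remote storage,
// locate a document by id, and read its blobs into an archive.
func exportDoc(rs models.RemoteStorage, documentID string) error {
	tree, err := models.BuildTree(rs)
	if err != nil {
		return err
	}
	doc, err := tree.FindDoc(documentID)
	if err != nil {
		return err
	}
	archive, err := models.ArchiveFromHashDoc(doc, rs)
	if err != nil {
		return err
	}
	_ = archive // hand the *exporter.MyArchive on to whatever performs the export
	return nil
}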

func FileHashAndSize

func FileHashAndSize(file string) ([]byte, int64, error)

func FromTime added in v0.0.12

func FromTime(t time.Time) string

func Hash

func Hash(r io.Reader) (string, int64, error)

func HashEntries

func HashEntries(entries []*HashEntry) (string, error)

func ToTime added in v0.0.12

func ToTime(timeStamp string) (t time.Time, err error)
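
A hedged sketch of these helpers (assuming the package is imported as models; standard-library imports fmt, log, strings and time are omitted, and the file name is illustrative):

// Hash a stream and a file, then round-trip a timestamp.
h, n, err := models.Hash(strings.NewReader("payload"))
if err != nil {
	log.Fatal(err)
}
fmt.Printf("stream: hash=%s size=%d\n", h, n)

fileHash, size, err := models.FileHashAndSize("document.pdf")
if err != nil {
	log.Fatal(err)
}
fmt.Printf("file: hash=%x size=%d\n", fileHash, size)

ts := models.FromTime(time.Now()) // time.Time -> timestamp string
t, err := models.ToTime(ts)       // and back again
if err != nil {
	log.Fatal(err)
}
fmt.Println(ts, t)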

Types

type ContentFile added in v0.0.12

type ContentFile struct {
	DummyDocument  bool          `json:"dummyDocument"`
	ExtraMetadata  ExtraMetadata `json:"extraMetadata"`
	FileType       string        `json:"fileType"`
	FontName       string        `json:"fontName"`
	LastOpenedPage int           `json:"lastOpenedPage"`
	LineHeight     int           `json:"lineHeight"`
	Margins        int           `json:"margins"`
	Orientation    string        `json:"orientation"`
	PageCount      int           `json:"pageCount"`
	Pages          []interface{} `json:"pages"`
	TextScale      int           `json:"textScale"`
	Transform      Transform     `json:"transform"`
	SizeInBytes    string        `json:"sizeInBytes"`
}
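
For illustration, a content blob could be decoded into this struct with encoding/json; the JSON below is made up, not taken from a real device:

var cf models.ContentFile
data := []byte(`{"fileType":"pdf","pageCount":2,"orientation":"portrait","sizeInBytes":"12345"}`)
if err := json.Unmarshal(data, &cf); err != nil {
	log.Fatal(err)
}
fmt.Println(cf.FileType, cf.PageCount, cf.Orientation)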

type ExtraMetadata added in v0.0.12

type ExtraMetadata struct {
	LastPen             string `json:"LastPen"`
	LastTool            string `json:"LastTool"`
	ThicknessScale      string `json:"ThicknessScale"`
	LastFinelinerv2Size string `json:"LastFinelinerv2Size"`
}

type FieldReader

type FieldReader struct {
	// contains filtered or unexported fields
}

FieldReader iterates over delimited fields

func NewFieldReader

func NewFieldReader(line string) FieldReader

NewFieldReader creates a FieldReader from a string line

func (*FieldReader) HasNext

func (fr *FieldReader) HasNext() bool

HasNext reports whether there are more fields

func (*FieldReader) Next

func (fr *FieldReader) Next() (string, error)

Next reads the next field
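
A hedged sketch of consuming a line with FieldReader (the line content and its delimiter are illustrative; the exact index format is not documented here):

fr := models.NewFieldReader("somehash:80000000:documentID:4:12345")
for fr.HasNext() {
	field, err := fr.Next()
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(field)
}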

type HashDoc

type HashDoc struct {
	Files []*HashEntry
	HashEntry

	// extra fields that are serialized
	MetadataFile
	PayloadType string
	PayloadSize int64
}

HashDoc is a document in a hash tree

func NewHashDoc

func NewHashDoc(name, documentID string, docType common.EntryType) *HashDoc

func NewHashDocWithMeta added in v0.0.12

func NewHashDocWithMeta(documentID string, meta MetadataFile) *HashDoc

func (*HashDoc) AddFile

func (d *HashDoc) AddFile(e *HashEntry) error

AddFile adds an entry

func (*HashDoc) IndexReader

func (d *HashDoc) IndexReader() (io.ReadCloser, error)

IndexReader returns a reader for the document index

func (*HashDoc) Line

func (d *HashDoc) Line() string

Line returns the document's index line

func (*HashDoc) MetadataReader

func (d *HashDoc) MetadataReader() (hash string, reader io.Reader, err error)

func (*HashDoc) Mirror

func (d *HashDoc) Mirror(e *HashEntry, r RemoteStorage) error

Mirror makes the document match the given entry, reading from remote storage

func (*HashDoc) ReadMetadata

func (d *HashDoc) ReadMetadata(fileEntry *HashEntry, r RemoteStorage) error

ReadMetadata tries to read the metadata blob if this entry is metadata

func (*HashDoc) Rehash

func (d *HashDoc) Rehash() error

Rehash re-calculates the hash
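
A hedged sketch that ties the HashDoc methods together (assuming the package is imported as models; names, hashes and sizes are illustrative):

meta := models.MetadataFile{DocumentName: "Notebook"}
doc := models.NewHashDocWithMeta("documentID", meta)

// Attach a file entry, recompute the document hash, and emit its index line.
entry := models.NewHashEntry("somehash", "documentID.metadata", 42)
if err := doc.AddFile(entry); err != nil {
	log.Fatal(err)
}
if err := doc.Rehash(); err != nil {
	log.Fatal(err)
}
fmt.Println(doc.Line())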

type HashEntry

type HashEntry struct {
	Hash      string
	Type      string
	EntryName string
	Subfiles  int
	Size      int64
}

HashEntry is an entry in a doc (.content, .meta, the payload, etc.) with a hash

func NewHashEntry added in v0.0.12

func NewHashEntry(hash, documentID string, size int64) *HashEntry

NewHashEntry creates a HashEntry from a hash, document id, and size

func (*HashEntry) IsContent added in v0.0.12

func (h *HashEntry) IsContent() bool

func (*HashEntry) IsMetadata added in v0.0.12

func (h *HashEntry) IsMetadata() bool

IsMetadata reports whether this entry points to a metadata blob

func (*HashEntry) Line

func (h *HashEntry) Line() string

Line returns the entry's line in the index file
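
A short sketch of these helpers; the entry values are illustrative, and the assumption that IsContent and IsMetadata classify an entry by its EntryName is not documented here:

e := models.HashEntry{Hash: "somehash", EntryName: "documentID.metadata", Size: 42}
switch {
case e.IsMetadata():
	fmt.Println("points to a metadata blob")
case e.IsContent():
	fmt.Println("points to a content blob")
}
fmt.Println(e.Line()) // the entry as an index-file line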

type HashTree

type HashTree struct {
	Hash       string
	Generation int64
	Docs       []*HashDoc
}

HashTree is a tree of hashes used for syncing and faster diffing

func BuildTree

func BuildTree(provider RemoteStorage) (*HashTree, error)

BuildTree builds a tree from remote storage

func LoadTree

func LoadTree(cacheFile string) (*HashTree, error)

LoadTree loads a cached tree to avoid parsing all the blobs

func (*HashTree) Add

func (t *HashTree) Add(d *HashDoc) error

Add adds a doc to the tree

func (*HashTree) FindDoc

func (t *HashTree) FindDoc(documentID string) (*HashDoc, error)

FindDoc finds a document by its document id

func (*HashTree) Mirror

func (t *HashTree) Mirror(r RemoteStorage) (changed bool, err error)

Mirror makes the tree look like the storage

func (*HashTree) Rehash

func (t *HashTree) Rehash() error

Rehash recalculates the root hash from all docs

func (*HashTree) Remove

func (t *HashTree) Remove(documentID string) error

Remove removes a document from the tree

func (*HashTree) RootIndex

func (t *HashTree) RootIndex() (io.ReadCloser, error)

RootIndex reads the root index

func (*HashTree) Save

func (t *HashTree) Save(cacheFile string) error

Save saves the tree to the given cache file
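
A hedged sketch of a typical refresh cycle, assuming rs is any RemoteStorage implementation and the cache file name is illustrative:

tree, err := models.LoadTree("tree.cache")
if err != nil {
	log.Fatal(err)
}
// Bring the cached tree in line with what the storage currently holds.
changed, err := tree.Mirror(rs)
if err != nil {
	log.Fatal(err)
}
if changed {
	if err := tree.Save("tree.cache"); err != nil {
		log.Fatal(err)
	}
}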

type MetadataFile

type MetadataFile struct {
	DocumentName     string           `json:"visibleName"`
	CollectionType   common.EntryType `json:"type"`
	Parent           string           `json:"parent"`
	LastModified     string           `json:"lastModified"`
	LastOpened       string           `json:"lastOpened"`
	Version          int              `json:"version"`
	Pinned           bool             `json:"pinned"`
	Synced           bool             `json:"synced"`
	Modified         bool             `json:"modified"`
	Deleted          bool             `json:"deleted"`
	MetadataModified bool             `json:"metadatamodified"`
}

MetadataFile is the contents of a metadata file
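
For illustration, a MetadataFile could be filled in and marshalled with encoding/json; that LastModified uses the format produced by FromTime is an assumption, not documented here:

meta := models.MetadataFile{
	DocumentName: "Notebook",
	Parent:       "",                          // assumption: "" for the root folder
	LastModified: models.FromTime(time.Now()), // assumption: FromTime produces this field's format
	Version:      1,
}
data, err := json.MarshalIndent(meta, "", "  ")
if err != nil {
	log.Fatal(err)
}
fmt.Println(string(data))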

type RemoteStorage

type RemoteStorage interface {
	// GetRootIndex returns the rootIndex
	GetRootIndex() (hash string, generation int64, err error)

	// GetReader returns a reader for the blob with that hash
	GetReader(hash string) (io.ReadCloser, error)
}

RemoteStorage is an abstraction over the blob storage

type RemoteStorageWriter

type RemoteStorageWriter interface {
	WriteRootIndex(generation int64, hash string) (gen int64, err error)
	Write(hash string, reader io.Reader) error
}

RemoteStorageWriter is the write abstraction for the blob storage
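
As an illustration only, both interfaces could be backed by a plain directory of blob files; the layout below (one file per hash plus a "root" file holding "hash generation", and the generation bump in WriteRootIndex) is entirely hypothetical. Standard-library imports fmt, io, os and path/filepath are assumed.

// dirStorage is a hypothetical RemoteStorage / RemoteStorageWriter backed by a
// local directory: one file per blob hash, plus a "root" file that holds
// "<hash> <generation>".
type dirStorage struct{ dir string }

func (s dirStorage) GetRootIndex() (string, int64, error) {
	b, err := os.ReadFile(filepath.Join(s.dir, "root"))
	if err != nil {
		return "", 0, err
	}
	var hash string
	var gen int64
	if _, err := fmt.Sscan(string(b), &hash, &gen); err != nil {
		return "", 0, err
	}
	return hash, gen, nil
}

func (s dirStorage) GetReader(hash string) (io.ReadCloser, error) {
	return os.Open(filepath.Join(s.dir, hash))
}

func (s dirStorage) Write(hash string, reader io.Reader) error {
	f, err := os.Create(filepath.Join(s.dir, hash))
	if err != nil {
		return err
	}
	defer f.Close()
	_, err = io.Copy(f, reader)
	return err
}

func (s dirStorage) WriteRootIndex(generation int64, hash string) (int64, error) {
	// Hypothetical: store the new root hash and bump the generation by one.
	newGen := generation + 1
	data := fmt.Sprintf("%s %d", hash, newGen)
	if err := os.WriteFile(filepath.Join(s.dir, "root"), []byte(data), 0o644); err != nil {
		return 0, err
	}
	return newGen, nil
}

// compile-time checks that dirStorage satisfies both interfaces
var (
	_ models.RemoteStorage       = dirStorage{}
	_ models.RemoteStorageWriter = dirStorage{}
)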

type RootHistory added in v0.0.14

type RootHistory struct {
	Generation int64
	Date       time.Time
	Hash       string
}

func ReadRootHistory added in v0.0.14

func ReadRootHistory(filename string) (history []*RootHistory, err error)

func (*RootHistory) GetHashTree added in v0.0.14

func (h *RootHistory) GetHashTree(r RemoteStorage) (t *HashTree, err error)

func (*RootHistory) OpenIndex added in v0.0.14

func (h *RootHistory) OpenIndex(r RemoteStorage) (io.ReadCloser, error)
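
A hedged sketch of inspecting past generations (the history file name is illustrative and rs is any RemoteStorage implementation):

history, err := models.ReadRootHistory(".root.history")
if err != nil {
	log.Fatal(err)
}
for _, h := range history {
	fmt.Println(h.Generation, h.Date.Format(time.RFC3339), h.Hash)
}
if len(history) > 0 {
	// Rebuild the full tree for one recorded generation.
	tree, err := history[0].GetHashTree(rs)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("docs at that generation:", len(tree.Docs))
}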

type Transform added in v0.0.12

type Transform struct {
	M11 int `json:"m11"`
	M12 int `json:"m12"`
	M13 int `json:"m13"`
	M21 int `json:"m21"`
	M22 int `json:"m22"`
	M23 int `json:"m23"`
	M31 int `json:"m31"`
	M32 int `json:"m32"`
	M33 int `json:"m33"`
}
