Add database models and functions for file objects
This commit adds the DB models and functions needed to create, read, search through, update and delete files within sqlite3. Signed-off-by: Gabriel Adrian Samfira <gsamfira@cloudbasesolutions.com>
This commit is contained in:
parent
76f538ffb3
commit
7d53ce79b7
37 changed files with 3089 additions and 5 deletions
|
|
@ -16,6 +16,7 @@ package common
|
|||
|
||||
import (
|
||||
"context"
|
||||
"io"
|
||||
|
||||
commonParams "github.com/cloudbase/garm-provider-common/params"
|
||||
"github.com/cloudbase/garm/params"
|
||||
|
|
@ -178,6 +179,16 @@ type TemplateStore interface {
|
|||
DeleteTemplate(ctx context.Context, id uint) (err error)
|
||||
}
|
||||
|
||||
// FileObjectStore defines CRUD, tag-search and content-streaming operations
// for file objects stored in the database.
type FileObjectStore interface {
	// ListFileObjects returns one page of file object metadata. Page numbering
	// starts at 1; implementations apply defaults when page or pageSize is 0.
	ListFileObjects(ctx context.Context, page, pageSize uint64) (params.FileObjectPaginatedResponse, error)
	// SearchFileObjectByTags returns file objects that carry ALL of the given
	// tags (AND semantics), paginated. An empty tag list matches everything.
	SearchFileObjectByTags(ctx context.Context, tags []string, page, pageSize uint64) (params.FileObjectPaginatedResponse, error)
	// GetFileObject returns the metadata of a file object. The content itself
	// is not loaded; use OpenFileObjectContent to read it.
	GetFileObject(ctx context.Context, objID uint) (params.FileObject, error)
	// CreateFileObject stores a new file object, streaming the content from
	// reader. size must match the number of bytes the reader yields, as the
	// storage is pre-allocated to that size.
	CreateFileObject(ctx context.Context, name string, size int64, tags []string, reader io.Reader) (params.FileObject, error)
	// UpdateFileObject updates the mutable metadata (name, tags) of a file
	// object. Content, size, type and checksum are immutable.
	UpdateFileObject(ctx context.Context, objID uint, param params.UpdateFileObjectParams) (params.FileObject, error)
	// DeleteFileObject removes a file object. Deleting a missing object is a
	// no-op and returns nil.
	DeleteFileObject(ctx context.Context, objID uint) error
	// OpenFileObjectContent opens the stored content for streaming reads.
	// The caller must Close the returned reader to release resources.
	OpenFileObjectContent(ctx context.Context, objID uint) (io.ReadCloser, error)
}
|
||||
|
||||
//go:generate go run github.com/vektra/mockery/v2@latest
|
||||
type Store interface {
|
||||
RepoStore
|
||||
|
|
@ -196,6 +207,7 @@ type Store interface {
|
|||
GiteaEndpointStore
|
||||
GiteaCredentialsStore
|
||||
TemplateStore
|
||||
FileObjectStore
|
||||
|
||||
ControllerInfo() (params.ControllerInfo, error)
|
||||
InitController() (params.ControllerInfo, error)
|
||||
|
|
|
|||
|
|
@ -36,6 +36,7 @@ const (
|
|||
GithubEndpointEntityType DatabaseEntityType = "github_endpoint"
|
||||
ScaleSetEntityType DatabaseEntityType = "scaleset"
|
||||
TemplateEntityType DatabaseEntityType = "template"
|
||||
FileObjectEntityType DatabaseEntityType = "file_object"
|
||||
)
|
||||
|
||||
const (
|
||||
|
|
|
|||
370
database/sql/file_store.go
Normal file
370
database/sql/file_store.go
Normal file
|
|
@ -0,0 +1,370 @@
|
|||
package sql
|
||||
|
||||
import (
|
||||
"context"
|
||||
"crypto/sha256"
|
||||
"database/sql"
|
||||
"encoding/hex"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
|
||||
runnerErrors "github.com/cloudbase/garm-provider-common/errors"
|
||||
"github.com/cloudbase/garm/database/common"
|
||||
"github.com/cloudbase/garm/params"
|
||||
"github.com/cloudbase/garm/util"
|
||||
"github.com/mattn/go-sqlite3"
|
||||
"gorm.io/gorm"
|
||||
)
|
||||
|
||||
func (s *sqlDatabase) CreateFileObject(ctx context.Context, name string, size int64, tags []string, reader io.Reader) (fileObjParam params.FileObject, err error) {
|
||||
// Read first 8KB for type detection
|
||||
buffer := make([]byte, 8192)
|
||||
n, _ := io.ReadFull(reader, buffer)
|
||||
fileType := util.DetectFileType(buffer[:n])
|
||||
// Create document with pre-allocated blob
|
||||
fileObj := FileObject{
|
||||
Name: name,
|
||||
FileType: fileType,
|
||||
Size: size,
|
||||
Content: make([]byte, size),
|
||||
}
|
||||
|
||||
defer func() {
|
||||
if err == nil {
|
||||
s.sendNotify(common.FileObjectEntityType, common.CreateOperation, fileObjParam)
|
||||
}
|
||||
}()
|
||||
|
||||
if err := s.conn.Create(&fileObj).Error; err != nil {
|
||||
return params.FileObject{}, fmt.Errorf("failed to create file object: %w", err)
|
||||
}
|
||||
|
||||
// Stream file to blob and compute SHA256
|
||||
conn, err := s.sqlDB.Conn(ctx)
|
||||
if err != nil {
|
||||
return params.FileObject{}, fmt.Errorf("failed to get connection from pool: %w", err)
|
||||
}
|
||||
defer conn.Close()
|
||||
|
||||
var sha256sum string
|
||||
err = conn.Raw(func(driverConn any) error {
|
||||
sqliteConn := driverConn.(*sqlite3.SQLiteConn)
|
||||
|
||||
blob, err := sqliteConn.Blob("main", fileObj.TableName(), "content", int64(fileObj.ID), 1)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer blob.Close()
|
||||
|
||||
// Create SHA256 hasher
|
||||
hasher := sha256.New()
|
||||
|
||||
// Write the buffered data first
|
||||
if _, err := blob.Write(buffer[:n]); err != nil {
|
||||
return err
|
||||
}
|
||||
hasher.Write(buffer[:n])
|
||||
|
||||
// Stream the rest with hash computation
|
||||
_, err = io.Copy(io.MultiWriter(blob, hasher), reader)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Get final hash
|
||||
sha256sum = hex.EncodeToString(hasher.Sum(nil))
|
||||
return nil
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
return params.FileObject{}, fmt.Errorf("failed to write blob: %w", err)
|
||||
}
|
||||
|
||||
// Update document with SHA256
|
||||
if err := s.conn.Model(&fileObj).Update("sha256", sha256sum).Error; err != nil {
|
||||
return params.FileObject{}, fmt.Errorf("failed to update sha256sum: %w", err)
|
||||
}
|
||||
|
||||
// Create tag entries
|
||||
for _, tag := range tags {
|
||||
fileObjTag := FileObjectTag{
|
||||
FileObjectID: fileObj.ID,
|
||||
Tag: tag,
|
||||
}
|
||||
if err := s.conn.Create(&fileObjTag).Error; err != nil {
|
||||
return params.FileObject{}, fmt.Errorf("failed to add tag: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
// Reload document with tags
|
||||
if err := s.conn.Preload("TagsList").Omit("content").First(&fileObj, fileObj.ID).Error; err != nil {
|
||||
return params.FileObject{}, fmt.Errorf("failed to get file object: %w", err)
|
||||
}
|
||||
return s.sqlFileObjectToCommonParams(fileObj), nil
|
||||
}
|
||||
|
||||
// UpdateFileObject updates the mutable metadata of a file object: its name
// and/or its tag list. Content, size, file type and SHA256 are never touched
// (the content column is omitted from every query). On success an update
// notification is sent to watchers with the refreshed object.
func (s *sqlDatabase) UpdateFileObject(ctx context.Context, objID uint, param params.UpdateFileObjectParams) (fileObjParam params.FileObject, err error) {
	if err := param.Validate(); err != nil {
		return params.FileObject{}, fmt.Errorf("failed to validate update params: %w", err)
	}

	// Notify watchers only if the update succeeded; err is the named return,
	// so this observes the final outcome.
	defer func() {
		if err == nil {
			s.sendNotify(common.FileObjectEntityType, common.UpdateOperation, fileObjParam)
		}
	}()

	var fileObj FileObject
	// All reads and writes happen inside one transaction so the tag
	// replacement and the name update are atomic.
	err = s.conn.Transaction(func(tx *gorm.DB) error {
		if err := tx.Where("id = ?", objID).Omit("content").First(&fileObj).Error; err != nil {
			if errors.Is(err, gorm.ErrRecordNotFound) {
				return runnerErrors.NewNotFoundError("could not find file object with ID: %d", objID)
			}
			return fmt.Errorf("error trying to find file object: %w", err)
		}

		// Update name if provided (nil means "leave unchanged").
		if param.Name != nil {
			fileObj.Name = *param.Name
		}

		// Update tags if provided. A non-nil empty slice clears all tags;
		// nil leaves them unchanged.
		if param.Tags != nil {
			// Delete existing tags, then recreate from the new list.
			if err := tx.Where("file_object_id = ?", objID).Delete(&FileObjectTag{}).Error; err != nil {
				return fmt.Errorf("failed to delete existing tags: %w", err)
			}

			// Create new tags
			for _, tag := range param.Tags {
				fileObjTag := FileObjectTag{
					FileObjectID: fileObj.ID,
					Tag:          tag,
				}
				if err := tx.Create(&fileObjTag).Error; err != nil {
					return fmt.Errorf("failed to add tag: %w", err)
				}
			}
		}

		// Save the updated file object. Omit("content") prevents the
		// zero-value Content field (never loaded above) from overwriting
		// the stored blob.
		if err := tx.Omit("content").Save(&fileObj).Error; err != nil {
			return fmt.Errorf("failed to update file object: %w", err)
		}

		// Reload with tags so the returned value reflects the final state.
		if err := tx.Preload("TagsList").Omit("content").First(&fileObj, objID).Error; err != nil {
			return fmt.Errorf("failed to reload file object: %w", err)
		}

		return nil
	})

	if err != nil {
		return params.FileObject{}, err
	}

	return s.sqlFileObjectToCommonParams(fileObj), nil
}
|
||||
|
||||
func (s *sqlDatabase) DeleteFileObject(ctx context.Context, objID uint) (err error) {
|
||||
var fileObjParam params.FileObject
|
||||
var noop bool
|
||||
defer func() {
|
||||
if err == nil && !noop {
|
||||
s.sendNotify(common.FileObjectEntityType, common.DeleteOperation, fileObjParam)
|
||||
}
|
||||
}()
|
||||
|
||||
var fileObj FileObject
|
||||
err = s.conn.Transaction(func(tx *gorm.DB) error {
|
||||
if err := tx.Where("id = ?", objID).Omit("content").First(&fileObj).Error; err != nil {
|
||||
if errors.Is(err, gorm.ErrRecordNotFound) {
|
||||
return runnerErrors.ErrNotFound
|
||||
}
|
||||
return fmt.Errorf("failed to find file obj: %w", err)
|
||||
}
|
||||
if q := tx.Unscoped().Where("id = ?", objID).Delete(&FileObject{}); q.Error != nil {
|
||||
if errors.Is(q.Error, gorm.ErrRecordNotFound) {
|
||||
return runnerErrors.ErrNotFound
|
||||
}
|
||||
return fmt.Errorf("error deleting file object: %w", q.Error)
|
||||
}
|
||||
return nil
|
||||
})
|
||||
if err != nil {
|
||||
if errors.Is(err, runnerErrors.ErrNotFound) {
|
||||
noop = true
|
||||
return nil
|
||||
}
|
||||
return fmt.Errorf("failed to delete file object: %w", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *sqlDatabase) GetFileObject(ctx context.Context, objID uint) (params.FileObject, error) {
|
||||
var fileObj FileObject
|
||||
if err := s.conn.Preload("TagsList").Where("id = ?", objID).Omit("content").First(&fileObj).Error; err != nil {
|
||||
if errors.Is(err, gorm.ErrRecordNotFound) {
|
||||
return params.FileObject{}, runnerErrors.NewNotFoundError("could not find file object with ID: %d", objID)
|
||||
}
|
||||
return params.FileObject{}, fmt.Errorf("error trying to find file object: %w", err)
|
||||
}
|
||||
return s.sqlFileObjectToCommonParams(fileObj), nil
|
||||
}
|
||||
|
||||
func (s *sqlDatabase) SearchFileObjectByTags(ctx context.Context, tags []string, page, pageSize uint64) (params.FileObjectPaginatedResponse, error) {
|
||||
if page == 0 {
|
||||
page = 1
|
||||
}
|
||||
if pageSize == 0 {
|
||||
pageSize = 20
|
||||
}
|
||||
|
||||
var fileObjectRes []FileObject
|
||||
query := s.conn.Model(&FileObject{}).Preload("TagsList").Omit("content")
|
||||
for _, t := range tags {
|
||||
query = query.Where("EXISTS (SELECT 1 FROM file_object_tags WHERE file_object_tags.file_object_id = file_objects.id AND file_object_tags.tag = ?)", t)
|
||||
}
|
||||
|
||||
var total int64
|
||||
if err := query.Count(&total).Error; err != nil {
|
||||
return params.FileObjectPaginatedResponse{}, fmt.Errorf("failed to count results: %w", err)
|
||||
}
|
||||
|
||||
totalPages := uint64(0)
|
||||
if total > 0 {
|
||||
totalPages = (uint64(total) + pageSize - 1) / pageSize
|
||||
}
|
||||
|
||||
offset := (page - 1) * pageSize
|
||||
|
||||
if err := query.
|
||||
Limit(int(pageSize)).
|
||||
Offset(int(offset)).
|
||||
Order("created_at DESC").
|
||||
Omit("content").
|
||||
Find(&fileObjectRes).Error; err != nil {
|
||||
return params.FileObjectPaginatedResponse{}, fmt.Errorf("failed to query database: %w", err)
|
||||
}
|
||||
|
||||
ret := make([]params.FileObject, len(fileObjectRes))
|
||||
for idx, val := range fileObjectRes {
|
||||
ret[idx] = s.sqlFileObjectToCommonParams(val)
|
||||
}
|
||||
|
||||
return params.FileObjectPaginatedResponse{
|
||||
Pages: totalPages,
|
||||
CurrentPage: page,
|
||||
Results: ret,
|
||||
}, nil
|
||||
}
|
||||
|
||||
// OpenFileObjectContent opens the content blob of a file object for reading
// and returns an io.ReadCloser that streams it without loading the whole
// blob into memory.
//
// Resource ownership: a dedicated pool connection is pinned for the lifetime
// of the returned reader, because the sqlite incremental blob handle is only
// valid on the connection that opened it. Closing the returned reader closes
// both the blob handle and the pinned connection; callers MUST Close it.
func (s *sqlDatabase) OpenFileObjectContent(ctx context.Context, objID uint) (io.ReadCloser, error) {
	conn, err := s.sqlDB.Conn(ctx)
	if err != nil {
		return nil, fmt.Errorf("failed to get connection: %w", err)
	}

	var blobReader io.ReadCloser
	err = conn.Raw(func(driverConn any) error {
		sqliteConn := driverConn.(*sqlite3.SQLiteConn)

		// Open the "content" column read-only (last argument 0) on the row
		// whose rowid equals the object ID.
		blob, err := sqliteConn.Blob("main", (FileObject{}).TableName(), "content", int64(objID), 0)
		if err != nil {
			return fmt.Errorf("failed to open blob: %w", err)
		}

		// Wrap blob and connection so both are closed when reader is closed
		blobReader = &blobReadCloser{
			blob: blob,
			conn: conn,
		}
		return nil
	})

	if err != nil {
		// The reader was never handed out, so release the connection here.
		conn.Close()
		return nil, fmt.Errorf("failed to open blob for reading: %w", err)
	}

	return blobReader, nil
}
|
||||
|
||||
// blobReadCloser couples a sqlite incremental blob handle with the pooled
// database connection it was opened on, so that closing the reader releases
// both. The connection must stay pinned while the blob is being read.
type blobReadCloser struct {
	// blob is the open sqlite blob handle providing the content bytes.
	blob io.ReadCloser
	// conn is the pool connection the blob handle is bound to.
	conn *sql.Conn
}
|
||||
|
||||
// Read delegates to the underlying sqlite blob handle, satisfying io.Reader.
func (b *blobReadCloser) Read(p []byte) (n int, err error) {
	return b.blob.Read(p)
}
|
||||
|
||||
func (b *blobReadCloser) Close() error {
|
||||
blobErr := b.blob.Close()
|
||||
connErr := b.conn.Close()
|
||||
if blobErr != nil {
|
||||
return blobErr
|
||||
}
|
||||
return connErr
|
||||
}
|
||||
|
||||
func (s *sqlDatabase) ListFileObjects(ctx context.Context, page, pageSize uint64) (params.FileObjectPaginatedResponse, error) {
|
||||
if page == 0 {
|
||||
page = 1
|
||||
}
|
||||
if pageSize == 0 {
|
||||
pageSize = 20
|
||||
}
|
||||
|
||||
var total int64
|
||||
if err := s.conn.Model(&FileObject{}).Count(&total).Error; err != nil {
|
||||
return params.FileObjectPaginatedResponse{}, fmt.Errorf("failed to count file objects: %w", err)
|
||||
}
|
||||
|
||||
totalPages := uint64(0)
|
||||
if total > 0 {
|
||||
totalPages = (uint64(total) + pageSize - 1) / pageSize
|
||||
}
|
||||
|
||||
offset := (page - 1) * pageSize
|
||||
var fileObjs []FileObject
|
||||
if err := s.conn.Preload("TagsList").Omit("content").
|
||||
Limit(int(pageSize)).
|
||||
Offset(int(offset)).
|
||||
Order("created_at DESC").
|
||||
Find(&fileObjs).Error; err != nil {
|
||||
return params.FileObjectPaginatedResponse{}, fmt.Errorf("failed to list file objects: %w", err)
|
||||
}
|
||||
|
||||
results := make([]params.FileObject, len(fileObjs))
|
||||
for i, obj := range fileObjs {
|
||||
results[i] = s.sqlFileObjectToCommonParams(obj)
|
||||
}
|
||||
|
||||
return params.FileObjectPaginatedResponse{
|
||||
Pages: totalPages,
|
||||
CurrentPage: page,
|
||||
Results: results,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (s *sqlDatabase) sqlFileObjectToCommonParams(obj FileObject) params.FileObject {
|
||||
tags := make([]string, len(obj.TagsList))
|
||||
for idx, val := range obj.TagsList {
|
||||
tags[idx] = val.Tag
|
||||
}
|
||||
return params.FileObject{
|
||||
ID: obj.ID,
|
||||
CreatedAt: obj.CreatedAt,
|
||||
UpdatedAt: obj.UpdatedAt,
|
||||
Name: obj.Name,
|
||||
Size: obj.Size,
|
||||
FileType: obj.FileType,
|
||||
SHA256: obj.SHA256,
|
||||
Tags: tags,
|
||||
}
|
||||
}
|
||||
596
database/sql/file_store_test.go
Normal file
596
database/sql/file_store_test.go
Normal file
|
|
@ -0,0 +1,596 @@
|
|||
// Copyright 2025 Cloudbase Solutions SRL
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package sql
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"crypto/sha256"
|
||||
"encoding/hex"
|
||||
"fmt"
|
||||
"io"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/suite"
|
||||
|
||||
runnerErrors "github.com/cloudbase/garm-provider-common/errors"
|
||||
dbCommon "github.com/cloudbase/garm/database/common"
|
||||
"github.com/cloudbase/garm/database/watcher"
|
||||
garmTesting "github.com/cloudbase/garm/internal/testing"
|
||||
"github.com/cloudbase/garm/params"
|
||||
)
|
||||
|
||||
// FileStoreTestFixtures holds the file objects created in SetupTest and
// shared by the test cases.
type FileStoreTestFixtures struct {
	// FileObjects are the pre-created objects: [0] text file with tags,
	// [1] binary-like file, [2] file without tags.
	FileObjects []params.FileObject
}
|
||||
|
||||
// FileStoreTestSuite exercises the file object store against a real sqlite
// database created fresh for every test.
type FileStoreTestSuite struct {
	suite.Suite
	Store    dbCommon.Store
	ctx      context.Context // admin context used by the tests
	adminCtx context.Context
	Fixtures *FileStoreTestFixtures
}
|
||||
|
||||
// TearDownTest stops the database watcher started in SetupTest.
func (s *FileStoreTestSuite) TearDownTest() {
	watcher.CloseWatcher()
}
|
||||
|
||||
// SetupTest creates a fresh sqlite-backed store, an admin context, and three
// fixture file objects used throughout the suite.
func (s *FileStoreTestSuite) SetupTest() {
	ctx := context.Background()
	watcher.InitWatcher(ctx)

	db, err := NewSQLDatabase(context.Background(), garmTesting.GetTestSqliteDBConfig(s.T()))
	if err != nil {
		s.FailNow(fmt.Sprintf("failed to create db connection: %s", err))
	}
	s.Store = db

	adminCtx := garmTesting.ImpersonateAdminContext(context.Background(), db, s.T())
	s.adminCtx = adminCtx
	s.ctx = adminCtx

	// Create test file objects
	fileObjects := []params.FileObject{}

	// File 1: Small text file with tags
	content1 := []byte("Hello, World! This is test file 1.")
	fileObj1, err := s.Store.CreateFileObject(s.ctx, "test-file-1.txt", int64(len(content1)), []string{"test", "text"}, bytes.NewReader(content1))
	if err != nil {
		s.FailNow(fmt.Sprintf("failed to create test file 1: %s", err))
	}
	fileObjects = append(fileObjects, fileObj1)

	// File 2: Binary-like content with different tags
	content2 := []byte{0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A, 0x00} // PNG header-like
	fileObj2, err := s.Store.CreateFileObject(s.ctx, "test-image.png", int64(len(content2)), []string{"image", "binary"}, bytes.NewReader(content2))
	if err != nil {
		s.FailNow(fmt.Sprintf("failed to create test file 2: %s", err))
	}
	fileObjects = append(fileObjects, fileObj2)

	// File 3: No tags
	content3 := []byte("File without tags.")
	fileObj3, err := s.Store.CreateFileObject(s.ctx, "no-tags.txt", int64(len(content3)), []string{}, bytes.NewReader(content3))
	if err != nil {
		s.FailNow(fmt.Sprintf("failed to create test file 3: %s", err))
	}
	fileObjects = append(fileObjects, fileObj3)

	s.Fixtures = &FileStoreTestFixtures{
		FileObjects: fileObjects,
	}
}
|
||||
|
||||
// TestCreateFileObject verifies that creating a file object stores the
// metadata, the tags, and a correct SHA256 of the content.
func (s *FileStoreTestSuite) TestCreateFileObject() {
	content := []byte("New test file content")
	tags := []string{"new", "test"}

	fileObj, err := s.Store.CreateFileObject(s.ctx, "new-file.txt", int64(len(content)), tags, bytes.NewReader(content))
	s.Require().Nil(err)
	s.Require().NotZero(fileObj.ID)
	s.Require().Equal("new-file.txt", fileObj.Name)
	s.Require().Equal(int64(len(content)), fileObj.Size)
	s.Require().ElementsMatch(tags, fileObj.Tags)
	s.Require().NotEmpty(fileObj.SHA256)
	s.Require().NotEmpty(fileObj.FileType)

	// Verify SHA256 is correct by recomputing it locally.
	expectedHash := sha256.Sum256(content)
	expectedHashStr := hex.EncodeToString(expectedHash[:])
	s.Require().Equal(expectedHashStr, fileObj.SHA256)
}
|
||||
|
||||
// TestCreateFileObjectEmpty verifies that a zero-byte file can be stored.
func (s *FileStoreTestSuite) TestCreateFileObjectEmpty() {
	content := []byte{}
	fileObj, err := s.Store.CreateFileObject(s.ctx, "empty-file.txt", 0, []string{}, bytes.NewReader(content))
	s.Require().Nil(err)
	s.Require().NotZero(fileObj.ID)
	s.Require().Equal("empty-file.txt", fileObj.Name)
	s.Require().Equal(int64(0), fileObj.Size)
}
|
||||
|
||||
// TestGetFileObject verifies that a fixture object can be fetched by ID with
// all metadata intact.
func (s *FileStoreTestSuite) TestGetFileObject() {
	fileObj, err := s.Store.GetFileObject(s.ctx, s.Fixtures.FileObjects[0].ID)
	s.Require().Nil(err)
	s.Require().Equal(s.Fixtures.FileObjects[0].ID, fileObj.ID)
	s.Require().Equal(s.Fixtures.FileObjects[0].Name, fileObj.Name)
	s.Require().Equal(s.Fixtures.FileObjects[0].Size, fileObj.Size)
	s.Require().Equal(s.Fixtures.FileObjects[0].SHA256, fileObj.SHA256)
	s.Require().ElementsMatch(s.Fixtures.FileObjects[0].Tags, fileObj.Tags)
}
|
||||
|
||||
// TestGetFileObjectNotFound verifies that fetching a missing ID yields an
// ErrNotFound-wrapped error.
func (s *FileStoreTestSuite) TestGetFileObjectNotFound() {
	_, err := s.Store.GetFileObject(s.ctx, 99999)
	s.Require().NotNil(err)
	s.Require().ErrorIs(err, runnerErrors.ErrNotFound)
}
|
||||
|
||||
// TestOpenFileObjectContent verifies that stored content can be streamed
// back byte-for-byte via the blob reader.
func (s *FileStoreTestSuite) TestOpenFileObjectContent() {
	// Create a file with known content
	content := []byte("Test content for reading")
	fileObj, err := s.Store.CreateFileObject(s.ctx, "read-test.txt", int64(len(content)), []string{"read"}, bytes.NewReader(content))
	s.Require().Nil(err)

	// Open and read the content
	reader, err := s.Store.OpenFileObjectContent(s.ctx, fileObj.ID)
	s.Require().Nil(err)
	s.Require().NotNil(reader)
	defer reader.Close()

	readContent, err := io.ReadAll(reader)
	s.Require().Nil(err)
	s.Require().Equal(content, readContent)
}
|
||||
|
||||
// TestOpenFileObjectContentNotFound verifies that opening the content of a
// missing object returns an error.
func (s *FileStoreTestSuite) TestOpenFileObjectContentNotFound() {
	_, err := s.Store.OpenFileObjectContent(s.ctx, 99999)
	s.Require().NotNil(err)
}
|
||||
|
||||
// TestListFileObjects verifies that listing returns at least the fixture
// objects and sane pagination metadata.
func (s *FileStoreTestSuite) TestListFileObjects() {
	result, err := s.Store.ListFileObjects(s.ctx, 1, 10)
	s.Require().Nil(err)
	s.Require().GreaterOrEqual(len(result.Results), len(s.Fixtures.FileObjects))
	s.Require().Equal(uint64(1), result.CurrentPage)
	s.Require().GreaterOrEqual(result.Pages, uint64(1))
}
|
||||
|
||||
// TestListFileObjectsPagination verifies page slicing: full pages of the
// requested size, correct page numbers, and distinct results across pages.
func (s *FileStoreTestSuite) TestListFileObjectsPagination() {
	// Create more files to test pagination
	for i := 0; i < 5; i++ {
		content := []byte(fmt.Sprintf("File %d", i))
		_, err := s.Store.CreateFileObject(s.ctx, fmt.Sprintf("page-test-%d.txt", i), int64(len(content)), []string{"pagination"}, bytes.NewReader(content))
		s.Require().Nil(err)
	}

	// Test first page with page size of 2
	page1, err := s.Store.ListFileObjects(s.ctx, 1, 2)
	s.Require().Nil(err)
	s.Require().Equal(2, len(page1.Results))
	s.Require().Equal(uint64(1), page1.CurrentPage)

	// Test second page
	page2, err := s.Store.ListFileObjects(s.ctx, 2, 2)
	s.Require().Nil(err)
	s.Require().Equal(2, len(page2.Results))
	s.Require().Equal(uint64(2), page2.CurrentPage)
	s.Require().Equal(page1.Pages, page2.Pages)

	// Verify different results on different pages
	if len(page1.Results) > 0 && len(page2.Results) > 0 {
		page1File := page1.Results[0]
		page2File := page2.Results[0]
		s.Require().NotEqual(page1File.ID, page2File.ID)
	}
}
|
||||
|
||||
// TestListFileObjectsDefaultPagination verifies the fallback defaults when
// zero values are passed for page and page size.
func (s *FileStoreTestSuite) TestListFileObjectsDefaultPagination() {
	// Test default values (page 0 should become 1, pageSize 0 should become 20)
	result, err := s.Store.ListFileObjects(s.ctx, 0, 0)
	s.Require().Nil(err)
	s.Require().Equal(uint64(1), result.CurrentPage)
	s.Require().LessOrEqual(len(result.Results), 20)
}
|
||||
|
||||
// TestUpdateFileObjectName verifies that renaming changes only the name and
// that the change persists across a fresh fetch.
func (s *FileStoreTestSuite) TestUpdateFileObjectName() {
	newName := "updated-name.txt"
	updated, err := s.Store.UpdateFileObject(s.ctx, s.Fixtures.FileObjects[0].ID, params.UpdateFileObjectParams{
		Name: &newName,
	})
	s.Require().Nil(err)
	s.Require().Equal(newName, updated.Name)
	s.Require().Equal(s.Fixtures.FileObjects[0].ID, updated.ID)
	s.Require().Equal(s.Fixtures.FileObjects[0].Size, updated.Size)     // Size should not change
	s.Require().Equal(s.Fixtures.FileObjects[0].SHA256, updated.SHA256) // SHA256 should not change

	// Verify the change persists
	retrieved, err := s.Store.GetFileObject(s.ctx, s.Fixtures.FileObjects[0].ID)
	s.Require().Nil(err)
	s.Require().Equal(newName, retrieved.Name)
}
|
||||
|
||||
// TestUpdateFileObjectTags verifies that the tag list can be replaced in
// full and that the change persists.
func (s *FileStoreTestSuite) TestUpdateFileObjectTags() {
	newTags := []string{"updated", "tags", "here"}
	updated, err := s.Store.UpdateFileObject(s.ctx, s.Fixtures.FileObjects[0].ID, params.UpdateFileObjectParams{
		Tags: newTags,
	})
	s.Require().Nil(err)
	s.Require().ElementsMatch(newTags, updated.Tags)
	s.Require().Equal(s.Fixtures.FileObjects[0].Name, updated.Name) // Name should not change

	// Verify the change persists
	retrieved, err := s.Store.GetFileObject(s.ctx, s.Fixtures.FileObjects[0].ID)
	s.Require().Nil(err)
	s.Require().ElementsMatch(newTags, retrieved.Tags)
}
|
||||
|
||||
// TestUpdateFileObjectNameAndTags verifies that name and tags can be updated
// in a single call.
func (s *FileStoreTestSuite) TestUpdateFileObjectNameAndTags() {
	newName := "completely-updated.txt"
	newTags := []string{"both", "changed"}

	updated, err := s.Store.UpdateFileObject(s.ctx, s.Fixtures.FileObjects[0].ID, params.UpdateFileObjectParams{
		Name: &newName,
		Tags: newTags,
	})
	s.Require().Nil(err)
	s.Require().Equal(newName, updated.Name)
	s.Require().ElementsMatch(newTags, updated.Tags)
}
|
||||
|
||||
// TestUpdateFileObjectEmptyTags verifies that a non-nil empty tag slice
// clears all existing tags (as opposed to nil, which leaves them alone).
func (s *FileStoreTestSuite) TestUpdateFileObjectEmptyTags() {
	// Test clearing all tags
	emptyTags := []string{}
	updated, err := s.Store.UpdateFileObject(s.ctx, s.Fixtures.FileObjects[0].ID, params.UpdateFileObjectParams{
		Tags: emptyTags,
	})
	s.Require().Nil(err)
	s.Require().Empty(updated.Tags)

	// Verify the change persists
	retrieved, err := s.Store.GetFileObject(s.ctx, s.Fixtures.FileObjects[0].ID)
	s.Require().Nil(err)
	s.Require().Empty(retrieved.Tags)
}
|
||||
|
||||
// TestUpdateFileObjectNoChanges verifies that an all-nil update params
// struct is a successful no-op.
func (s *FileStoreTestSuite) TestUpdateFileObjectNoChanges() {
	// Update with no changes
	updated, err := s.Store.UpdateFileObject(s.ctx, s.Fixtures.FileObjects[0].ID, params.UpdateFileObjectParams{})
	s.Require().Nil(err)
	s.Require().Equal(s.Fixtures.FileObjects[0].Name, updated.Name)
	s.Require().ElementsMatch(s.Fixtures.FileObjects[0].Tags, updated.Tags)
}
|
||||
|
||||
// TestUpdateFileObjectNotFound verifies that updating a missing object
// yields an ErrNotFound-wrapped error.
func (s *FileStoreTestSuite) TestUpdateFileObjectNotFound() {
	newName := "does-not-exist.txt"
	_, err := s.Store.UpdateFileObject(s.ctx, 99999, params.UpdateFileObjectParams{
		Name: &newName,
	})
	s.Require().NotNil(err)
	s.Require().ErrorIs(err, runnerErrors.ErrNotFound)
}
|
||||
|
||||
// TestUpdateFileObjectEmptyName verifies that param validation rejects an
// empty (but non-nil) name.
func (s *FileStoreTestSuite) TestUpdateFileObjectEmptyName() {
	emptyName := ""
	_, err := s.Store.UpdateFileObject(s.ctx, s.Fixtures.FileObjects[0].ID, params.UpdateFileObjectParams{
		Name: &emptyName,
	})
	s.Require().NotNil(err)
	s.Require().Contains(err.Error(), "name cannot be empty")
}
|
||||
|
||||
// TestDeleteFileObject verifies that a deleted object can no longer be
// fetched.
func (s *FileStoreTestSuite) TestDeleteFileObject() {
	// Create a file to delete
	content := []byte("To be deleted")
	fileObj, err := s.Store.CreateFileObject(s.ctx, "delete-me.txt", int64(len(content)), []string{"delete"}, bytes.NewReader(content))
	s.Require().Nil(err)

	// Delete the file
	err = s.Store.DeleteFileObject(s.ctx, fileObj.ID)
	s.Require().Nil(err)

	// Verify it's deleted
	_, err = s.Store.GetFileObject(s.ctx, fileObj.ID)
	s.Require().NotNil(err)
	s.Require().ErrorIs(err, runnerErrors.ErrNotFound)
}
|
||||
|
||||
// TestDeleteFileObjectNotFound verifies delete's idempotent no-op behavior
// for missing objects.
func (s *FileStoreTestSuite) TestDeleteFileObjectNotFound() {
	// Deleting non-existent file should not error
	err := s.Store.DeleteFileObject(s.ctx, 99999)
	s.Require().Nil(err)
}
|
||||
|
||||
// TestCreateFileObjectLargeContent verifies round-tripping of a payload
// larger than the 8KB sniff buffer used during creation (1MB here).
func (s *FileStoreTestSuite) TestCreateFileObjectLargeContent() {
	// Test with larger content (1MB)
	size := 1024 * 1024
	content := make([]byte, size)
	for i := range content {
		content[i] = byte(i % 256)
	}

	fileObj, err := s.Store.CreateFileObject(s.ctx, "large-file.bin", int64(size), []string{"large", "binary"}, bytes.NewReader(content))
	s.Require().Nil(err)
	s.Require().Equal(int64(size), fileObj.Size)

	// Verify we can read it back
	reader, err := s.Store.OpenFileObjectContent(s.ctx, fileObj.ID)
	s.Require().Nil(err)
	defer reader.Close()

	readContent, err := io.ReadAll(reader)
	s.Require().Nil(err)
	s.Require().Equal(content, readContent)
}
|
||||
|
||||
// TestFileObjectImmutableFields verifies that updates to name/tags leave
// size, SHA256, file type and the stored content untouched.
func (s *FileStoreTestSuite) TestFileObjectImmutableFields() {
	// Create a file
	content := []byte("Immutable test content")
	fileObj, err := s.Store.CreateFileObject(s.ctx, "immutable-test.txt", int64(len(content)), []string{"original"}, bytes.NewReader(content))
	s.Require().Nil(err)

	originalSize := fileObj.Size
	originalSHA256 := fileObj.SHA256
	originalFileType := fileObj.FileType

	// Update name and tags
	newName := "updated-immutable-test.txt"
	updated, err := s.Store.UpdateFileObject(s.ctx, fileObj.ID, params.UpdateFileObjectParams{
		Name: &newName,
		Tags: []string{"updated"},
	})
	s.Require().Nil(err)

	// Verify immutable fields haven't changed
	s.Require().Equal(originalSize, updated.Size)
	s.Require().Equal(originalSHA256, updated.SHA256)
	s.Require().Equal(originalFileType, updated.FileType)

	// Verify content hasn't changed
	reader, err := s.Store.OpenFileObjectContent(s.ctx, fileObj.ID)
	s.Require().Nil(err)
	defer reader.Close()

	readContent, err := io.ReadAll(reader)
	s.Require().Nil(err)
	s.Require().Equal(content, readContent)
}
|
||||
|
||||
// TestSearchFileObjectByTags verifies that a single-tag search returns every
// object carrying that tag.
func (s *FileStoreTestSuite) TestSearchFileObjectByTags() {
	// Create files with specific tags for searching
	content1 := []byte("File with tag1 and tag2")
	file1, err := s.Store.CreateFileObject(s.ctx, "search-file-1.txt", int64(len(content1)), []string{"tag1", "tag2"}, bytes.NewReader(content1))
	s.Require().Nil(err)

	content2 := []byte("File with tag1, tag2, and tag3")
	file2, err := s.Store.CreateFileObject(s.ctx, "search-file-2.txt", int64(len(content2)), []string{"tag1", "tag2", "tag3"}, bytes.NewReader(content2))
	s.Require().Nil(err)

	content3 := []byte("File with only tag1")
	file3, err := s.Store.CreateFileObject(s.ctx, "search-file-3.txt", int64(len(content3)), []string{"tag1"}, bytes.NewReader(content3))
	s.Require().Nil(err)

	content4 := []byte("File with tag3 only")
	_, err = s.Store.CreateFileObject(s.ctx, "search-file-4.txt", int64(len(content4)), []string{"tag3"}, bytes.NewReader(content4))
	s.Require().Nil(err)

	// Search for files with tag1 - should return 3 files
	result, err := s.Store.SearchFileObjectByTags(s.ctx, []string{"tag1"}, 1, 10)
	s.Require().Nil(err)
	s.Require().GreaterOrEqual(len(result.Results), 3)

	// Verify the expected files are in the results
	foundIDs := make(map[uint]bool)
	for _, fileObj := range result.Results {
		foundIDs[fileObj.ID] = true
	}
	s.Require().True(foundIDs[file1.ID], "file1 should be in results")
	s.Require().True(foundIDs[file2.ID], "file2 should be in results")
	s.Require().True(foundIDs[file3.ID], "file3 should be in results")
}
|
||||
|
||||
// TestSearchFileObjectByTagsMultipleTags verifies AND semantics: only
// objects carrying ALL requested tags are returned.
func (s *FileStoreTestSuite) TestSearchFileObjectByTagsMultipleTags() {
	// Create files with various tag combinations
	content1 := []byte("File with search1 and search2")
	file1, err := s.Store.CreateFileObject(s.ctx, "multi-search-1.txt", int64(len(content1)), []string{"search1", "search2"}, bytes.NewReader(content1))
	s.Require().Nil(err)

	content2 := []byte("File with search1, search2, and search3")
	file2, err := s.Store.CreateFileObject(s.ctx, "multi-search-2.txt", int64(len(content2)), []string{"search1", "search2", "search3"}, bytes.NewReader(content2))
	s.Require().Nil(err)

	content3 := []byte("File with only search1")
	_, err = s.Store.CreateFileObject(s.ctx, "multi-search-3.txt", int64(len(content3)), []string{"search1"}, bytes.NewReader(content3))
	s.Require().Nil(err)

	// Search for files with both search1 AND search2 - should return only 2 files
	result, err := s.Store.SearchFileObjectByTags(s.ctx, []string{"search1", "search2"}, 1, 10)
	s.Require().Nil(err)
	s.Require().Equal(2, len(result.Results))

	// Verify the correct files are returned
	foundIDs := make(map[uint]bool)
	for _, fileObj := range result.Results {
		foundIDs[fileObj.ID] = true
	}
	s.Require().True(foundIDs[file1.ID], "file1 should be in results")
	s.Require().True(foundIDs[file2.ID], "file2 should be in results")
}
|
||||
|
||||
func (s *FileStoreTestSuite) TestSearchFileObjectByTagsNoResults() {
|
||||
// Search for a tag that doesn't exist
|
||||
result, err := s.Store.SearchFileObjectByTags(s.ctx, []string{"nonexistent-tag"}, 1, 10)
|
||||
s.Require().Nil(err)
|
||||
s.Require().Equal(0, len(result.Results))
|
||||
s.Require().Equal(uint64(0), result.Pages)
|
||||
s.Require().Equal(uint64(1), result.CurrentPage)
|
||||
}
|
||||
|
||||
func (s *FileStoreTestSuite) TestSearchFileObjectByTagsEmptyTags() {
|
||||
// Search with empty tag list - should return all files
|
||||
result, err := s.Store.SearchFileObjectByTags(s.ctx, []string{}, 1, 100)
|
||||
s.Require().Nil(err)
|
||||
// Should return all files (fixtures + any created in other tests)
|
||||
s.Require().GreaterOrEqual(len(result.Results), len(s.Fixtures.FileObjects))
|
||||
}
|
||||
|
||||
// TestSearchFileObjectByTagsPagination verifies that tag search results can be
// paginated and that different pages return different entries.
func (s *FileStoreTestSuite) TestSearchFileObjectByTagsPagination() {
	// Create multiple files with the same tag
	for i := 0; i < 5; i++ {
		content := []byte(fmt.Sprintf("Pagination test file %d", i))
		_, err := s.Store.CreateFileObject(s.ctx, fmt.Sprintf("page-search-%d.txt", i), int64(len(content)), []string{"pagination-test"}, bytes.NewReader(content))
		s.Require().Nil(err)
	}

	// Test first page with page size of 2
	page1, err := s.Store.SearchFileObjectByTags(s.ctx, []string{"pagination-test"}, 1, 2)
	s.Require().Nil(err)
	s.Require().Equal(2, len(page1.Results))
	s.Require().Equal(uint64(1), page1.CurrentPage)
	s.Require().GreaterOrEqual(page1.Pages, uint64(3)) // At least 3 pages for 5 files

	// Test second page
	page2, err := s.Store.SearchFileObjectByTags(s.ctx, []string{"pagination-test"}, 2, 2)
	s.Require().Nil(err)
	s.Require().Equal(2, len(page2.Results))
	s.Require().Equal(uint64(2), page2.CurrentPage)

	// Verify different results on different pages
	if len(page1.Results) > 0 && len(page2.Results) > 0 {
		page1File := page1.Results[0]
		page2File := page2.Results[0]
		s.Require().NotEqual(page1File.ID, page2File.ID)
	}
}
|
||||
|
||||
func (s *FileStoreTestSuite) TestSearchFileObjectByTagsDefaultPagination() {
|
||||
// Create a file with a unique tag
|
||||
content := []byte("Default pagination test")
|
||||
_, err := s.Store.CreateFileObject(s.ctx, "default-page-search.txt", int64(len(content)), []string{"default-pagination"}, bytes.NewReader(content))
|
||||
s.Require().Nil(err)
|
||||
|
||||
// Test default values (page 0 should become 1, pageSize 0 should become 20)
|
||||
result, err := s.Store.SearchFileObjectByTags(s.ctx, []string{"default-pagination"}, 0, 0)
|
||||
s.Require().Nil(err)
|
||||
s.Require().Equal(uint64(1), result.CurrentPage)
|
||||
s.Require().LessOrEqual(len(result.Results), 20)
|
||||
s.Require().GreaterOrEqual(len(result.Results), 1)
|
||||
}
|
||||
|
||||
// TestSearchFileObjectByTagsAllTagsRequired asserts that the search applies
// AND logic across the requested tags: a result must carry every tag, not
// just one of them.
func (s *FileStoreTestSuite) TestSearchFileObjectByTagsAllTagsRequired() {
	// Test that search requires ALL specified tags (AND logic, not OR)
	content1 := []byte("Has A and B")
	file1, err := s.Store.CreateFileObject(s.ctx, "and-test-1.txt", int64(len(content1)), []string{"tagA", "tagB"}, bytes.NewReader(content1))
	s.Require().Nil(err)

	content2 := []byte("Has A, B, and C")
	file2, err := s.Store.CreateFileObject(s.ctx, "and-test-2.txt", int64(len(content2)), []string{"tagA", "tagB", "tagC"}, bytes.NewReader(content2))
	s.Require().Nil(err)

	// Files carrying only one of the two tags must not match.
	content3 := []byte("Has only A")
	_, err = s.Store.CreateFileObject(s.ctx, "and-test-3.txt", int64(len(content3)), []string{"tagA"}, bytes.NewReader(content3))
	s.Require().Nil(err)

	content4 := []byte("Has only B")
	_, err = s.Store.CreateFileObject(s.ctx, "and-test-4.txt", int64(len(content4)), []string{"tagB"}, bytes.NewReader(content4))
	s.Require().Nil(err)

	// Search for files with BOTH tagA AND tagB
	result, err := s.Store.SearchFileObjectByTags(s.ctx, []string{"tagA", "tagB"}, 1, 10)
	s.Require().Nil(err)
	s.Require().Equal(2, len(result.Results), "Should only return files with BOTH tags")

	// Verify the correct files are returned
	foundIDs := make(map[uint]bool)
	for _, fileObj := range result.Results {
		foundIDs[fileObj.ID] = true
		// Verify each result has both tags
		s.Require().Contains(fileObj.Tags, "tagA")
		s.Require().Contains(fileObj.Tags, "tagB")
	}
	s.Require().True(foundIDs[file1.ID])
	s.Require().True(foundIDs[file2.ID])
}
|
||||
|
||||
// TestSearchFileObjectByTagsCaseSensitive verifies that tag matching is
// case sensitive: a lowercase query must not match an uppercase tag and
// vice versa.
func (s *FileStoreTestSuite) TestSearchFileObjectByTagsCaseSensitive() {
	// Test case sensitivity of tag search
	content1 := []byte("File with lowercase tag")
	file1, err := s.Store.CreateFileObject(s.ctx, "case-test-1.txt", int64(len(content1)), []string{"lowercase"}, bytes.NewReader(content1))
	s.Require().Nil(err)

	content2 := []byte("File with UPPERCASE tag")
	file2, err := s.Store.CreateFileObject(s.ctx, "case-test-2.txt", int64(len(content2)), []string{"UPPERCASE"}, bytes.NewReader(content2))
	s.Require().Nil(err)

	// Search for lowercase - should only return file1
	result, err := s.Store.SearchFileObjectByTags(s.ctx, []string{"lowercase"}, 1, 10)
	s.Require().Nil(err)
	s.Require().Equal(1, len(result.Results))
	s.Require().Equal(file1.ID, result.Results[0].ID)

	// Search for UPPERCASE - should only return file2
	result, err = s.Store.SearchFileObjectByTags(s.ctx, []string{"UPPERCASE"}, 1, 10)
	s.Require().Nil(err)
	s.Require().Equal(1, len(result.Results))
	s.Require().Equal(file2.ID, result.Results[0].ID)
}
|
||||
|
||||
func (s *FileStoreTestSuite) TestSearchFileObjectByTagsOrderByCreatedAt() {
|
||||
// Create files with same tag at different times to test ordering
|
||||
tag := "order-test"
|
||||
|
||||
content1 := []byte("First file")
|
||||
file1, err := s.Store.CreateFileObject(s.ctx, "order-1.txt", int64(len(content1)), []string{tag}, bytes.NewReader(content1))
|
||||
s.Require().Nil(err)
|
||||
|
||||
content2 := []byte("Second file")
|
||||
file2, err := s.Store.CreateFileObject(s.ctx, "order-2.txt", int64(len(content2)), []string{tag}, bytes.NewReader(content2))
|
||||
s.Require().Nil(err)
|
||||
|
||||
content3 := []byte("Third file")
|
||||
file3, err := s.Store.CreateFileObject(s.ctx, "order-3.txt", int64(len(content3)), []string{tag}, bytes.NewReader(content3))
|
||||
s.Require().Nil(err)
|
||||
|
||||
// Search and verify order (should be DESC by created_at, so newest first)
|
||||
result, err := s.Store.SearchFileObjectByTags(s.ctx, []string{tag}, 1, 10)
|
||||
s.Require().Nil(err)
|
||||
s.Require().GreaterOrEqual(len(result.Results), 3)
|
||||
|
||||
// The most recently created files should be first
|
||||
// We can at least verify that file3 comes before file1 in the results
|
||||
var file1Idx, file3Idx int
|
||||
for i, fileObj := range result.Results {
|
||||
if fileObj.ID == file1.ID {
|
||||
file1Idx = i
|
||||
}
|
||||
if fileObj.ID == file3.ID {
|
||||
file3Idx = i
|
||||
}
|
||||
}
|
||||
s.Require().Less(file3Idx, file1Idx, "Newer file (file3) should appear before older file (file1)")
|
||||
|
||||
// Also verify file2 comes before file1
|
||||
var file2Idx int
|
||||
for i, fileObj := range result.Results {
|
||||
if fileObj.ID == file2.ID {
|
||||
file2Idx = i
|
||||
}
|
||||
}
|
||||
s.Require().Less(file2Idx, file1Idx, "Newer file (file2) should appear before older file (file1)")
|
||||
}
|
||||
|
||||
// TestFileStoreTestSuite is the entry point that runs the file object store
// test suite with the standard `go test` runner.
func TestFileStoreTestSuite(t *testing.T) {
	suite.Run(t, new(FileStoreTestSuite))
}
|
||||
|
|
@ -456,3 +456,38 @@ type GiteaCredentials struct {
|
|||
Repositories []Repository `gorm:"foreignKey:GiteaCredentialsID"`
|
||||
Organizations []Organization `gorm:"foreignKey:GiteaCredentialsID"`
|
||||
}
|
||||
|
||||
// FileObject represents the table that holds files. This can be used to store
|
||||
// GARM agent binaries, runner binary downloads that may be cached, etc.
|
||||
type FileObject struct {
|
||||
gorm.Model
|
||||
// Name is the name of the file
|
||||
Name string `gotm:"type:text,index:idx_fo_name"`
|
||||
// FileType holds the MIME type or file type description
|
||||
FileType string `gorm:"type:text"`
|
||||
// Size is the file size in bytes
|
||||
Size int64 `gorm:"type:integer"`
|
||||
// SHA256 is the sha256 checksum (hex encoded)
|
||||
SHA256 string `gorm:"type:text"`
|
||||
// Tags is a JSON array of tags
|
||||
TagsList []FileObjectTag `gorm:"foreignKey:FileObjectID;constraint:OnDelete:CASCADE"`
|
||||
// Content is a BLOB column for storing binary data
|
||||
Content []byte `gorm:"type:blob"`
|
||||
}
|
||||
|
||||
// TableName overrides the default table name gorm would derive from the
// struct name, pinning it to "file_objects".
func (FileObject) TableName() string {
	return "file_objects"
}
|
||||
|
||||
// FileObjectTag associates one tag string with one file object. Rows in this
// table are created alongside the owning FileObject and removed with it via
// the cascade constraint declared on FileObject.TagsList.
type FileObjectTag struct {
	ID uint `gorm:"primaryKey"`
	// FileObjectID is the ID of the file object this tag belongs to.
	FileObjectID uint `gorm:"index:idx_fileobject_tags_doc_id,priority:1;index:idx_fileobject_tags_tag,priority:1;not null"`
	// Tag is the tag value; indexed together with FileObjectID to speed up
	// tag searches.
	Tag string `gorm:"type:TEXT;index:idx_fileobject_tags_tag,priority:2;not null"`
}
|
||||
|
||||
// TableName overrides the default table name gorm would derive from the
// struct name, pinning it to "file_object_tags".
func (FileObjectTag) TableName() string {
	return "file_object_tags"
}
|
||||
|
|
|
|||
|
|
@ -16,6 +16,7 @@ package sql
|
|||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"errors"
|
||||
"fmt"
|
||||
"log/slog"
|
||||
|
|
@ -84,8 +85,14 @@ func NewSQLDatabase(ctx context.Context, cfg config.Database) (common.Store, err
|
|||
if err != nil {
|
||||
return nil, fmt.Errorf("error registering producer: %w", err)
|
||||
}
|
||||
|
||||
sqlDB, err := conn.DB()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get underlying database connection: %w", err)
|
||||
}
|
||||
db := &sqlDatabase{
|
||||
conn: conn,
|
||||
sqlDB: sqlDB,
|
||||
ctx: ctx,
|
||||
cfg: cfg,
|
||||
producer: producer,
|
||||
|
|
@ -98,7 +105,9 @@ func NewSQLDatabase(ctx context.Context, cfg config.Database) (common.Store, err
|
|||
}
|
||||
|
||||
type sqlDatabase struct {
|
||||
conn *gorm.DB
|
||||
conn *gorm.DB
|
||||
sqlDB *sql.DB
|
||||
|
||||
ctx context.Context
|
||||
cfg config.Database
|
||||
producer common.Producer
|
||||
|
|
@ -607,6 +616,8 @@ func (s *sqlDatabase) migrateDB() error {
|
|||
&ControllerInfo{},
|
||||
&WorkflowJob{},
|
||||
&ScaleSet{},
|
||||
&FileObject{},
|
||||
&FileObjectTag{},
|
||||
); err != nil {
|
||||
return fmt.Errorf("error running auto migrate: %w", err)
|
||||
}
|
||||
|
|
|
|||
5
go.mod
5
go.mod
|
|
@ -18,8 +18,10 @@ require (
|
|||
github.com/gorilla/handlers v1.5.2
|
||||
github.com/gorilla/mux v1.8.1
|
||||
github.com/gorilla/websocket v1.5.4-0.20240702125206-a62d9d2a8413
|
||||
github.com/h2non/filetype v1.1.3
|
||||
github.com/jedib0t/go-pretty/v6 v6.6.8
|
||||
github.com/manifoldco/promptui v0.9.0
|
||||
github.com/mattn/go-sqlite3 v1.14.32
|
||||
github.com/nbutton23/zxcvbn-go v0.0.0-20210217022336-fa2cb2858354
|
||||
github.com/prometheus/client_golang v1.23.2
|
||||
github.com/rivo/tview v0.42.0
|
||||
|
|
@ -37,6 +39,8 @@ require (
|
|||
gorm.io/gorm v1.31.0
|
||||
)
|
||||
|
||||
replace github.com/mattn/go-sqlite3 => github.com/gabriel-samfira/go-sqlite3 v0.0.0-20251005121134-bc61ecf9b4c7
|
||||
|
||||
require (
|
||||
filippo.io/edwards25519 v1.1.0 // indirect
|
||||
github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 // indirect
|
||||
|
|
@ -74,7 +78,6 @@ require (
|
|||
github.com/lucasb-eyer/go-colorful v1.3.0 // indirect
|
||||
github.com/mattn/go-isatty v0.0.20 // indirect
|
||||
github.com/mattn/go-runewidth v0.0.17 // indirect
|
||||
github.com/mattn/go-sqlite3 v1.14.32 // indirect
|
||||
github.com/minio/sio v0.4.2 // indirect
|
||||
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect
|
||||
github.com/oklog/ulid v1.3.1 // indirect
|
||||
|
|
|
|||
6
go.sum
6
go.sum
|
|
@ -27,6 +27,8 @@ github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c
|
|||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg=
|
||||
github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
|
||||
github.com/gabriel-samfira/go-sqlite3 v0.0.0-20251005121134-bc61ecf9b4c7 h1:+r9O7HrPI4OpkdFsZ9l5sjRD99KOl9uw4XpYJiP2HV4=
|
||||
github.com/gabriel-samfira/go-sqlite3 v0.0.0-20251005121134-bc61ecf9b4c7/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
|
||||
github.com/gdamore/encoding v1.0.1 h1:YzKZckdBL6jVt2Gc+5p82qhrGiqMdG/eNs6Wy0u3Uhw=
|
||||
github.com/gdamore/encoding v1.0.1/go.mod h1:0Z0cMFinngz9kS1QfMjCP8TY7em3bZYeeklsSDPivEo=
|
||||
github.com/gdamore/tcell/v2 v2.9.0 h1:N6t+eqK7/xwtRPwxzs1PXeRWnm0H9l02CrgJ7DLn1ys=
|
||||
|
|
@ -107,6 +109,8 @@ github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY=
|
|||
github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ=
|
||||
github.com/gorilla/websocket v1.5.4-0.20240702125206-a62d9d2a8413 h1:0Zn/h+BUQg6QHkybGvjFD7BnIbjjz3oWUObacn//1Go=
|
||||
github.com/gorilla/websocket v1.5.4-0.20240702125206-a62d9d2a8413/go.mod h1:r4w70xmWCQKmi1ONH4KIaBptdivuRPyosB9RmPlGEwA=
|
||||
github.com/h2non/filetype v1.1.3 h1:FKkx9QbD7HR/zjK1Ia5XiBsq9zdLi5Kf3zGyFTAFkGg=
|
||||
github.com/h2non/filetype v1.1.3/go.mod h1:319b3zT68BvV+WRj7cwy856M2ehB3HqNOt6sy1HndBY=
|
||||
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
|
||||
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
|
||||
github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM=
|
||||
|
|
@ -139,8 +143,6 @@ github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWE
|
|||
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
||||
github.com/mattn/go-runewidth v0.0.17 h1:78v8ZlW0bP43XfmAfPsdXcoNCelfMHsDmd/pkENfrjQ=
|
||||
github.com/mattn/go-runewidth v0.0.17/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
|
||||
github.com/mattn/go-sqlite3 v1.14.32 h1:JD12Ag3oLy1zQA+BNn74xRgaBbdhbNIDYvQUEuuErjs=
|
||||
github.com/mattn/go-sqlite3 v1.14.32/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
|
||||
github.com/microsoft/go-mssqldb v1.7.2 h1:CHkFJiObW7ItKTJfHo1QX7QBBD1iV+mn1eOyRP3b/PA=
|
||||
github.com/microsoft/go-mssqldb v1.7.2/go.mod h1:kOvZKUdrhhFQmxLZqbwUV0rHkNkZpthMITIb2Ko1IoA=
|
||||
github.com/minio/sio v0.4.2 h1:+ayQoaniewWpKzz6b27F075b+q1HJajQr8ViG9KFZwA=
|
||||
|
|
|
|||
|
|
@ -1307,3 +1307,23 @@ type Template struct {
|
|||
// used by swagger client generated code
|
||||
// swagger:model Templates
|
||||
type Templates []Template
|
||||
|
||||
// FileObject holds the metadata of a stored file. The file contents are not
// embedded here; they are streamed separately.
type FileObject struct {
	// ID is the database ID of the file object.
	ID uint `json:"id"`
	// CreatedAt is the time the file object was created.
	CreatedAt time.Time `json:"created_at"`
	// UpdatedAt is the time the file object was last updated.
	UpdatedAt time.Time `json:"updated_at"`
	// Name is the file name.
	Name string `json:"name"`
	// Size is the file size in bytes.
	Size int64 `json:"size"`
	// Tags holds the tags associated with the file.
	Tags []string `json:"tags"`
	// SHA256 is the hex encoded sha256 checksum of the file contents.
	SHA256 string `json:"sha256,omitempty"`
	// FileType holds the MIME type or file type description.
	FileType string `json:"file_type"`
}
|
||||
|
||||
// PaginatedResponse is a generic wrapper for paginated query results.
type PaginatedResponse[T any] struct {
	// Pages is the total number of pages available.
	Pages uint64 `json:"pages"`
	// CurrentPage is the page returned in this response.
	CurrentPage uint64 `json:"current_page"`
	// Results holds the items of the current page.
	Results []T `json:"results"`
}

// FileObjectPaginatedResponse is a paginated response of file objects.
// swagger:model FileObjectPaginatedResponse
type FileObjectPaginatedResponse = PaginatedResponse[FileObject]
|
||||
|
|
|
|||
|
|
@ -866,3 +866,16 @@ func (u *UpdateTemplateParams) Validate() error {
|
|||
|
||||
return nil
|
||||
}
|
||||
|
||||
// UpdateFileObjectParams holds the mutable fields of a file object. A nil
// Name leaves the current name untouched.
type UpdateFileObjectParams struct {
	Name *string `json:"name"`
	Tags []string `json:"tags"`
}

// Validate checks that the update parameters are usable. A name, when
// supplied, must not be the empty string.
func (u *UpdateFileObjectParams) Validate() error {
	if u.Name == nil {
		return nil
	}
	if *u.Name == "" {
		return fmt.Errorf("name cannot be empty")
	}
	return nil
}
|
||||
|
|
|
|||
22
util/util.go
22
util/util.go
|
|
@ -25,6 +25,8 @@ import (
|
|||
"github.com/cloudbase/garm-provider-common/cloudconfig"
|
||||
runnerErrors "github.com/cloudbase/garm-provider-common/errors"
|
||||
commonParams "github.com/cloudbase/garm-provider-common/params"
|
||||
"github.com/h2non/filetype"
|
||||
|
||||
"github.com/cloudbase/garm/internal/templates"
|
||||
"github.com/cloudbase/garm/runner/common"
|
||||
)
|
||||
|
|
@ -157,3 +159,23 @@ func MaybeAddWrapperToExtraSpecs(ctx context.Context, specs json.RawMessage, osT
|
|||
|
||||
return json.RawMessage(ret)
|
||||
}
|
||||
|
||||
// DetectFileType detects the MIME type from file content
|
||||
func DetectFileType(data []byte) string {
|
||||
// First, try http.DetectContentType (good for text files)
|
||||
httpType := http.DetectContentType(data)
|
||||
|
||||
// If http detected text, use that
|
||||
if httpType != "application/octet-stream" {
|
||||
return httpType
|
||||
}
|
||||
|
||||
// For binary files, use filetype library for specific format detection
|
||||
kind, err := filetype.Match(data)
|
||||
if err == nil && kind != filetype.Unknown {
|
||||
return kind.MIME.Value
|
||||
}
|
||||
|
||||
// Default to application/octet-stream for unknown types
|
||||
return "application/octet-stream"
|
||||
}
|
||||
|
|
|
|||
12
vendor/github.com/h2non/filetype/.editorconfig
generated
vendored
Normal file
12
vendor/github.com/h2non/filetype/.editorconfig
generated
vendored
Normal file
|
|
@ -0,0 +1,12 @@
|
|||
root = true
|
||||
|
||||
[*]
|
||||
indent_style = tabs
|
||||
indent_size = 2
|
||||
end_of_line = lf
|
||||
charset = utf-8
|
||||
trim_trailing_whitespace = true
|
||||
insert_final_newline = true
|
||||
|
||||
[*.md]
|
||||
trim_trailing_whitespace = false
|
||||
2
vendor/github.com/h2non/filetype/.gitignore
generated
vendored
Normal file
2
vendor/github.com/h2non/filetype/.gitignore
generated
vendored
Normal file
|
|
@ -0,0 +1,2 @@
|
|||
bin
|
||||
.DS_Store
|
||||
16
vendor/github.com/h2non/filetype/.travis.yml
generated
vendored
Normal file
16
vendor/github.com/h2non/filetype/.travis.yml
generated
vendored
Normal file
|
|
@ -0,0 +1,16 @@
|
|||
language: go
|
||||
arch:
|
||||
- AMD64
|
||||
- ppc64le
|
||||
go:
|
||||
- "1.13"
|
||||
- "1.14"
|
||||
|
||||
before_install:
|
||||
- go get -u -v golang.org/x/lint/golint
|
||||
|
||||
script:
|
||||
- diff -u <(echo -n) <(gofmt -s -d ./)
|
||||
- diff -u <(echo -n) <(go vet ./...)
|
||||
- diff -u <(echo -n) <(golint)
|
||||
- go test -v -race ./...
|
||||
163
vendor/github.com/h2non/filetype/History.md
generated
vendored
Normal file
163
vendor/github.com/h2non/filetype/History.md
generated
vendored
Normal file
|
|
@ -0,0 +1,163 @@
|
|||
|
||||
v1.0.3 / 2021-11-21
|
||||
===================
|
||||
|
||||
* fix(#108): add application file matchers
|
||||
* Merge pull request #106 from hannesbraun/aiff-support
|
||||
* Add AIFF support
|
||||
* fix(archive): format issue indentation
|
||||
* feat(version): bump patch
|
||||
* Merge pull request #100 from da2018/master
|
||||
* Enhance Zstd support
|
||||
* Merge pull request #98 from cfergeau/zstd
|
||||
* Add zstd support
|
||||
* Merge pull request #99 from cfergeau/byteprefixmatcher
|
||||
* Introduce bytePrefixMatcher helper
|
||||
|
||||
v1.1.0 / 2020-06-06
|
||||
===================
|
||||
|
||||
* feat: version bump v1.10
|
||||
* feat(ci): add go 1.14
|
||||
* Merge pull request #82 from andrewstucki/sqlite-update
|
||||
* Merge pull request #84 from evanoberholster/master
|
||||
* Better differentiation: between image/x-canon-cr2 and image/tiff
|
||||
* Merge pull request #1 from h2non/master
|
||||
* Update ico filetype per https://www.iana.org/assignments/media-types/image/vnd.microsoft.icon
|
||||
* Update rar filetype per https://www.iana.org/assignments/media-types/application/vnd.rar
|
||||
* Update exe filetype per https://www.iana.org/assignments/media-types/application/vnd.microsoft.portable-executable
|
||||
* Update deb filetype per https://www.iana.org/assignments/media-types/application/vnd.debian.binary-package
|
||||
* Update sqlite filetype per https://www.iana.org/assignments/media-types/application/vnd.sqlite3
|
||||
* Merge pull request #72 from turn88/master
|
||||
* Update document.go
|
||||
* Update document.go
|
||||
* Update document.go
|
||||
* add matchers for office 2003
|
||||
|
||||
v1.0.10 / 2019-08-06
|
||||
====================
|
||||
|
||||
* Merge pull request #76 from lex-r/fix-matroska-detection
|
||||
* fix: mkv and webm types detection
|
||||
|
||||
v1.0.9 / 2019-07-25
|
||||
===================
|
||||
|
||||
* Merge pull request #75 from Trane9991/master
|
||||
* add video/3gpp support
|
||||
* fix: use proper iso file mime type
|
||||
* feat: add iso image format
|
||||
* Merge pull request #65 from Fentonz/master
|
||||
* Merge pull request #70 from fanpei91/master
|
||||
* add image/vnd.dwg to README
|
||||
* add image/vnd.dwg support
|
||||
* Added support for .iso files
|
||||
|
||||
v1.0.8 / 2019-02-10
|
||||
===================
|
||||
|
||||
* refactor(images): heic -> heif
|
||||
* feat(docs): add heif format
|
||||
* Merge pull request #60 from rikonor/master
|
||||
* add heif/heic support
|
||||
* fix(docs): dicom -> dcm
|
||||
* feat: add dicom type
|
||||
* Merge pull request #58 from Fentonz/master
|
||||
* Merge pull request #59 from kmanley/master
|
||||
* fix example; related to h2non/filetype#43
|
||||
* Added DICOM type to archive
|
||||
|
||||
|
||||
v1.0.7 / 2019-02-09
|
||||
===================
|
||||
|
||||
* Merge pull request #56 from akupila/wasm
|
||||
* add wasm to readme
|
||||
* detect wasm file type
|
||||
|
||||
v1.0.6 / 2019-01-22
|
||||
===================
|
||||
|
||||
* Merge pull request #55 from ivanlemeshev/master
|
||||
* Added ftypmp4v to MP4 matcher
|
||||
* Merge pull request #54 from aofei/master
|
||||
* chore: add support for Go modules
|
||||
* feat: add support for AAC (audio/aac)
|
||||
* Merge pull request #53 from lynxbyorion/check-for-docoments
|
||||
* Added checks for documents.
|
||||
* Merge pull request #51 from eriken/master
|
||||
* fixed bad mime and import paths
|
||||
* Merge pull request #50 from eriken/jpeg2000_support
|
||||
* fix import paths
|
||||
* jpeg2000 support
|
||||
* Merge pull request #47 from Ma124/master
|
||||
* Merge pull request #49 from amoore614/master
|
||||
* more robust check for .mov files
|
||||
* bugfix: reverse order of matcher key list so user registered matchers appear first
|
||||
* bugfix: store ptr to MatcherKeys in case user registered matchers are used.
|
||||
* update comment
|
||||
* Bump buffer size to 8K to allow for more custom file matching
|
||||
* refactor(readme): update package import path
|
||||
* Merge pull request #48 from kumakichi/support_msooxml
|
||||
* do not use v1
|
||||
* ok, master already changed travis
|
||||
* add fixtures, but MatchReader may not work for some msooxml files, 4096 bytes maybe not enough
|
||||
* support ms ooxml, #40
|
||||
* Fixed misspells
|
||||
* fix(travis): use string notation for matrix items
|
||||
* Merge pull request #42 from bruth/patch-2
|
||||
* refactor(travis): remove Go 1.6, add Go 1.10
|
||||
* Change maximum bytes required for detection
|
||||
* Merge pull request #36 from yiiTT/patch-1
|
||||
* Add MP4 dash and additional ISO formats
|
||||
* Merge pull request #34 from RangelReale/fix-mp4-case
|
||||
* Merge pull request #32 from yiiTT/fix-m4v
|
||||
* Fixed mp4 detection case-sensitivity according to http://www.ftyps.com/
|
||||
* Fix M4v matcher
|
||||
|
||||
v1.0.5 / 2017-12-12
|
||||
===================
|
||||
|
||||
* Merge pull request #30 from RangelReale/fix_mp4
|
||||
* Fix duplicated item in mp4 fix
|
||||
* Fix MP4 matcher, with information from http://www.file-recovery.com/mp4-signature-format.htm
|
||||
* Merge pull request #28 from ikovic/master
|
||||
* Updated file header example.
|
||||
|
||||
v1.0.4 / 2017-11-29
|
||||
===================
|
||||
|
||||
* fix: tests and document types matchers
|
||||
* refactor(docs): remove codesponsor
|
||||
* Merge pull request #26 from bienkma/master
|
||||
* Add support check file type: .doc, .docx, .pptx, .ppt, .xls, .xlsx
|
||||
* feat(docs): add code sponsor banner
|
||||
* feat(travis): add go 1.9
|
||||
* Merge pull request #24 from strazzere/patch-1
|
||||
* Fix typo in unknown
|
||||
|
||||
v1.0.3 / 2017-08-03
|
||||
===================
|
||||
|
||||
* Merge pull request #21 from elemeta/master
|
||||
* Add Elf file as supported matcher archive type
|
||||
|
||||
v1.0.2 / 2017-07-26
|
||||
===================
|
||||
|
||||
* Merge pull request #20 from marshyski/master
|
||||
* Added RedHat RPM as supported matcher archive type
|
||||
* Merge pull request #19 from nlamirault/patch-1
|
||||
* Fix typo in documentation
|
||||
|
||||
v1.0.1 / 2017-02-24
|
||||
===================
|
||||
|
||||
* Merge pull request #18 from Impyy/enable-webm
|
||||
* Enable the webm matcher
|
||||
* feat(docs): add Go version badge
|
||||
|
||||
1.0.0 / 2016-12-11
|
||||
==================
|
||||
|
||||
- Initial stable version (v1.0.0).
|
||||
24
vendor/github.com/h2non/filetype/LICENSE
generated
vendored
Normal file
24
vendor/github.com/h2non/filetype/LICENSE
generated
vendored
Normal file
|
|
@ -0,0 +1,24 @@
|
|||
The MIT License
|
||||
|
||||
Copyright (c) Tomas Aparicio
|
||||
|
||||
Permission is hereby granted, free of charge, to any person
|
||||
obtaining a copy of this software and associated documentation
|
||||
files (the "Software"), to deal in the Software without
|
||||
restriction, including without limitation the rights to use,
|
||||
copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the
|
||||
Software is furnished to do so, subject to the following
|
||||
conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be
|
||||
included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
||||
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
||||
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
||||
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
||||
OTHER DEALINGS IN THE SOFTWARE.
|
||||
294
vendor/github.com/h2non/filetype/README.md
generated
vendored
Normal file
294
vendor/github.com/h2non/filetype/README.md
generated
vendored
Normal file
|
|
@ -0,0 +1,294 @@
|
|||
# filetype [](https://travis-ci.org/h2non/filetype) [](https://godoc.org/github.com/h2non/filetype) [](http://goreportcard.com/report/h2non/filetype) [](https://github.com/h2non/gentleman)
|
||||
|
||||
Small and dependency free [Go](https://golang.org) package to infer file and MIME type checking the [magic numbers](<https://en.wikipedia.org/wiki/Magic_number_(programming)#Magic_numbers_in_files>) signature.
|
||||
|
||||
For SVG file type checking, see [go-is-svg](https://github.com/h2non/go-is-svg) package. Python port: [filetype.py](https://github.com/h2non/filetype.py).
|
||||
|
||||
## Features
|
||||
|
||||
- Supports a [wide range](#supported-types) of file types
|
||||
- Provides file extension and proper MIME type
|
||||
- File discovery by extension or MIME type
|
||||
- File discovery by class (image, video, audio...)
|
||||
- Provides a bunch of helpers and file matching shortcuts
|
||||
- [Pluggable](#add-additional-file-type-matchers): add custom new types and matchers
|
||||
- Simple and semantic API
|
||||
- [Blazing fast](#benchmarks), even processing large files
|
||||
- Only first 262 bytes representing the max file header is required, so you can just [pass a slice](#file-header)
|
||||
- Dependency free (just Go code, no C compilation needed)
|
||||
- Cross-platform file recognition
|
||||
|
||||
## Installation
|
||||
|
||||
```bash
|
||||
go get github.com/h2non/filetype
|
||||
```
|
||||
|
||||
## API
|
||||
|
||||
See [Godoc](https://godoc.org/github.com/h2non/filetype) reference.
|
||||
|
||||
### Subpackages
|
||||
|
||||
- [`github.com/h2non/filetype/types`](https://godoc.org/github.com/h2non/filetype/types)
|
||||
- [`github.com/h2non/filetype/matchers`](https://godoc.org/github.com/h2non/filetype/matchers)
|
||||
|
||||
## Examples
|
||||
|
||||
#### Simple file type checking
|
||||
|
||||
```go
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
|
||||
"github.com/h2non/filetype"
|
||||
)
|
||||
|
||||
func main() {
|
||||
buf, _ := ioutil.ReadFile("sample.jpg")
|
||||
|
||||
kind, _ := filetype.Match(buf)
|
||||
if kind == filetype.Unknown {
|
||||
fmt.Println("Unknown file type")
|
||||
return
|
||||
}
|
||||
|
||||
fmt.Printf("File type: %s. MIME: %s\n", kind.Extension, kind.MIME.Value)
|
||||
}
|
||||
```
|
||||
|
||||
#### Check type class
|
||||
|
||||
```go
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
|
||||
"github.com/h2non/filetype"
|
||||
)
|
||||
|
||||
func main() {
|
||||
buf, _ := ioutil.ReadFile("sample.jpg")
|
||||
|
||||
if filetype.IsImage(buf) {
|
||||
fmt.Println("File is an image")
|
||||
} else {
|
||||
fmt.Println("Not an image")
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
#### Supported type
|
||||
|
||||
```go
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/h2non/filetype"
|
||||
)
|
||||
|
||||
func main() {
|
||||
// Check if file is supported by extension
|
||||
if filetype.IsSupported("jpg") {
|
||||
fmt.Println("Extension supported")
|
||||
} else {
|
||||
fmt.Println("Extension not supported")
|
||||
}
|
||||
|
||||
// Check if file is supported by extension
|
||||
if filetype.IsMIMESupported("image/jpeg") {
|
||||
fmt.Println("MIME type supported")
|
||||
} else {
|
||||
fmt.Println("MIME type not supported")
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
#### File header
|
||||
|
||||
```go
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
|
||||
"github.com/h2non/filetype"
|
||||
)
|
||||
|
||||
func main() {
|
||||
// Open a file descriptor
|
||||
file, _ := os.Open("movie.mp4")
|
||||
|
||||
// We only have to pass the file header = first 261 bytes
|
||||
head := make([]byte, 261)
|
||||
file.Read(head)
|
||||
|
||||
if filetype.IsImage(head) {
|
||||
fmt.Println("File is an image")
|
||||
} else {
|
||||
fmt.Println("Not an image")
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
#### Add additional file type matchers
|
||||
|
||||
```go
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/h2non/filetype"
|
||||
)
|
||||
|
||||
var fooType = filetype.NewType("foo", "foo/foo")
|
||||
|
||||
func fooMatcher(buf []byte) bool {
|
||||
return len(buf) > 1 && buf[0] == 0x01 && buf[1] == 0x02
|
||||
}
|
||||
|
||||
func main() {
|
||||
// Register the new matcher and its type
|
||||
filetype.AddMatcher(fooType, fooMatcher)
|
||||
|
||||
// Check if the new type is supported by extension
|
||||
if filetype.IsSupported("foo") {
|
||||
fmt.Println("New supported type: foo")
|
||||
}
|
||||
|
||||
// Check if the new type is supported by MIME
|
||||
if filetype.IsMIMESupported("foo/foo") {
|
||||
fmt.Println("New supported MIME type: foo/foo")
|
||||
}
|
||||
|
||||
// Try to match the file
|
||||
fooFile := []byte{0x01, 0x02}
|
||||
kind, _ := filetype.Match(fooFile)
|
||||
if kind == filetype.Unknown {
|
||||
fmt.Println("Unknown file type")
|
||||
} else {
|
||||
fmt.Printf("File type matched: %s\n", kind.Extension)
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Supported types
|
||||
|
||||
#### Image
|
||||
|
||||
- **jpg** - `image/jpeg`
|
||||
- **png** - `image/png`
|
||||
- **gif** - `image/gif`
|
||||
- **webp** - `image/webp`
|
||||
- **cr2** - `image/x-canon-cr2`
|
||||
- **tif** - `image/tiff`
|
||||
- **bmp** - `image/bmp`
|
||||
- **heif** - `image/heif`
|
||||
- **jxr** - `image/vnd.ms-photo`
|
||||
- **psd** - `image/vnd.adobe.photoshop`
|
||||
- **ico** - `image/vnd.microsoft.icon`
|
||||
- **dwg** - `image/vnd.dwg`
|
||||
|
||||
#### Video
|
||||
|
||||
- **mp4** - `video/mp4`
|
||||
- **m4v** - `video/x-m4v`
|
||||
- **mkv** - `video/x-matroska`
|
||||
- **webm** - `video/webm`
|
||||
- **mov** - `video/quicktime`
|
||||
- **avi** - `video/x-msvideo`
|
||||
- **wmv** - `video/x-ms-wmv`
|
||||
- **mpg** - `video/mpeg`
|
||||
- **flv** - `video/x-flv`
|
||||
- **3gp** - `video/3gpp`
|
||||
|
||||
#### Audio
|
||||
|
||||
- **mid** - `audio/midi`
|
||||
- **mp3** - `audio/mpeg`
|
||||
- **m4a** - `audio/m4a`
|
||||
- **ogg** - `audio/ogg`
|
||||
- **flac** - `audio/x-flac`
|
||||
- **wav** - `audio/x-wav`
|
||||
- **amr** - `audio/amr`
|
||||
- **aac** - `audio/aac`
|
||||
- **aiff** - `audio/x-aiff`
|
||||
|
||||
#### Archive
|
||||
|
||||
- **epub** - `application/epub+zip`
|
||||
- **zip** - `application/zip`
|
||||
- **tar** - `application/x-tar`
|
||||
- **rar** - `application/vnd.rar`
|
||||
- **gz** - `application/gzip`
|
||||
- **bz2** - `application/x-bzip2`
|
||||
- **7z** - `application/x-7z-compressed`
|
||||
- **xz** - `application/x-xz`
|
||||
- **zstd** - `application/zstd`
|
||||
- **pdf** - `application/pdf`
|
||||
- **exe** - `application/vnd.microsoft.portable-executable`
|
||||
- **swf** - `application/x-shockwave-flash`
|
||||
- **rtf** - `application/rtf`
|
||||
- **iso** - `application/x-iso9660-image`
|
||||
- **eot** - `application/octet-stream`
|
||||
- **ps** - `application/postscript`
|
||||
- **sqlite** - `application/vnd.sqlite3`
|
||||
- **nes** - `application/x-nintendo-nes-rom`
|
||||
- **crx** - `application/x-google-chrome-extension`
|
||||
- **cab** - `application/vnd.ms-cab-compressed`
|
||||
- **deb** - `application/vnd.debian.binary-package`
|
||||
- **ar** - `application/x-unix-archive`
|
||||
- **Z** - `application/x-compress`
|
||||
- **lz** - `application/x-lzip`
|
||||
- **rpm** - `application/x-rpm`
|
||||
- **elf** - `application/x-executable`
|
||||
- **dcm** - `application/dicom`
|
||||
|
||||
#### Documents
|
||||
|
||||
- **doc** - `application/msword`
|
||||
- **docx** - `application/vnd.openxmlformats-officedocument.wordprocessingml.document`
|
||||
- **xls** - `application/vnd.ms-excel`
|
||||
- **xlsx** - `application/vnd.openxmlformats-officedocument.spreadsheetml.sheet`
|
||||
- **ppt** - `application/vnd.ms-powerpoint`
|
||||
- **pptx** - `application/vnd.openxmlformats-officedocument.presentationml.presentation`
|
||||
|
||||
#### Font
|
||||
|
||||
- **woff** - `application/font-woff`
|
||||
- **woff2** - `application/font-woff`
|
||||
- **ttf** - `application/font-sfnt`
|
||||
- **otf** - `application/font-sfnt`
|
||||
|
||||
#### Application
|
||||
|
||||
- **wasm** - `application/wasm`
|
||||
- **dex** - `application/vnd.android.dex`
|
||||
- **dey** - `application/vnd.android.dey`
|
||||
|
||||
## Benchmarks
|
||||
|
||||
Measured using [real files](https://github.com/h2non/filetype/tree/master/fixtures).
|
||||
|
||||
Environment: OSX x64 i7 2.7 Ghz
|
||||
|
||||
```bash
|
||||
BenchmarkMatchTar-8 1000000 1083 ns/op
|
||||
BenchmarkMatchZip-8 1000000 1162 ns/op
|
||||
BenchmarkMatchJpeg-8 1000000 1280 ns/op
|
||||
BenchmarkMatchGif-8 1000000 1315 ns/op
|
||||
BenchmarkMatchPng-8 1000000 1121 ns/op
|
||||
```
|
||||
|
||||
## License
|
||||
|
||||
MIT - Tomas Aparicio
|
||||
102
vendor/github.com/h2non/filetype/filetype.go
generated
vendored
Normal file
102
vendor/github.com/h2non/filetype/filetype.go
generated
vendored
Normal file
|
|
@ -0,0 +1,102 @@
|
|||
package filetype
|
||||
|
||||
import (
|
||||
"errors"
|
||||
|
||||
"github.com/h2non/filetype/matchers"
|
||||
"github.com/h2non/filetype/types"
|
||||
)
|
||||
|
||||
// Types stores a map of supported types
|
||||
var Types = types.Types
|
||||
|
||||
// NewType creates and registers a new type
|
||||
var NewType = types.NewType
|
||||
|
||||
// Unknown represents an unknown file type
|
||||
var Unknown = types.Unknown
|
||||
|
||||
// ErrEmptyBuffer represents an empty buffer error
|
||||
var ErrEmptyBuffer = errors.New("Empty buffer")
|
||||
|
||||
// ErrUnknownBuffer represents a unknown buffer error
|
||||
var ErrUnknownBuffer = errors.New("Unknown buffer type")
|
||||
|
||||
// AddType registers a new file type
|
||||
func AddType(ext, mime string) types.Type {
|
||||
return types.NewType(ext, mime)
|
||||
}
|
||||
|
||||
// Is checks if a given buffer matches with the given file type extension
|
||||
func Is(buf []byte, ext string) bool {
|
||||
kind := types.Get(ext)
|
||||
if kind != types.Unknown {
|
||||
return IsType(buf, kind)
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// IsExtension semantic alias to Is()
|
||||
func IsExtension(buf []byte, ext string) bool {
|
||||
return Is(buf, ext)
|
||||
}
|
||||
|
||||
// IsType checks if a given buffer matches with the given file type
|
||||
func IsType(buf []byte, kind types.Type) bool {
|
||||
matcher := matchers.Matchers[kind]
|
||||
if matcher == nil {
|
||||
return false
|
||||
}
|
||||
return matcher(buf) != types.Unknown
|
||||
}
|
||||
|
||||
// IsMIME checks if a given buffer matches with the given MIME type
|
||||
func IsMIME(buf []byte, mime string) bool {
|
||||
result := false
|
||||
types.Types.Range(func(k, v interface{}) bool {
|
||||
kind := v.(types.Type)
|
||||
if kind.MIME.Value == mime {
|
||||
matcher := matchers.Matchers[kind]
|
||||
result = matcher(buf) != types.Unknown
|
||||
return false
|
||||
}
|
||||
return true
|
||||
})
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
// IsSupported checks if a given file extension is supported
|
||||
func IsSupported(ext string) bool {
|
||||
result := false
|
||||
types.Types.Range(func(k, v interface{}) bool {
|
||||
key := k.(string)
|
||||
if key == ext {
|
||||
result = true
|
||||
return false
|
||||
}
|
||||
return true
|
||||
})
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
// IsMIMESupported checks if a given MIME type is supported
|
||||
func IsMIMESupported(mime string) bool {
|
||||
result := false
|
||||
types.Types.Range(func(k, v interface{}) bool {
|
||||
kind := v.(types.Type)
|
||||
if kind.MIME.Value == mime {
|
||||
result = true
|
||||
return false
|
||||
}
|
||||
return true
|
||||
})
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
// GetType retrieves a Type by file extension
|
||||
func GetType(ext string) types.Type {
|
||||
return types.Get(ext)
|
||||
}
|
||||
91
vendor/github.com/h2non/filetype/kind.go
generated
vendored
Normal file
91
vendor/github.com/h2non/filetype/kind.go
generated
vendored
Normal file
|
|
@ -0,0 +1,91 @@
|
|||
package filetype
|
||||
|
||||
import (
|
||||
"github.com/h2non/filetype/matchers"
|
||||
"github.com/h2non/filetype/types"
|
||||
)
|
||||
|
||||
// Image tries to match a file as image type
|
||||
func Image(buf []byte) (types.Type, error) {
|
||||
return doMatchMap(buf, matchers.Image)
|
||||
}
|
||||
|
||||
// IsImage checks if the given buffer is an image type
|
||||
func IsImage(buf []byte) bool {
|
||||
kind, _ := Image(buf)
|
||||
return kind != types.Unknown
|
||||
}
|
||||
|
||||
// Audio tries to match a file as audio type
|
||||
func Audio(buf []byte) (types.Type, error) {
|
||||
return doMatchMap(buf, matchers.Audio)
|
||||
}
|
||||
|
||||
// IsAudio checks if the given buffer is an audio type
|
||||
func IsAudio(buf []byte) bool {
|
||||
kind, _ := Audio(buf)
|
||||
return kind != types.Unknown
|
||||
}
|
||||
|
||||
// Video tries to match a file as video type
|
||||
func Video(buf []byte) (types.Type, error) {
|
||||
return doMatchMap(buf, matchers.Video)
|
||||
}
|
||||
|
||||
// IsVideo checks if the given buffer is a video type
|
||||
func IsVideo(buf []byte) bool {
|
||||
kind, _ := Video(buf)
|
||||
return kind != types.Unknown
|
||||
}
|
||||
|
||||
// Font tries to match a file as text font type
|
||||
func Font(buf []byte) (types.Type, error) {
|
||||
return doMatchMap(buf, matchers.Font)
|
||||
}
|
||||
|
||||
// IsFont checks if the given buffer is a font type
|
||||
func IsFont(buf []byte) bool {
|
||||
kind, _ := Font(buf)
|
||||
return kind != types.Unknown
|
||||
}
|
||||
|
||||
// Archive tries to match a file as generic archive type
|
||||
func Archive(buf []byte) (types.Type, error) {
|
||||
return doMatchMap(buf, matchers.Archive)
|
||||
}
|
||||
|
||||
// IsArchive checks if the given buffer is an archive type
|
||||
func IsArchive(buf []byte) bool {
|
||||
kind, _ := Archive(buf)
|
||||
return kind != types.Unknown
|
||||
}
|
||||
|
||||
// Document tries to match a file as document type
|
||||
func Document(buf []byte) (types.Type, error) {
|
||||
return doMatchMap(buf, matchers.Document)
|
||||
}
|
||||
|
||||
// IsDocument checks if the given buffer is an document type
|
||||
func IsDocument(buf []byte) bool {
|
||||
kind, _ := Document(buf)
|
||||
return kind != types.Unknown
|
||||
}
|
||||
|
||||
// Application tries to match a file as an application type
|
||||
func Application(buf []byte) (types.Type, error) {
|
||||
return doMatchMap(buf, matchers.Application)
|
||||
}
|
||||
|
||||
// IsApplication checks if the given buffer is an application type
|
||||
func IsApplication(buf []byte) bool {
|
||||
kind, _ := Application(buf)
|
||||
return kind != types.Unknown
|
||||
}
|
||||
|
||||
func doMatchMap(buf []byte, machers matchers.Map) (types.Type, error) {
|
||||
kind := MatchMap(buf, machers)
|
||||
if kind != types.Unknown {
|
||||
return kind, nil
|
||||
}
|
||||
return kind, ErrUnknownBuffer
|
||||
}
|
||||
90
vendor/github.com/h2non/filetype/match.go
generated
vendored
Normal file
90
vendor/github.com/h2non/filetype/match.go
generated
vendored
Normal file
|
|
@ -0,0 +1,90 @@
|
|||
package filetype
|
||||
|
||||
import (
|
||||
"io"
|
||||
"os"
|
||||
|
||||
"github.com/h2non/filetype/matchers"
|
||||
"github.com/h2non/filetype/types"
|
||||
)
|
||||
|
||||
// Matchers is an alias to matchers.Matchers
|
||||
var Matchers = matchers.Matchers
|
||||
|
||||
// MatcherKeys is an alias to matchers.MatcherKeys
|
||||
var MatcherKeys = &matchers.MatcherKeys
|
||||
|
||||
// NewMatcher is an alias to matchers.NewMatcher
|
||||
var NewMatcher = matchers.NewMatcher
|
||||
|
||||
// Match infers the file type of a given buffer inspecting its magic numbers signature
|
||||
func Match(buf []byte) (types.Type, error) {
|
||||
length := len(buf)
|
||||
if length == 0 {
|
||||
return types.Unknown, ErrEmptyBuffer
|
||||
}
|
||||
|
||||
for _, kind := range *MatcherKeys {
|
||||
checker := Matchers[kind]
|
||||
match := checker(buf)
|
||||
if match != types.Unknown && match.Extension != "" {
|
||||
return match, nil
|
||||
}
|
||||
}
|
||||
|
||||
return types.Unknown, nil
|
||||
}
|
||||
|
||||
// Get is an alias to Match()
|
||||
func Get(buf []byte) (types.Type, error) {
|
||||
return Match(buf)
|
||||
}
|
||||
|
||||
// MatchFile infers a file type for a file
|
||||
func MatchFile(filepath string) (types.Type, error) {
|
||||
file, err := os.Open(filepath)
|
||||
if err != nil {
|
||||
return types.Unknown, err
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
return MatchReader(file)
|
||||
}
|
||||
|
||||
// MatchReader is convenient wrapper to Match() any Reader
|
||||
func MatchReader(reader io.Reader) (types.Type, error) {
|
||||
buffer := make([]byte, 8192) // 8K makes msooxml tests happy and allows for expanded custom file checks
|
||||
|
||||
_, err := reader.Read(buffer)
|
||||
if err != nil && err != io.EOF {
|
||||
return types.Unknown, err
|
||||
}
|
||||
|
||||
return Match(buffer)
|
||||
}
|
||||
|
||||
// AddMatcher registers a new matcher type
|
||||
func AddMatcher(fileType types.Type, matcher matchers.Matcher) matchers.TypeMatcher {
|
||||
return matchers.NewMatcher(fileType, matcher)
|
||||
}
|
||||
|
||||
// Matches checks if the given buffer matches with some supported file type
|
||||
func Matches(buf []byte) bool {
|
||||
kind, _ := Match(buf)
|
||||
return kind != types.Unknown
|
||||
}
|
||||
|
||||
// MatchMap performs a file matching against a map of match functions
|
||||
func MatchMap(buf []byte, matchers matchers.Map) types.Type {
|
||||
for kind, matcher := range matchers {
|
||||
if matcher(buf) {
|
||||
return kind
|
||||
}
|
||||
}
|
||||
return types.Unknown
|
||||
}
|
||||
|
||||
// MatchesMap is an alias to Matches() but using matching against a map of match functions
|
||||
func MatchesMap(buf []byte, matchers matchers.Map) bool {
|
||||
return MatchMap(buf, matchers) != types.Unknown
|
||||
}
|
||||
43
vendor/github.com/h2non/filetype/matchers/application.go
generated
vendored
Normal file
43
vendor/github.com/h2non/filetype/matchers/application.go
generated
vendored
Normal file
|
|
@ -0,0 +1,43 @@
|
|||
package matchers
|
||||
|
||||
var (
|
||||
TypeWasm = newType("wasm", "application/wasm")
|
||||
TypeDex = newType("dex", "application/vnd.android.dex")
|
||||
TypeDey = newType("dey", "application/vnd.android.dey")
|
||||
)
|
||||
|
||||
var Application = Map{
|
||||
TypeWasm: Wasm,
|
||||
TypeDex: Dex,
|
||||
TypeDey: Dey,
|
||||
}
|
||||
|
||||
// Wasm detects a Web Assembly 1.0 filetype.
|
||||
func Wasm(buf []byte) bool {
|
||||
// WASM has starts with `\0asm`, followed by the version.
|
||||
// http://webassembly.github.io/spec/core/binary/modules.html#binary-magic
|
||||
return len(buf) >= 8 &&
|
||||
buf[0] == 0x00 && buf[1] == 0x61 &&
|
||||
buf[2] == 0x73 && buf[3] == 0x6D &&
|
||||
buf[4] == 0x01 && buf[5] == 0x00 &&
|
||||
buf[6] == 0x00 && buf[7] == 0x00
|
||||
}
|
||||
|
||||
// Dex detects dalvik executable(DEX)
|
||||
func Dex(buf []byte) bool {
|
||||
// https://source.android.com/devices/tech/dalvik/dex-format#dex-file-magic
|
||||
return len(buf) > 36 &&
|
||||
// magic
|
||||
buf[0] == 0x64 && buf[1] == 0x65 && buf[2] == 0x78 && buf[3] == 0x0A &&
|
||||
// file sise
|
||||
buf[36] == 0x70
|
||||
}
|
||||
|
||||
// Dey Optimized Dalvik Executable(ODEX)
|
||||
func Dey(buf []byte) bool {
|
||||
return len(buf) > 100 &&
|
||||
// dey magic
|
||||
buf[0] == 0x64 && buf[1] == 0x65 && buf[2] == 0x79 && buf[3] == 0x0A &&
|
||||
// dex
|
||||
Dex(buf[40:100])
|
||||
}
|
||||
211
vendor/github.com/h2non/filetype/matchers/archive.go
generated
vendored
Normal file
211
vendor/github.com/h2non/filetype/matchers/archive.go
generated
vendored
Normal file
|
|
@ -0,0 +1,211 @@
|
|||
package matchers
|
||||
|
||||
import "encoding/binary"
|
||||
|
||||
const (
|
||||
ZstdMagicSkippableStart = 0x184D2A50
|
||||
ZstdMagicSkippableMask = 0xFFFFFFF0
|
||||
)
|
||||
|
||||
var (
|
||||
TypeEpub = newType("epub", "application/epub+zip")
|
||||
TypeZip = newType("zip", "application/zip")
|
||||
TypeTar = newType("tar", "application/x-tar")
|
||||
TypeRar = newType("rar", "application/vnd.rar")
|
||||
TypeGz = newType("gz", "application/gzip")
|
||||
TypeBz2 = newType("bz2", "application/x-bzip2")
|
||||
Type7z = newType("7z", "application/x-7z-compressed")
|
||||
TypeXz = newType("xz", "application/x-xz")
|
||||
TypeZstd = newType("zst", "application/zstd")
|
||||
TypePdf = newType("pdf", "application/pdf")
|
||||
TypeExe = newType("exe", "application/vnd.microsoft.portable-executable")
|
||||
TypeSwf = newType("swf", "application/x-shockwave-flash")
|
||||
TypeRtf = newType("rtf", "application/rtf")
|
||||
TypeEot = newType("eot", "application/octet-stream")
|
||||
TypePs = newType("ps", "application/postscript")
|
||||
TypeSqlite = newType("sqlite", "application/vnd.sqlite3")
|
||||
TypeNes = newType("nes", "application/x-nintendo-nes-rom")
|
||||
TypeCrx = newType("crx", "application/x-google-chrome-extension")
|
||||
TypeCab = newType("cab", "application/vnd.ms-cab-compressed")
|
||||
TypeDeb = newType("deb", "application/vnd.debian.binary-package")
|
||||
TypeAr = newType("ar", "application/x-unix-archive")
|
||||
TypeZ = newType("Z", "application/x-compress")
|
||||
TypeLz = newType("lz", "application/x-lzip")
|
||||
TypeRpm = newType("rpm", "application/x-rpm")
|
||||
TypeElf = newType("elf", "application/x-executable")
|
||||
TypeDcm = newType("dcm", "application/dicom")
|
||||
TypeIso = newType("iso", "application/x-iso9660-image")
|
||||
TypeMachO = newType("macho", "application/x-mach-binary") // Mach-O binaries have no common extension.
|
||||
)
|
||||
|
||||
var Archive = Map{
|
||||
TypeEpub: bytePrefixMatcher(epubMagic),
|
||||
TypeZip: Zip,
|
||||
TypeTar: Tar,
|
||||
TypeRar: Rar,
|
||||
TypeGz: bytePrefixMatcher(gzMagic),
|
||||
TypeBz2: bytePrefixMatcher(bz2Magic),
|
||||
Type7z: bytePrefixMatcher(sevenzMagic),
|
||||
TypeXz: bytePrefixMatcher(xzMagic),
|
||||
TypeZstd: Zst,
|
||||
TypePdf: bytePrefixMatcher(pdfMagic),
|
||||
TypeExe: bytePrefixMatcher(exeMagic),
|
||||
TypeSwf: Swf,
|
||||
TypeRtf: bytePrefixMatcher(rtfMagic),
|
||||
TypeEot: Eot,
|
||||
TypePs: bytePrefixMatcher(psMagic),
|
||||
TypeSqlite: bytePrefixMatcher(sqliteMagic),
|
||||
TypeNes: bytePrefixMatcher(nesMagic),
|
||||
TypeCrx: bytePrefixMatcher(crxMagic),
|
||||
TypeCab: Cab,
|
||||
TypeDeb: bytePrefixMatcher(debMagic),
|
||||
TypeAr: bytePrefixMatcher(arMagic),
|
||||
TypeZ: Z,
|
||||
TypeLz: bytePrefixMatcher(lzMagic),
|
||||
TypeRpm: Rpm,
|
||||
TypeElf: Elf,
|
||||
TypeDcm: Dcm,
|
||||
TypeIso: Iso,
|
||||
TypeMachO: MachO,
|
||||
}
|
||||
|
||||
var (
|
||||
epubMagic = []byte{
|
||||
0x50, 0x4B, 0x03, 0x04, 0x6D, 0x69, 0x6D, 0x65,
|
||||
0x74, 0x79, 0x70, 0x65, 0x61, 0x70, 0x70, 0x6C,
|
||||
0x69, 0x63, 0x61, 0x74, 0x69, 0x6F, 0x6E, 0x2F,
|
||||
0x65, 0x70, 0x75, 0x62, 0x2B, 0x7A, 0x69, 0x70,
|
||||
}
|
||||
gzMagic = []byte{0x1F, 0x8B, 0x08}
|
||||
bz2Magic = []byte{0x42, 0x5A, 0x68}
|
||||
sevenzMagic = []byte{0x37, 0x7A, 0xBC, 0xAF, 0x27, 0x1C}
|
||||
pdfMagic = []byte{0x25, 0x50, 0x44, 0x46}
|
||||
exeMagic = []byte{0x4D, 0x5A}
|
||||
rtfMagic = []byte{0x7B, 0x5C, 0x72, 0x74, 0x66}
|
||||
nesMagic = []byte{0x4E, 0x45, 0x53, 0x1A}
|
||||
crxMagic = []byte{0x43, 0x72, 0x32, 0x34}
|
||||
psMagic = []byte{0x25, 0x21}
|
||||
xzMagic = []byte{0xFD, 0x37, 0x7A, 0x58, 0x5A, 0x00}
|
||||
sqliteMagic = []byte{0x53, 0x51, 0x4C, 0x69}
|
||||
debMagic = []byte{
|
||||
0x21, 0x3C, 0x61, 0x72, 0x63, 0x68, 0x3E, 0x0A,
|
||||
0x64, 0x65, 0x62, 0x69, 0x61, 0x6E, 0x2D, 0x62,
|
||||
0x69, 0x6E, 0x61, 0x72, 0x79,
|
||||
}
|
||||
arMagic = []byte{0x21, 0x3C, 0x61, 0x72, 0x63, 0x68, 0x3E}
|
||||
zstdMagic = []byte{0x28, 0xB5, 0x2F, 0xFD}
|
||||
lzMagic = []byte{0x4C, 0x5A, 0x49, 0x50}
|
||||
)
|
||||
|
||||
func bytePrefixMatcher(magicPattern []byte) Matcher {
|
||||
return func(data []byte) bool {
|
||||
return compareBytes(data, magicPattern, 0)
|
||||
}
|
||||
}
|
||||
|
||||
func Zip(buf []byte) bool {
|
||||
return len(buf) > 3 &&
|
||||
buf[0] == 0x50 && buf[1] == 0x4B &&
|
||||
(buf[2] == 0x3 || buf[2] == 0x5 || buf[2] == 0x7) &&
|
||||
(buf[3] == 0x4 || buf[3] == 0x6 || buf[3] == 0x8)
|
||||
}
|
||||
|
||||
func Tar(buf []byte) bool {
|
||||
return len(buf) > 261 &&
|
||||
buf[257] == 0x75 && buf[258] == 0x73 &&
|
||||
buf[259] == 0x74 && buf[260] == 0x61 &&
|
||||
buf[261] == 0x72
|
||||
}
|
||||
|
||||
func Rar(buf []byte) bool {
|
||||
return len(buf) > 6 &&
|
||||
buf[0] == 0x52 && buf[1] == 0x61 && buf[2] == 0x72 &&
|
||||
buf[3] == 0x21 && buf[4] == 0x1A && buf[5] == 0x7 &&
|
||||
(buf[6] == 0x0 || buf[6] == 0x1)
|
||||
}
|
||||
|
||||
func Swf(buf []byte) bool {
|
||||
return len(buf) > 2 &&
|
||||
(buf[0] == 0x43 || buf[0] == 0x46) &&
|
||||
buf[1] == 0x57 && buf[2] == 0x53
|
||||
}
|
||||
|
||||
func Cab(buf []byte) bool {
|
||||
return len(buf) > 3 &&
|
||||
((buf[0] == 0x4D && buf[1] == 0x53 && buf[2] == 0x43 && buf[3] == 0x46) ||
|
||||
(buf[0] == 0x49 && buf[1] == 0x53 && buf[2] == 0x63 && buf[3] == 0x28))
|
||||
}
|
||||
|
||||
func Eot(buf []byte) bool {
|
||||
return len(buf) > 35 &&
|
||||
buf[34] == 0x4C && buf[35] == 0x50 &&
|
||||
((buf[8] == 0x02 && buf[9] == 0x00 &&
|
||||
buf[10] == 0x01) || (buf[8] == 0x01 &&
|
||||
buf[9] == 0x00 && buf[10] == 0x00) ||
|
||||
(buf[8] == 0x02 && buf[9] == 0x00 &&
|
||||
buf[10] == 0x02))
|
||||
}
|
||||
|
||||
func Z(buf []byte) bool {
|
||||
return len(buf) > 1 &&
|
||||
((buf[0] == 0x1F && buf[1] == 0xA0) ||
|
||||
(buf[0] == 0x1F && buf[1] == 0x9D))
|
||||
}
|
||||
|
||||
func Rpm(buf []byte) bool {
|
||||
return len(buf) > 96 &&
|
||||
buf[0] == 0xED && buf[1] == 0xAB &&
|
||||
buf[2] == 0xEE && buf[3] == 0xDB
|
||||
}
|
||||
|
||||
func Elf(buf []byte) bool {
|
||||
return len(buf) > 52 &&
|
||||
buf[0] == 0x7F && buf[1] == 0x45 &&
|
||||
buf[2] == 0x4C && buf[3] == 0x46
|
||||
}
|
||||
|
||||
func Dcm(buf []byte) bool {
|
||||
return len(buf) > 131 &&
|
||||
buf[128] == 0x44 && buf[129] == 0x49 &&
|
||||
buf[130] == 0x43 && buf[131] == 0x4D
|
||||
}
|
||||
|
||||
func Iso(buf []byte) bool {
|
||||
return len(buf) > 32773 &&
|
||||
buf[32769] == 0x43 && buf[32770] == 0x44 &&
|
||||
buf[32771] == 0x30 && buf[32772] == 0x30 &&
|
||||
buf[32773] == 0x31
|
||||
}
|
||||
|
||||
func MachO(buf []byte) bool {
|
||||
return len(buf) > 3 && ((buf[0] == 0xFE && buf[1] == 0xED && buf[2] == 0xFA && buf[3] == 0xCF) ||
|
||||
(buf[0] == 0xFE && buf[1] == 0xED && buf[2] == 0xFA && buf[3] == 0xCE) ||
|
||||
(buf[0] == 0xBE && buf[1] == 0xBA && buf[2] == 0xFE && buf[3] == 0xCA) ||
|
||||
// Big endian versions below here...
|
||||
(buf[0] == 0xCF && buf[1] == 0xFA && buf[2] == 0xED && buf[3] == 0xFE) ||
|
||||
(buf[0] == 0xCE && buf[1] == 0xFA && buf[2] == 0xED && buf[3] == 0xFE) ||
|
||||
(buf[0] == 0xCA && buf[1] == 0xFE && buf[2] == 0xBA && buf[3] == 0xBE))
|
||||
}
|
||||
|
||||
// Zstandard compressed data is made of one or more frames.
|
||||
// There are two frame formats defined by Zstandard: Zstandard frames and Skippable frames.
|
||||
// See more details from https://tools.ietf.org/id/draft-kucherawy-dispatch-zstd-00.html#rfc.section.2
|
||||
func Zst(buf []byte) bool {
|
||||
if compareBytes(buf, zstdMagic, 0) {
|
||||
return true
|
||||
} else {
|
||||
// skippable frames
|
||||
if len(buf) < 8 {
|
||||
return false
|
||||
}
|
||||
if binary.LittleEndian.Uint32(buf[:4]) & ZstdMagicSkippableMask == ZstdMagicSkippableStart {
|
||||
userDataLength := binary.LittleEndian.Uint32(buf[4:8])
|
||||
if len(buf) < 8 + int(userDataLength) {
|
||||
return false
|
||||
}
|
||||
nextFrame := buf[8+userDataLength:]
|
||||
return Zst(nextFrame)
|
||||
}
|
||||
return false
|
||||
}
|
||||
}
|
||||
85
vendor/github.com/h2non/filetype/matchers/audio.go
generated
vendored
Normal file
85
vendor/github.com/h2non/filetype/matchers/audio.go
generated
vendored
Normal file
|
|
@ -0,0 +1,85 @@
|
|||
package matchers
|
||||
|
||||
var (
|
||||
TypeMidi = newType("mid", "audio/midi")
|
||||
TypeMp3 = newType("mp3", "audio/mpeg")
|
||||
TypeM4a = newType("m4a", "audio/m4a")
|
||||
TypeOgg = newType("ogg", "audio/ogg")
|
||||
TypeFlac = newType("flac", "audio/x-flac")
|
||||
TypeWav = newType("wav", "audio/x-wav")
|
||||
TypeAmr = newType("amr", "audio/amr")
|
||||
TypeAac = newType("aac", "audio/aac")
|
||||
TypeAiff = newType("aiff", "audio/x-aiff")
|
||||
)
|
||||
|
||||
var Audio = Map{
|
||||
TypeMidi: Midi,
|
||||
TypeMp3: Mp3,
|
||||
TypeM4a: M4a,
|
||||
TypeOgg: Ogg,
|
||||
TypeFlac: Flac,
|
||||
TypeWav: Wav,
|
||||
TypeAmr: Amr,
|
||||
TypeAac: Aac,
|
||||
TypeAiff: Aiff,
|
||||
}
|
||||
|
||||
func Midi(buf []byte) bool {
|
||||
return len(buf) > 3 &&
|
||||
buf[0] == 0x4D && buf[1] == 0x54 &&
|
||||
buf[2] == 0x68 && buf[3] == 0x64
|
||||
}
|
||||
|
||||
func Mp3(buf []byte) bool {
|
||||
return len(buf) > 2 &&
|
||||
((buf[0] == 0x49 && buf[1] == 0x44 && buf[2] == 0x33) ||
|
||||
(buf[0] == 0xFF && buf[1] == 0xfb))
|
||||
}
|
||||
|
||||
func M4a(buf []byte) bool {
|
||||
return len(buf) > 10 &&
|
||||
((buf[4] == 0x66 && buf[5] == 0x74 && buf[6] == 0x79 &&
|
||||
buf[7] == 0x70 && buf[8] == 0x4D && buf[9] == 0x34 && buf[10] == 0x41) ||
|
||||
(buf[0] == 0x4D && buf[1] == 0x34 && buf[2] == 0x41 && buf[3] == 0x20))
|
||||
}
|
||||
|
||||
func Ogg(buf []byte) bool {
|
||||
return len(buf) > 3 &&
|
||||
buf[0] == 0x4F && buf[1] == 0x67 &&
|
||||
buf[2] == 0x67 && buf[3] == 0x53
|
||||
}
|
||||
|
||||
func Flac(buf []byte) bool {
|
||||
return len(buf) > 3 &&
|
||||
buf[0] == 0x66 && buf[1] == 0x4C &&
|
||||
buf[2] == 0x61 && buf[3] == 0x43
|
||||
}
|
||||
|
||||
func Wav(buf []byte) bool {
|
||||
return len(buf) > 11 &&
|
||||
buf[0] == 0x52 && buf[1] == 0x49 &&
|
||||
buf[2] == 0x46 && buf[3] == 0x46 &&
|
||||
buf[8] == 0x57 && buf[9] == 0x41 &&
|
||||
buf[10] == 0x56 && buf[11] == 0x45
|
||||
}
|
||||
|
||||
func Amr(buf []byte) bool {
|
||||
return len(buf) > 11 &&
|
||||
buf[0] == 0x23 && buf[1] == 0x21 &&
|
||||
buf[2] == 0x41 && buf[3] == 0x4D &&
|
||||
buf[4] == 0x52 && buf[5] == 0x0A
|
||||
}
|
||||
|
||||
func Aac(buf []byte) bool {
|
||||
return len(buf) > 1 &&
|
||||
((buf[0] == 0xFF && buf[1] == 0xF1) ||
|
||||
(buf[0] == 0xFF && buf[1] == 0xF9))
|
||||
}
|
||||
|
||||
func Aiff(buf []byte) bool {
|
||||
return len(buf) > 11 &&
|
||||
buf[0] == 0x46 && buf[1] == 0x4F &&
|
||||
buf[2] == 0x52 && buf[3] == 0x4D &&
|
||||
buf[8] == 0x41 && buf[9] == 0x49 &&
|
||||
buf[10] == 0x46 && buf[11] == 0x46
|
||||
}
|
||||
197
vendor/github.com/h2non/filetype/matchers/document.go
generated
vendored
Normal file
197
vendor/github.com/h2non/filetype/matchers/document.go
generated
vendored
Normal file
|
|
@ -0,0 +1,197 @@
|
|||
package matchers
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/binary"
|
||||
)
|
||||
|
||||
var (
|
||||
TypeDoc = newType("doc", "application/msword")
|
||||
TypeDocx = newType("docx", "application/vnd.openxmlformats-officedocument.wordprocessingml.document")
|
||||
TypeXls = newType("xls", "application/vnd.ms-excel")
|
||||
TypeXlsx = newType("xlsx", "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet")
|
||||
TypePpt = newType("ppt", "application/vnd.ms-powerpoint")
|
||||
TypePptx = newType("pptx", "application/vnd.openxmlformats-officedocument.presentationml.presentation")
|
||||
)
|
||||
|
||||
var Document = Map{
|
||||
TypeDoc: Doc,
|
||||
TypeDocx: Docx,
|
||||
TypeXls: Xls,
|
||||
TypeXlsx: Xlsx,
|
||||
TypePpt: Ppt,
|
||||
TypePptx: Pptx,
|
||||
}
|
||||
|
||||
type docType int
|
||||
|
||||
const (
|
||||
TYPE_DOC docType = iota
|
||||
TYPE_DOCX
|
||||
TYPE_XLS
|
||||
TYPE_XLSX
|
||||
TYPE_PPT
|
||||
TYPE_PPTX
|
||||
TYPE_OOXML
|
||||
)
|
||||
|
||||
//reference: https://bz.apache.org/ooo/show_bug.cgi?id=111457
|
||||
func Doc(buf []byte) bool {
|
||||
if len(buf) > 513 {
|
||||
return buf[0] == 0xD0 && buf[1] == 0xCF &&
|
||||
buf[2] == 0x11 && buf[3] == 0xE0 &&
|
||||
buf[512] == 0xEC && buf[513] == 0xA5
|
||||
} else {
|
||||
return len(buf) > 3 &&
|
||||
buf[0] == 0xD0 && buf[1] == 0xCF &&
|
||||
buf[2] == 0x11 && buf[3] == 0xE0
|
||||
}
|
||||
}
|
||||
|
||||
func Docx(buf []byte) bool {
|
||||
typ, ok := msooxml(buf)
|
||||
return ok && typ == TYPE_DOCX
|
||||
}
|
||||
|
||||
func Xls(buf []byte) bool {
|
||||
if len(buf) > 513 {
|
||||
return buf[0] == 0xD0 && buf[1] == 0xCF &&
|
||||
buf[2] == 0x11 && buf[3] == 0xE0 &&
|
||||
buf[512] == 0x09 && buf[513] == 0x08
|
||||
} else {
|
||||
return len(buf) > 3 &&
|
||||
buf[0] == 0xD0 && buf[1] == 0xCF &&
|
||||
buf[2] == 0x11 && buf[3] == 0xE0
|
||||
}
|
||||
}
|
||||
|
||||
func Xlsx(buf []byte) bool {
|
||||
typ, ok := msooxml(buf)
|
||||
return ok && typ == TYPE_XLSX
|
||||
}
|
||||
|
||||
func Ppt(buf []byte) bool {
|
||||
if len(buf) > 513 {
|
||||
return buf[0] == 0xD0 && buf[1] == 0xCF &&
|
||||
buf[2] == 0x11 && buf[3] == 0xE0 &&
|
||||
buf[512] == 0xA0 && buf[513] == 0x46
|
||||
} else {
|
||||
return len(buf) > 3 &&
|
||||
buf[0] == 0xD0 && buf[1] == 0xCF &&
|
||||
buf[2] == 0x11 && buf[3] == 0xE0
|
||||
}
|
||||
}
|
||||
|
||||
func Pptx(buf []byte) bool {
|
||||
typ, ok := msooxml(buf)
|
||||
return ok && typ == TYPE_PPTX
|
||||
}
|
||||
|
||||
func msooxml(buf []byte) (typ docType, found bool) {
|
||||
signature := []byte{'P', 'K', 0x03, 0x04}
|
||||
|
||||
// start by checking for ZIP local file header signature
|
||||
if ok := compareBytes(buf, signature, 0); !ok {
|
||||
return
|
||||
}
|
||||
|
||||
// make sure the first file is correct
|
||||
if v, ok := checkMSOoml(buf, 0x1E); ok {
|
||||
return v, ok
|
||||
}
|
||||
|
||||
if !compareBytes(buf, []byte("[Content_Types].xml"), 0x1E) &&
|
||||
!compareBytes(buf, []byte("_rels/.rels"), 0x1E) &&
|
||||
!compareBytes(buf, []byte("docProps"), 0x1E) {
|
||||
return
|
||||
}
|
||||
|
||||
// skip to the second local file header
|
||||
// since some documents include a 520-byte extra field following the file
|
||||
// header, we need to scan for the next header
|
||||
startOffset := int(binary.LittleEndian.Uint32(buf[18:22]) + 49)
|
||||
idx := search(buf, startOffset, 6000)
|
||||
if idx == -1 {
|
||||
return
|
||||
}
|
||||
|
||||
// now skip to the *third* local file header; again, we need to scan due to a
|
||||
// 520-byte extra field following the file header
|
||||
startOffset += idx + 4 + 26
|
||||
idx = search(buf, startOffset, 6000)
|
||||
if idx == -1 {
|
||||
return
|
||||
}
|
||||
|
||||
// and check the subdirectory name to determine which type of OOXML
|
||||
// file we have. Correct the mimetype with the registered ones:
|
||||
// http://technet.microsoft.com/en-us/library/cc179224.aspx
|
||||
startOffset += idx + 4 + 26
|
||||
if typ, ok := checkMSOoml(buf, startOffset); ok {
|
||||
return typ, ok
|
||||
}
|
||||
|
||||
// OpenOffice/Libreoffice orders ZIP entry differently, so check the 4th file
|
||||
startOffset += 26
|
||||
idx = search(buf, startOffset, 6000)
|
||||
if idx == -1 {
|
||||
return TYPE_OOXML, true
|
||||
}
|
||||
|
||||
startOffset += idx + 4 + 26
|
||||
if typ, ok := checkMSOoml(buf, startOffset); ok {
|
||||
return typ, ok
|
||||
} else {
|
||||
return TYPE_OOXML, true
|
||||
}
|
||||
}
|
||||
|
||||
func compareBytes(slice, subSlice []byte, startOffset int) bool {
|
||||
sl := len(subSlice)
|
||||
|
||||
if startOffset+sl > len(slice) {
|
||||
return false
|
||||
}
|
||||
|
||||
s := slice[startOffset : startOffset+sl]
|
||||
for i := range s {
|
||||
if subSlice[i] != s[i] {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
func checkMSOoml(buf []byte, offset int) (typ docType, ok bool) {
|
||||
ok = true
|
||||
|
||||
switch {
|
||||
case compareBytes(buf, []byte("word/"), offset):
|
||||
typ = TYPE_DOCX
|
||||
case compareBytes(buf, []byte("ppt/"), offset):
|
||||
typ = TYPE_PPTX
|
||||
case compareBytes(buf, []byte("xl/"), offset):
|
||||
typ = TYPE_XLSX
|
||||
default:
|
||||
ok = false
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func search(buf []byte, start, rangeNum int) int {
|
||||
length := len(buf)
|
||||
end := start + rangeNum
|
||||
signature := []byte{'P', 'K', 0x03, 0x04}
|
||||
|
||||
if end > length {
|
||||
end = length
|
||||
}
|
||||
|
||||
if start >= end {
|
||||
return -1
|
||||
}
|
||||
|
||||
return bytes.Index(buf[start:end], signature)
|
||||
}
|
||||
45
vendor/github.com/h2non/filetype/matchers/font.go
generated
vendored
Normal file
45
vendor/github.com/h2non/filetype/matchers/font.go
generated
vendored
Normal file
|
|
@ -0,0 +1,45 @@
|
|||
package matchers
|
||||
|
||||
var (
|
||||
TypeWoff = newType("woff", "application/font-woff")
|
||||
TypeWoff2 = newType("woff2", "application/font-woff")
|
||||
TypeTtf = newType("ttf", "application/font-sfnt")
|
||||
TypeOtf = newType("otf", "application/font-sfnt")
|
||||
)
|
||||
|
||||
var Font = Map{
|
||||
TypeWoff: Woff,
|
||||
TypeWoff2: Woff2,
|
||||
TypeTtf: Ttf,
|
||||
TypeOtf: Otf,
|
||||
}
|
||||
|
||||
func Woff(buf []byte) bool {
|
||||
return len(buf) > 7 &&
|
||||
buf[0] == 0x77 && buf[1] == 0x4F &&
|
||||
buf[2] == 0x46 && buf[3] == 0x46 &&
|
||||
buf[4] == 0x00 && buf[5] == 0x01 &&
|
||||
buf[6] == 0x00 && buf[7] == 0x00
|
||||
}
|
||||
|
||||
func Woff2(buf []byte) bool {
|
||||
return len(buf) > 7 &&
|
||||
buf[0] == 0x77 && buf[1] == 0x4F &&
|
||||
buf[2] == 0x46 && buf[3] == 0x32 &&
|
||||
buf[4] == 0x00 && buf[5] == 0x01 &&
|
||||
buf[6] == 0x00 && buf[7] == 0x00
|
||||
}
|
||||
|
||||
func Ttf(buf []byte) bool {
|
||||
return len(buf) > 4 &&
|
||||
buf[0] == 0x00 && buf[1] == 0x01 &&
|
||||
buf[2] == 0x00 && buf[3] == 0x00 &&
|
||||
buf[4] == 0x00
|
||||
}
|
||||
|
||||
func Otf(buf []byte) bool {
|
||||
return len(buf) > 4 &&
|
||||
buf[0] == 0x4F && buf[1] == 0x54 &&
|
||||
buf[2] == 0x54 && buf[3] == 0x4F &&
|
||||
buf[4] == 0x00
|
||||
}
|
||||
143
vendor/github.com/h2non/filetype/matchers/image.go
generated
vendored
Normal file
143
vendor/github.com/h2non/filetype/matchers/image.go
generated
vendored
Normal file
|
|
@ -0,0 +1,143 @@
|
|||
package matchers
|
||||
|
||||
import "github.com/h2non/filetype/matchers/isobmff"
|
||||
|
||||
var (
|
||||
TypeJpeg = newType("jpg", "image/jpeg")
|
||||
TypeJpeg2000 = newType("jp2", "image/jp2")
|
||||
TypePng = newType("png", "image/png")
|
||||
TypeGif = newType("gif", "image/gif")
|
||||
TypeWebp = newType("webp", "image/webp")
|
||||
TypeCR2 = newType("cr2", "image/x-canon-cr2")
|
||||
TypeTiff = newType("tif", "image/tiff")
|
||||
TypeBmp = newType("bmp", "image/bmp")
|
||||
TypeJxr = newType("jxr", "image/vnd.ms-photo")
|
||||
TypePsd = newType("psd", "image/vnd.adobe.photoshop")
|
||||
TypeIco = newType("ico", "image/vnd.microsoft.icon")
|
||||
TypeHeif = newType("heif", "image/heif")
|
||||
TypeDwg = newType("dwg", "image/vnd.dwg")
|
||||
)
|
||||
|
||||
var Image = Map{
|
||||
TypeJpeg: Jpeg,
|
||||
TypeJpeg2000: Jpeg2000,
|
||||
TypePng: Png,
|
||||
TypeGif: Gif,
|
||||
TypeWebp: Webp,
|
||||
TypeCR2: CR2,
|
||||
TypeTiff: Tiff,
|
||||
TypeBmp: Bmp,
|
||||
TypeJxr: Jxr,
|
||||
TypePsd: Psd,
|
||||
TypeIco: Ico,
|
||||
TypeHeif: Heif,
|
||||
TypeDwg: Dwg,
|
||||
}
|
||||
|
||||
func Jpeg(buf []byte) bool {
|
||||
return len(buf) > 2 &&
|
||||
buf[0] == 0xFF &&
|
||||
buf[1] == 0xD8 &&
|
||||
buf[2] == 0xFF
|
||||
}
|
||||
|
||||
func Jpeg2000(buf []byte) bool {
|
||||
return len(buf) > 12 &&
|
||||
buf[0] == 0x0 &&
|
||||
buf[1] == 0x0 &&
|
||||
buf[2] == 0x0 &&
|
||||
buf[3] == 0xC &&
|
||||
buf[4] == 0x6A &&
|
||||
buf[5] == 0x50 &&
|
||||
buf[6] == 0x20 &&
|
||||
buf[7] == 0x20 &&
|
||||
buf[8] == 0xD &&
|
||||
buf[9] == 0xA &&
|
||||
buf[10] == 0x87 &&
|
||||
buf[11] == 0xA &&
|
||||
buf[12] == 0x0
|
||||
}
|
||||
|
||||
func Png(buf []byte) bool {
|
||||
return len(buf) > 3 &&
|
||||
buf[0] == 0x89 && buf[1] == 0x50 &&
|
||||
buf[2] == 0x4E && buf[3] == 0x47
|
||||
}
|
||||
|
||||
func Gif(buf []byte) bool {
|
||||
return len(buf) > 2 &&
|
||||
buf[0] == 0x47 && buf[1] == 0x49 && buf[2] == 0x46
|
||||
}
|
||||
|
||||
func Webp(buf []byte) bool {
|
||||
return len(buf) > 11 &&
|
||||
buf[8] == 0x57 && buf[9] == 0x45 &&
|
||||
buf[10] == 0x42 && buf[11] == 0x50
|
||||
}
|
||||
|
||||
func CR2(buf []byte) bool {
|
||||
return len(buf) > 10 &&
|
||||
((buf[0] == 0x49 && buf[1] == 0x49 && buf[2] == 0x2A && buf[3] == 0x0) || // Little Endian
|
||||
(buf[0] == 0x4D && buf[1] == 0x4D && buf[2] == 0x0 && buf[3] == 0x2A)) && // Big Endian
|
||||
buf[8] == 0x43 && buf[9] == 0x52 && // CR2 magic word
|
||||
buf[10] == 0x02 // CR2 major version
|
||||
}
|
||||
|
||||
func Tiff(buf []byte) bool {
|
||||
return len(buf) > 10 &&
|
||||
((buf[0] == 0x49 && buf[1] == 0x49 && buf[2] == 0x2A && buf[3] == 0x0) || // Little Endian
|
||||
(buf[0] == 0x4D && buf[1] == 0x4D && buf[2] == 0x0 && buf[3] == 0x2A)) && // Big Endian
|
||||
!CR2(buf) // To avoid conflicts differentiate Tiff from CR2
|
||||
}
|
||||
|
||||
func Bmp(buf []byte) bool {
|
||||
return len(buf) > 1 &&
|
||||
buf[0] == 0x42 &&
|
||||
buf[1] == 0x4D
|
||||
}
|
||||
|
||||
func Jxr(buf []byte) bool {
|
||||
return len(buf) > 2 &&
|
||||
buf[0] == 0x49 &&
|
||||
buf[1] == 0x49 &&
|
||||
buf[2] == 0xBC
|
||||
}
|
||||
|
||||
func Psd(buf []byte) bool {
|
||||
return len(buf) > 3 &&
|
||||
buf[0] == 0x38 && buf[1] == 0x42 &&
|
||||
buf[2] == 0x50 && buf[3] == 0x53
|
||||
}
|
||||
|
||||
func Ico(buf []byte) bool {
|
||||
return len(buf) > 3 &&
|
||||
buf[0] == 0x00 && buf[1] == 0x00 &&
|
||||
buf[2] == 0x01 && buf[3] == 0x00
|
||||
}
|
||||
|
||||
func Heif(buf []byte) bool {
|
||||
if !isobmff.IsISOBMFF(buf) {
|
||||
return false
|
||||
}
|
||||
|
||||
majorBrand, _, compatibleBrands := isobmff.GetFtyp(buf)
|
||||
if majorBrand == "heic" {
|
||||
return true
|
||||
}
|
||||
|
||||
if majorBrand == "mif1" || majorBrand == "msf1" {
|
||||
for _, compatibleBrand := range compatibleBrands {
|
||||
if compatibleBrand == "heic" {
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
func Dwg(buf []byte) bool {
|
||||
return len(buf) > 3 &&
|
||||
buf[0] == 0x41 && buf[1] == 0x43 &&
|
||||
buf[2] == 0x31 && buf[3] == 0x30
|
||||
}
|
||||
37
vendor/github.com/h2non/filetype/matchers/isobmff/isobmff.go
generated
vendored
Normal file
37
vendor/github.com/h2non/filetype/matchers/isobmff/isobmff.go
generated
vendored
Normal file
|
|
@ -0,0 +1,37 @@
|
|||
package isobmff
|
||||
|
||||
import "encoding/binary"
|
||||
|
||||
// IsISOBMFF checks whether the given buffer represents ISO Base Media File Format data
|
||||
func IsISOBMFF(buf []byte) bool {
|
||||
if len(buf) < 16 || string(buf[4:8]) != "ftyp" {
|
||||
return false
|
||||
}
|
||||
|
||||
if ftypLength := binary.BigEndian.Uint32(buf[0:4]); len(buf) < int(ftypLength) {
|
||||
return false
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
// GetFtyp returns the major brand, minor version and compatible brands of the ISO-BMFF data
|
||||
func GetFtyp(buf []byte) (string, string, []string) {
|
||||
if len(buf) < 17 {
|
||||
return "", "", []string{""}
|
||||
}
|
||||
|
||||
ftypLength := binary.BigEndian.Uint32(buf[0:4])
|
||||
|
||||
majorBrand := string(buf[8:12])
|
||||
minorVersion := string(buf[12:16])
|
||||
|
||||
compatibleBrands := []string{}
|
||||
for i := 16; i < int(ftypLength); i += 4 {
|
||||
if len(buf) >= (i + 4) {
|
||||
compatibleBrands = append(compatibleBrands, string(buf[i:i+4]))
|
||||
}
|
||||
}
|
||||
|
||||
return majorBrand, minorVersion, compatibleBrands
|
||||
}
|
||||
51
vendor/github.com/h2non/filetype/matchers/matchers.go
generated
vendored
Normal file
51
vendor/github.com/h2non/filetype/matchers/matchers.go
generated
vendored
Normal file
|
|
@ -0,0 +1,51 @@
|
|||
package matchers
|
||||
|
||||
import (
|
||||
"github.com/h2non/filetype/types"
|
||||
)
|
||||
|
||||
// Internal shortcut to NewType
|
||||
var newType = types.NewType
|
||||
|
||||
// Matcher function interface as type alias
|
||||
type Matcher func([]byte) bool
|
||||
|
||||
// Type interface to store pairs of type with its matcher function
|
||||
type Map map[types.Type]Matcher
|
||||
|
||||
// Type specific matcher function interface
|
||||
type TypeMatcher func([]byte) types.Type
|
||||
|
||||
// Store registered file type matchers
|
||||
var Matchers = make(map[types.Type]TypeMatcher)
|
||||
var MatcherKeys []types.Type
|
||||
|
||||
// Create and register a new type matcher function
|
||||
func NewMatcher(kind types.Type, fn Matcher) TypeMatcher {
|
||||
matcher := func(buf []byte) types.Type {
|
||||
if fn(buf) {
|
||||
return kind
|
||||
}
|
||||
return types.Unknown
|
||||
}
|
||||
|
||||
Matchers[kind] = matcher
|
||||
// prepend here so any user defined matchers get added first
|
||||
MatcherKeys = append([]types.Type{kind}, MatcherKeys...)
|
||||
return matcher
|
||||
}
|
||||
|
||||
func register(matchers ...Map) {
|
||||
MatcherKeys = MatcherKeys[:0]
|
||||
for _, m := range matchers {
|
||||
for kind, matcher := range m {
|
||||
NewMatcher(kind, matcher)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func init() {
|
||||
// Arguments order is intentional
|
||||
// Archive files will be checked last due to prepend above in func NewMatcher
|
||||
register(Archive, Document, Font, Audio, Video, Image, Application)
|
||||
}
|
||||
145
vendor/github.com/h2non/filetype/matchers/video.go
generated
vendored
Normal file
145
vendor/github.com/h2non/filetype/matchers/video.go
generated
vendored
Normal file
|
|
@ -0,0 +1,145 @@
|
|||
package matchers
|
||||
|
||||
import "bytes"
|
||||
|
||||
var (
|
||||
TypeMp4 = newType("mp4", "video/mp4")
|
||||
TypeM4v = newType("m4v", "video/x-m4v")
|
||||
TypeMkv = newType("mkv", "video/x-matroska")
|
||||
TypeWebm = newType("webm", "video/webm")
|
||||
TypeMov = newType("mov", "video/quicktime")
|
||||
TypeAvi = newType("avi", "video/x-msvideo")
|
||||
TypeWmv = newType("wmv", "video/x-ms-wmv")
|
||||
TypeMpeg = newType("mpg", "video/mpeg")
|
||||
TypeFlv = newType("flv", "video/x-flv")
|
||||
Type3gp = newType("3gp", "video/3gpp")
|
||||
)
|
||||
|
||||
var Video = Map{
|
||||
TypeMp4: Mp4,
|
||||
TypeM4v: M4v,
|
||||
TypeMkv: Mkv,
|
||||
TypeWebm: Webm,
|
||||
TypeMov: Mov,
|
||||
TypeAvi: Avi,
|
||||
TypeWmv: Wmv,
|
||||
TypeMpeg: Mpeg,
|
||||
TypeFlv: Flv,
|
||||
Type3gp: Match3gp,
|
||||
}
|
||||
|
||||
func M4v(buf []byte) bool {
|
||||
return len(buf) > 10 &&
|
||||
buf[4] == 0x66 && buf[5] == 0x74 &&
|
||||
buf[6] == 0x79 && buf[7] == 0x70 &&
|
||||
buf[8] == 0x4D && buf[9] == 0x34 &&
|
||||
buf[10] == 0x56
|
||||
}
|
||||
|
||||
func Mkv(buf []byte) bool {
|
||||
return len(buf) > 3 &&
|
||||
buf[0] == 0x1A && buf[1] == 0x45 &&
|
||||
buf[2] == 0xDF && buf[3] == 0xA3 &&
|
||||
containsMatroskaSignature(buf, []byte{'m', 'a', 't', 'r', 'o', 's', 'k', 'a'})
|
||||
}
|
||||
|
||||
func Webm(buf []byte) bool {
|
||||
return len(buf) > 3 &&
|
||||
buf[0] == 0x1A && buf[1] == 0x45 &&
|
||||
buf[2] == 0xDF && buf[3] == 0xA3 &&
|
||||
containsMatroskaSignature(buf, []byte{'w', 'e', 'b', 'm'})
|
||||
}
|
||||
|
||||
func Mov(buf []byte) bool {
|
||||
return len(buf) > 15 && ((buf[0] == 0x0 && buf[1] == 0x0 &&
|
||||
buf[2] == 0x0 && buf[3] == 0x14 &&
|
||||
buf[4] == 0x66 && buf[5] == 0x74 &&
|
||||
buf[6] == 0x79 && buf[7] == 0x70) ||
|
||||
(buf[4] == 0x6d && buf[5] == 0x6f && buf[6] == 0x6f && buf[7] == 0x76) ||
|
||||
(buf[4] == 0x6d && buf[5] == 0x64 && buf[6] == 0x61 && buf[7] == 0x74) ||
|
||||
(buf[12] == 0x6d && buf[13] == 0x64 && buf[14] == 0x61 && buf[15] == 0x74))
|
||||
}
|
||||
|
||||
func Avi(buf []byte) bool {
|
||||
return len(buf) > 10 &&
|
||||
buf[0] == 0x52 && buf[1] == 0x49 &&
|
||||
buf[2] == 0x46 && buf[3] == 0x46 &&
|
||||
buf[8] == 0x41 && buf[9] == 0x56 &&
|
||||
buf[10] == 0x49
|
||||
}
|
||||
|
||||
func Wmv(buf []byte) bool {
|
||||
return len(buf) > 9 &&
|
||||
buf[0] == 0x30 && buf[1] == 0x26 &&
|
||||
buf[2] == 0xB2 && buf[3] == 0x75 &&
|
||||
buf[4] == 0x8E && buf[5] == 0x66 &&
|
||||
buf[6] == 0xCF && buf[7] == 0x11 &&
|
||||
buf[8] == 0xA6 && buf[9] == 0xD9
|
||||
}
|
||||
|
||||
func Mpeg(buf []byte) bool {
|
||||
return len(buf) > 3 &&
|
||||
buf[0] == 0x0 && buf[1] == 0x0 &&
|
||||
buf[2] == 0x1 && buf[3] >= 0xb0 &&
|
||||
buf[3] <= 0xbf
|
||||
}
|
||||
|
||||
func Flv(buf []byte) bool {
|
||||
return len(buf) > 3 &&
|
||||
buf[0] == 0x46 && buf[1] == 0x4C &&
|
||||
buf[2] == 0x56 && buf[3] == 0x01
|
||||
}
|
||||
|
||||
func Mp4(buf []byte) bool {
|
||||
return len(buf) > 11 &&
|
||||
(buf[4] == 'f' && buf[5] == 't' && buf[6] == 'y' && buf[7] == 'p') &&
|
||||
((buf[8] == 'a' && buf[9] == 'v' && buf[10] == 'c' && buf[11] == '1') ||
|
||||
(buf[8] == 'd' && buf[9] == 'a' && buf[10] == 's' && buf[11] == 'h') ||
|
||||
(buf[8] == 'i' && buf[9] == 's' && buf[10] == 'o' && buf[11] == '2') ||
|
||||
(buf[8] == 'i' && buf[9] == 's' && buf[10] == 'o' && buf[11] == '3') ||
|
||||
(buf[8] == 'i' && buf[9] == 's' && buf[10] == 'o' && buf[11] == '4') ||
|
||||
(buf[8] == 'i' && buf[9] == 's' && buf[10] == 'o' && buf[11] == '5') ||
|
||||
(buf[8] == 'i' && buf[9] == 's' && buf[10] == 'o' && buf[11] == '6') ||
|
||||
(buf[8] == 'i' && buf[9] == 's' && buf[10] == 'o' && buf[11] == 'm') ||
|
||||
(buf[8] == 'm' && buf[9] == 'm' && buf[10] == 'p' && buf[11] == '4') ||
|
||||
(buf[8] == 'm' && buf[9] == 'p' && buf[10] == '4' && buf[11] == '1') ||
|
||||
(buf[8] == 'm' && buf[9] == 'p' && buf[10] == '4' && buf[11] == '2') ||
|
||||
(buf[8] == 'm' && buf[9] == 'p' && buf[10] == '4' && buf[11] == 'v') ||
|
||||
(buf[8] == 'm' && buf[9] == 'p' && buf[10] == '7' && buf[11] == '1') ||
|
||||
(buf[8] == 'M' && buf[9] == 'S' && buf[10] == 'N' && buf[11] == 'V') ||
|
||||
(buf[8] == 'N' && buf[9] == 'D' && buf[10] == 'A' && buf[11] == 'S') ||
|
||||
(buf[8] == 'N' && buf[9] == 'D' && buf[10] == 'S' && buf[11] == 'C') ||
|
||||
(buf[8] == 'N' && buf[9] == 'S' && buf[10] == 'D' && buf[11] == 'C') ||
|
||||
(buf[8] == 'N' && buf[9] == 'D' && buf[10] == 'S' && buf[11] == 'H') ||
|
||||
(buf[8] == 'N' && buf[9] == 'D' && buf[10] == 'S' && buf[11] == 'M') ||
|
||||
(buf[8] == 'N' && buf[9] == 'D' && buf[10] == 'S' && buf[11] == 'P') ||
|
||||
(buf[8] == 'N' && buf[9] == 'D' && buf[10] == 'S' && buf[11] == 'S') ||
|
||||
(buf[8] == 'N' && buf[9] == 'D' && buf[10] == 'X' && buf[11] == 'C') ||
|
||||
(buf[8] == 'N' && buf[9] == 'D' && buf[10] == 'X' && buf[11] == 'H') ||
|
||||
(buf[8] == 'N' && buf[9] == 'D' && buf[10] == 'X' && buf[11] == 'M') ||
|
||||
(buf[8] == 'N' && buf[9] == 'D' && buf[10] == 'X' && buf[11] == 'P') ||
|
||||
(buf[8] == 'N' && buf[9] == 'D' && buf[10] == 'X' && buf[11] == 'S') ||
|
||||
(buf[8] == 'F' && buf[9] == '4' && buf[10] == 'V' && buf[11] == ' ') ||
|
||||
(buf[8] == 'F' && buf[9] == '4' && buf[10] == 'P' && buf[11] == ' '))
|
||||
}
|
||||
|
||||
func Match3gp(buf []byte) bool {
|
||||
return len(buf) > 10 &&
|
||||
buf[4] == 0x66 && buf[5] == 0x74 && buf[6] == 0x79 &&
|
||||
buf[7] == 0x70 && buf[8] == 0x33 && buf[9] == 0x67 &&
|
||||
buf[10] == 0x70
|
||||
}
|
||||
|
||||
func containsMatroskaSignature(buf, subType []byte) bool {
|
||||
limit := 4096
|
||||
if len(buf) < limit {
|
||||
limit = len(buf)
|
||||
}
|
||||
|
||||
index := bytes.Index(buf[:limit], subType)
|
||||
if index < 3 {
|
||||
return false
|
||||
}
|
||||
|
||||
return buf[index-3] == 0x42 && buf[index-2] == 0x82
|
||||
}
|
||||
4
vendor/github.com/h2non/filetype/types/defaults.go
generated
vendored
Normal file
4
vendor/github.com/h2non/filetype/types/defaults.go
generated
vendored
Normal file
|
|
@ -0,0 +1,4 @@
|
|||
package types
|
||||
|
||||
// Unknown default type
|
||||
var Unknown = NewType("unknown", "")
|
||||
14
vendor/github.com/h2non/filetype/types/mime.go
generated
vendored
Normal file
14
vendor/github.com/h2non/filetype/types/mime.go
generated
vendored
Normal file
|
|
@ -0,0 +1,14 @@
|
|||
package types
|
||||
|
||||
// MIME stores the file MIME type values
|
||||
type MIME struct {
|
||||
Type string
|
||||
Subtype string
|
||||
Value string
|
||||
}
|
||||
|
||||
// Creates a new MIME type
|
||||
func NewMIME(mime string) MIME {
|
||||
kind, subtype := splitMime(mime)
|
||||
return MIME{Type: kind, Subtype: subtype, Value: mime}
|
||||
}
|
||||
11
vendor/github.com/h2non/filetype/types/split.go
generated
vendored
Normal file
11
vendor/github.com/h2non/filetype/types/split.go
generated
vendored
Normal file
|
|
@ -0,0 +1,11 @@
|
|||
package types
|
||||
|
||||
import "strings"
|
||||
|
||||
func splitMime(s string) (string, string) {
|
||||
x := strings.Split(s, "/")
|
||||
if len(x) > 1 {
|
||||
return x[0], x[1]
|
||||
}
|
||||
return x[0], ""
|
||||
}
|
||||
16
vendor/github.com/h2non/filetype/types/type.go
generated
vendored
Normal file
16
vendor/github.com/h2non/filetype/types/type.go
generated
vendored
Normal file
|
|
@ -0,0 +1,16 @@
|
|||
package types
|
||||
|
||||
// Type represents a file MIME type and its extension
|
||||
type Type struct {
|
||||
MIME MIME
|
||||
Extension string
|
||||
}
|
||||
|
||||
// NewType creates a new Type
|
||||
func NewType(ext, mime string) Type {
|
||||
t := Type{
|
||||
MIME: NewMIME(mime),
|
||||
Extension: ext,
|
||||
}
|
||||
return Add(t)
|
||||
}
|
||||
23
vendor/github.com/h2non/filetype/types/types.go
generated
vendored
Normal file
23
vendor/github.com/h2non/filetype/types/types.go
generated
vendored
Normal file
|
|
@ -0,0 +1,23 @@
|
|||
package types
|
||||
|
||||
import "sync"
|
||||
|
||||
// Types Support concurrent map writes
|
||||
var Types sync.Map
|
||||
|
||||
// Add registers a new type in the package
|
||||
func Add(t Type) Type {
|
||||
Types.Store(t.Extension, t)
|
||||
return t
|
||||
}
|
||||
|
||||
// Get retrieves a Type by extension
|
||||
func Get(ext string) Type {
|
||||
if tmp, ok := Types.Load(ext); ok {
|
||||
kind := tmp.(Type)
|
||||
if kind.Extension != "" {
|
||||
return kind
|
||||
}
|
||||
}
|
||||
return Unknown
|
||||
}
|
||||
4
vendor/github.com/h2non/filetype/version.go
generated
vendored
Normal file
4
vendor/github.com/h2non/filetype/version.go
generated
vendored
Normal file
|
|
@ -0,0 +1,4 @@
|
|||
package filetype
|
||||
|
||||
// Version exposes the current package version.
|
||||
const Version = "1.1.3"
|
||||
169
vendor/github.com/mattn/go-sqlite3/blob_io.go
generated
vendored
Normal file
169
vendor/github.com/mattn/go-sqlite3/blob_io.go
generated
vendored
Normal file
|
|
@ -0,0 +1,169 @@
|
|||
// Copyright (C) 2022 Yasuhiro Matsumoto <mattn.jp@gmail.com>.
|
||||
//
|
||||
// Use of this source code is governed by an MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package sqlite3
|
||||
|
||||
/*
|
||||
#ifndef USE_LIBSQLITE3
|
||||
#include "sqlite3-binding.h"
|
||||
#else
|
||||
#include <sqlite3.h>
|
||||
#endif
|
||||
#include <stdlib.h>
|
||||
*/
|
||||
import "C"
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"math"
|
||||
"runtime"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
// SQLiteBlob implements the SQLite Blob I/O interface.
|
||||
type SQLiteBlob struct {
|
||||
conn *SQLiteConn
|
||||
blob *C.sqlite3_blob
|
||||
size int
|
||||
offset int
|
||||
}
|
||||
|
||||
// Blob opens a blob.
|
||||
//
|
||||
// See https://www.sqlite.org/c3ref/blob_open.html for usage.
|
||||
//
|
||||
// Should only be used with conn.Raw.
|
||||
func (conn *SQLiteConn) Blob(database, table, column string, rowid int64, flags int) (*SQLiteBlob, error) {
|
||||
databaseptr := C.CString(database)
|
||||
defer C.free(unsafe.Pointer(databaseptr))
|
||||
|
||||
tableptr := C.CString(table)
|
||||
defer C.free(unsafe.Pointer(tableptr))
|
||||
|
||||
columnptr := C.CString(column)
|
||||
defer C.free(unsafe.Pointer(columnptr))
|
||||
|
||||
var blob *C.sqlite3_blob
|
||||
ret := C.sqlite3_blob_open(conn.db, databaseptr, tableptr, columnptr, C.longlong(rowid), C.int(flags), &blob)
|
||||
|
||||
if ret != C.SQLITE_OK {
|
||||
return nil, conn.lastError()
|
||||
}
|
||||
|
||||
size := int(C.sqlite3_blob_bytes(blob))
|
||||
bb := &SQLiteBlob{conn: conn, blob: blob, size: size, offset: 0}
|
||||
|
||||
runtime.SetFinalizer(bb, (*SQLiteBlob).Close)
|
||||
|
||||
return bb, nil
|
||||
}
|
||||
|
||||
// Read implements the io.Reader interface.
|
||||
func (s *SQLiteBlob) Read(b []byte) (n int, err error) {
|
||||
if s.offset >= s.size {
|
||||
return 0, io.EOF
|
||||
}
|
||||
|
||||
if len(b) == 0 {
|
||||
return 0, nil
|
||||
}
|
||||
|
||||
n = s.size - s.offset
|
||||
if len(b) < n {
|
||||
n = len(b)
|
||||
}
|
||||
|
||||
p := &b[0]
|
||||
ret := C.sqlite3_blob_read(s.blob, unsafe.Pointer(p), C.int(n), C.int(s.offset))
|
||||
if ret != C.SQLITE_OK {
|
||||
return 0, s.conn.lastError()
|
||||
}
|
||||
|
||||
s.offset += n
|
||||
|
||||
return n, nil
|
||||
}
|
||||
|
||||
// Write implements the io.Writer interface.
|
||||
func (s *SQLiteBlob) Write(b []byte) (n int, err error) {
|
||||
if len(b) == 0 {
|
||||
return 0, nil
|
||||
}
|
||||
|
||||
if s.offset >= s.size {
|
||||
return 0, fmt.Errorf("sqlite3.SQLiteBlob.Write: insufficient space in %d-byte blob", s.size)
|
||||
}
|
||||
|
||||
n = s.size - s.offset
|
||||
if len(b) < n {
|
||||
n = len(b)
|
||||
}
|
||||
|
||||
if n != len(b) {
|
||||
return 0, fmt.Errorf("sqlite3.SQLiteBlob.Write: insufficient space in %d-byte blob", s.size)
|
||||
}
|
||||
|
||||
p := &b[0]
|
||||
ret := C.sqlite3_blob_write(s.blob, unsafe.Pointer(p), C.int(n), C.int(s.offset))
|
||||
if ret != C.SQLITE_OK {
|
||||
return 0, s.conn.lastError()
|
||||
}
|
||||
|
||||
s.offset += n
|
||||
|
||||
return n, nil
|
||||
}
|
||||
|
||||
// Seek implements the io.Seeker interface.
|
||||
func (s *SQLiteBlob) Seek(offset int64, whence int) (int64, error) {
|
||||
if offset > math.MaxInt32 {
|
||||
return 0, fmt.Errorf("sqlite3.SQLiteBlob.Seek: invalid offset %d", offset)
|
||||
}
|
||||
|
||||
var abs int64
|
||||
switch whence {
|
||||
case io.SeekStart:
|
||||
abs = offset
|
||||
case io.SeekCurrent:
|
||||
abs = int64(s.offset) + offset
|
||||
case io.SeekEnd:
|
||||
abs = int64(s.size) + offset
|
||||
default:
|
||||
return 0, fmt.Errorf("sqlite3.SQLiteBlob.Seek: invalid whence %d", whence)
|
||||
}
|
||||
|
||||
if abs < 0 {
|
||||
return 0, errors.New("sqlite.SQLiteBlob.Seek: negative position")
|
||||
}
|
||||
|
||||
if abs > math.MaxInt32 || abs > int64(s.size) {
|
||||
return 0, errors.New("sqlite3.SQLiteBlob.Seek: overflow position")
|
||||
}
|
||||
|
||||
s.offset = int(abs)
|
||||
|
||||
return abs, nil
|
||||
}
|
||||
|
||||
// Size returns the size of the blob.
|
||||
func (s *SQLiteBlob) Size() int {
|
||||
return s.size
|
||||
}
|
||||
|
||||
// Close implements the io.Closer interface.
|
||||
func (s *SQLiteBlob) Close() error {
|
||||
ret := C.sqlite3_blob_close(s.blob)
|
||||
|
||||
s.blob = nil
|
||||
runtime.SetFinalizer(s, nil)
|
||||
|
||||
if ret != C.SQLITE_OK {
|
||||
return s.conn.lastError()
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
9
vendor/modules.txt
vendored
9
vendor/modules.txt
vendored
|
|
@ -199,6 +199,12 @@ github.com/gorilla/mux
|
|||
# github.com/gorilla/websocket v1.5.4-0.20240702125206-a62d9d2a8413
|
||||
## explicit; go 1.20
|
||||
github.com/gorilla/websocket
|
||||
# github.com/h2non/filetype v1.1.3
|
||||
## explicit; go 1.13
|
||||
github.com/h2non/filetype
|
||||
github.com/h2non/filetype/matchers
|
||||
github.com/h2non/filetype/matchers/isobmff
|
||||
github.com/h2non/filetype/types
|
||||
# github.com/inconshreveable/mousetrap v1.1.0
|
||||
## explicit; go 1.18
|
||||
github.com/inconshreveable/mousetrap
|
||||
|
|
@ -226,7 +232,7 @@ github.com/mattn/go-isatty
|
|||
# github.com/mattn/go-runewidth v0.0.17
|
||||
## explicit; go 1.9
|
||||
github.com/mattn/go-runewidth
|
||||
# github.com/mattn/go-sqlite3 v1.14.32
|
||||
# github.com/mattn/go-sqlite3 v1.14.32 => github.com/gabriel-samfira/go-sqlite3 v0.0.0-20251005121134-bc61ecf9b4c7
|
||||
## explicit; go 1.19
|
||||
github.com/mattn/go-sqlite3
|
||||
# github.com/minio/sio v0.4.2
|
||||
|
|
@ -448,3 +454,4 @@ gorm.io/gorm/logger
|
|||
gorm.io/gorm/migrator
|
||||
gorm.io/gorm/schema
|
||||
gorm.io/gorm/utils
|
||||
# github.com/mattn/go-sqlite3 => github.com/gabriel-samfira/go-sqlite3 v0.0.0-20251005121134-bc61ecf9b4c7
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue