
Batch exiftool runs + scanner package restructuring

viktorstrate 2021-05-06 21:54:31 +02:00
parent 87724ce086
commit b022a13e55
32 changed files with 620 additions and 302 deletions

.gitignore vendored
View File

@ -17,6 +17,7 @@ node_modules/
# testing
/ui/coverage
/api/coverage.txt
# building
.cache/

View File

@ -0,0 +1,24 @@
package dataloader
import (
"time"
"github.com/barasher/go-exiftool"
)
func NewExiftoolLoader(et *exiftool.Exiftool) *ExiftoolLoader {
return &ExiftoolLoader{
wait: 100 * time.Millisecond,
maxBatch: 100,
fetch: func(keys []string) ([]exiftool.FileMetadata, []error) {
metadata := et.ExtractMetadata(keys...)
exifErrors := make([]error, len(metadata))
for i := 0; i < len(metadata); i++ {
exifErrors[i] = metadata[i].Err
}
return metadata, exifErrors
},
}
}
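
For context, a minimal usage sketch of the new batching loader. The surrounding main function, file paths, and logging are illustrative only; NewExiftoolLoader and Load come from this commit. Load calls issued within the 100 ms wait window (up to 100 keys) are collapsed into a single exiftool run.

package main

import (
	"log"
	"sync"

	"github.com/barasher/go-exiftool"
	"github.com/photoview/photoview/api/dataloader"
)

func main() {
	et, err := exiftool.NewExiftool(exiftool.NoPrintConversion())
	if err != nil {
		log.Fatalf("initialize exiftool: %v", err)
	}
	defer et.Close()

	loader := dataloader.NewExiftoolLoader(et)

	// Hypothetical files; any paths requested concurrently within ~100ms
	// end up in the same ExtractMetadata call.
	paths := []string{"a.jpg", "b.jpg", "c.jpg"}

	var wg sync.WaitGroup
	for _, p := range paths {
		wg.Add(1)
		go func(path string) {
			defer wg.Done()
			meta, err := loader.Load(path)
			if err != nil {
				log.Printf("exif for %s failed: %v", path, err)
				return
			}
			log.Printf("%s: %d exif fields", path, len(meta.Fields))
		}(p)
	}
	wg.Wait()
}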

View File

@ -0,0 +1,221 @@
// Code generated by github.com/vektah/dataloaden, DO NOT EDIT.
package dataloader
import (
"sync"
"time"
"github.com/barasher/go-exiftool"
)
// ExiftoolLoaderConfig captures the config to create a new ExiftoolLoader
type ExiftoolLoaderConfig struct {
// Fetch is a method that provides the data for the loader
Fetch func(keys []string) ([]exiftool.FileMetadata, []error)
// Wait is how long to wait before sending a batch
Wait time.Duration
// MaxBatch will limit the maximum number of keys to send in one batch, 0 = no limit
MaxBatch int
}
// NewExiftoolLoader creates a new ExiftoolLoader given a fetch, wait, and maxBatch
// func NewExiftoolLoader(config ExiftoolLoaderConfig) *ExiftoolLoader {
// return &ExiftoolLoader{
// fetch: config.Fetch,
// wait: config.Wait,
// maxBatch: config.MaxBatch,
// }
// }
// ExiftoolLoader batches and caches requests
type ExiftoolLoader struct {
// this method provides the data for the loader
fetch func(keys []string) ([]exiftool.FileMetadata, []error)
// how long to wait before sending a batch
wait time.Duration
// this will limit the maximum number of keys to send in one batch, 0 = no limit
maxBatch int
// INTERNAL
// lazily created cache
cache map[string]exiftool.FileMetadata
// the current batch. keys will continue to be collected until timeout is hit,
// then everything will be sent to the fetch method and out to the listeners
batch *exiftoolLoaderBatch
// mutex to prevent races
mu sync.Mutex
}
type exiftoolLoaderBatch struct {
keys []string
data []exiftool.FileMetadata
error []error
closing bool
done chan struct{}
}
// Load an exiftool.FileMetadata by key, batching and caching will be applied automatically
func (l *ExiftoolLoader) Load(key string) (exiftool.FileMetadata, error) {
return l.LoadThunk(key)()
}
// LoadThunk returns a function that when called will block waiting for an exiftool.FileMetadata.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *ExiftoolLoader) LoadThunk(key string) func() (exiftool.FileMetadata, error) {
l.mu.Lock()
if it, ok := l.cache[key]; ok {
l.mu.Unlock()
return func() (exiftool.FileMetadata, error) {
return it, nil
}
}
if l.batch == nil {
l.batch = &exiftoolLoaderBatch{done: make(chan struct{})}
}
batch := l.batch
pos := batch.keyIndex(l, key)
l.mu.Unlock()
return func() (exiftool.FileMetadata, error) {
<-batch.done
var data exiftool.FileMetadata
if pos < len(batch.data) {
data = batch.data[pos]
}
var err error
// it's convenient to be able to return a single error for everything
if len(batch.error) == 1 {
err = batch.error[0]
} else if batch.error != nil {
err = batch.error[pos]
}
if err == nil {
l.mu.Lock()
l.unsafeSet(key, data)
l.mu.Unlock()
}
return data, err
}
}
// LoadAll fetches many keys at once. It will be broken into appropriate sized
// sub batches depending on how the loader is configured
func (l *ExiftoolLoader) LoadAll(keys []string) ([]exiftool.FileMetadata, []error) {
results := make([]func() (exiftool.FileMetadata, error), len(keys))
for i, key := range keys {
results[i] = l.LoadThunk(key)
}
FileMetadatas := make([]exiftool.FileMetadata, len(keys))
errors := make([]error, len(keys))
for i, thunk := range results {
FileMetadatas[i], errors[i] = thunk()
}
return FileMetadatas, errors
}
// LoadAllThunk returns a function that when called will block waiting for the FileMetadatas.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *ExiftoolLoader) LoadAllThunk(keys []string) func() ([]exiftool.FileMetadata, []error) {
results := make([]func() (exiftool.FileMetadata, error), len(keys))
for i, key := range keys {
results[i] = l.LoadThunk(key)
}
return func() ([]exiftool.FileMetadata, []error) {
FileMetadatas := make([]exiftool.FileMetadata, len(keys))
errors := make([]error, len(keys))
for i, thunk := range results {
FileMetadatas[i], errors[i] = thunk()
}
return FileMetadatas, errors
}
}
// Prime the cache with the provided key and value. If the key already exists, no change is made
// and false is returned.
// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).)
func (l *ExiftoolLoader) Prime(key string, value exiftool.FileMetadata) bool {
l.mu.Lock()
var found bool
if _, found = l.cache[key]; !found {
l.unsafeSet(key, value)
}
l.mu.Unlock()
return !found
}
// Clear the value at key from the cache, if it exists
func (l *ExiftoolLoader) Clear(key string) {
l.mu.Lock()
delete(l.cache, key)
l.mu.Unlock()
}
func (l *ExiftoolLoader) unsafeSet(key string, value exiftool.FileMetadata) {
if l.cache == nil {
l.cache = map[string]exiftool.FileMetadata{}
}
l.cache[key] = value
}
// keyIndex will return the location of the key in the batch, if it's not found
// it will add the key to the batch
func (b *exiftoolLoaderBatch) keyIndex(l *ExiftoolLoader, key string) int {
for i, existingKey := range b.keys {
if key == existingKey {
return i
}
}
pos := len(b.keys)
b.keys = append(b.keys, key)
if pos == 0 {
go b.startTimer(l)
}
if l.maxBatch != 0 && pos >= l.maxBatch-1 {
if !b.closing {
b.closing = true
l.batch = nil
go b.end(l)
}
}
return pos
}
func (b *exiftoolLoaderBatch) startTimer(l *ExiftoolLoader) {
time.Sleep(l.wait)
l.mu.Lock()
// we must have hit a batch limit and are already finalizing this batch
if b.closing {
l.mu.Unlock()
return
}
l.batch = nil
l.mu.Unlock()
b.end(l)
}
func (b *exiftoolLoaderBatch) end(l *ExiftoolLoader) {
b.data, b.error = l.fetch(b.keys)
close(b.done)
}
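
The generated loader collects keys until either the wait window closes or maxBatch keys have accumulated, then runs one fetch and fans the results back out. A sketch of the thunk pattern it exposes, assuming the same imports as in the previous sketch; the loadAllExif helper is hypothetical and mirrors what the generated LoadAll already does.

// loadAllExif registers every path first, then blocks; all keys enqueued
// before the batch is finalized share a single exiftool invocation.
func loadAllExif(loader *dataloader.ExiftoolLoader, paths []string) []exiftool.FileMetadata {
	thunks := make([]func() (exiftool.FileMetadata, error), len(paths))
	for i, p := range paths {
		thunks[i] = loader.LoadThunk(p) // enqueues the key, does not block
	}

	metadata := make([]exiftool.FileMetadata, 0, len(paths))
	for i, thunk := range thunks {
		meta, err := thunk() // blocks until the shared batch has been fetched
		if err != nil {
			log.Printf("exif for %s failed: %v", paths[i], err)
			continue
		}
		metadata = append(metadata, meta)
	}
	return metadata
}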

View File

@ -4,7 +4,7 @@ import (
"time"
"github.com/photoview/photoview/api/graphql/models"
"github.com/photoview/photoview/api/scanner"
"github.com/photoview/photoview/api/scanner/media_type"
"github.com/pkg/errors"
"gorm.io/gorm"
@ -56,7 +56,7 @@ func NewHighresMediaURLLoader(db *gorm.DB) *MediaURLLoader {
maxBatch: 100,
wait: 5 * time.Millisecond,
fetch: makeMediaURLLoader(db, func(query *gorm.DB) *gorm.DB {
return query.Where("purpose = ? OR (purpose = ? AND content_type IN ?)", models.PhotoHighRes, models.MediaOriginal, scanner.WebMimetypes)
return query.Where("purpose = ? OR (purpose = ? AND content_type IN ?)", models.PhotoHighRes, models.MediaOriginal, media_type.WebMimetypes)
}),
}
}

View File

@ -20,6 +20,7 @@ func NewUserFavoriteLoader(db *gorm.DB) *UserFavoritesLoader {
mediaIDMap[key.MediaID] = struct{}{}
}
uniqueUserIDs := make([]int, len(userIDMap))
uniqueMediaIDs := make([]int, len(mediaIDMap))

View File

@ -8,7 +8,7 @@ import (
"regexp"
"github.com/99designs/gqlgen/handler"
"github.com/photoview/photoview/api/graphql/dataloader"
"github.com/photoview/photoview/api/dataloader"
"github.com/photoview/photoview/api/graphql/models"
"gorm.io/gorm"
)

View File

@ -10,7 +10,7 @@ import (
"strings"
"github.com/Kagami/go-face"
"github.com/photoview/photoview/api/scanner/image_helpers"
"github.com/photoview/photoview/api/scanner/media_encoding/media_utils"
"gorm.io/gorm"
"gorm.io/gorm/schema"
)
@ -69,7 +69,7 @@ type FaceRectangle struct {
// ToDBFaceRectangle converts a pixel absolute rectangle to a relative FaceRectangle to be saved in the database
func ToDBFaceRectangle(imgRec image.Rectangle, imagePath string) (*FaceRectangle, error) {
size, err := image_helpers.GetPhotoDimensions(imagePath)
size, err := media_utils.GetPhotoDimensions(imagePath)
if err != nil {
return nil, err
}

View File

@ -4,9 +4,9 @@ import (
"context"
"strings"
"github.com/photoview/photoview/api/dataloader"
api "github.com/photoview/photoview/api/graphql"
"github.com/photoview/photoview/api/graphql/auth"
"github.com/photoview/photoview/api/graphql/dataloader"
"github.com/photoview/photoview/api/graphql/models"
"github.com/pkg/errors"
"gorm.io/gorm/clause"

View File

@ -7,6 +7,7 @@ import (
"github.com/photoview/photoview/api/graphql/models"
"github.com/photoview/photoview/api/scanner/face_detection"
"github.com/photoview/photoview/api/scanner/scanner_utils"
"github.com/photoview/photoview/api/utils"
"github.com/pkg/errors"
"gorm.io/gorm"
@ -118,7 +119,7 @@ func deleteOldUserAlbums(db *gorm.DB, scannedAlbums []*models.Album, user *model
})
if err != nil {
ScannerError("Could not delete old albums from database:\n%s\n", err)
scanner_utils.ScannerError("Could not delete old albums from database:\n%s\n", err)
deleteErrors = append(deleteErrors, err)
}

View File

@ -6,7 +6,6 @@ import (
"github.com/pkg/errors"
"gorm.io/gorm"
"github.com/barasher/go-exiftool"
"github.com/photoview/photoview/api/graphql/models"
)
@ -14,19 +13,18 @@ type exifParser interface {
ParseExif(media_path string) (*models.MediaEXIF, error)
}
var use_exiftool bool = false
var globalExifParser exifParser
func InitializeEXIFParser() {
// Decide between the internal and external Exif parsers
et, err := exiftool.NewExiftool()
exiftoolParser, err := newExiftoolParser()
if err != nil {
use_exiftool = false
log.Printf("Failed to get exiftool, using internal exif parser instead: %v\n", err)
globalExifParser = &internalExifParser{}
} else {
et.Close()
log.Println("Found exiftool")
use_exiftool = true
globalExifParser = exiftoolParser
}
}
@ -46,14 +44,11 @@ func SaveEXIF(tx *gorm.DB, media *models.Media) (*models.MediaEXIF, error) {
}
}
var parser exifParser
if use_exiftool {
parser = &externalExifParser{}
} else {
parser = &internalExifParser{}
if globalExifParser == nil {
return nil, errors.New("No exif parser initialized")
}
exif, err := parser.ParseExif(media.Path)
exif, err := globalExifParser.ParseExif(media.Path)
if err != nil {
return nil, errors.Wrap(err, "failed to parse exif data")
}
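
With parser selection moved behind globalExifParser, callers initialize the package once and then just call SaveEXIF. A small sketch, assuming SaveEXIF is exported from the new scanner/exif package and that log, gorm, and the models package are imported; the indexMedia wrapper is hypothetical.

// indexMedia assumes exif.InitializeEXIFParser() was called once at startup
// (this commit adds that call to main.go); SaveEXIF then goes through
// whichever parser was selected, external exiftool or the internal fallback.
func indexMedia(db *gorm.DB, media *models.Media) {
	if _, err := exif.SaveEXIF(db, media); err != nil {
		log.Printf("saving EXIF for %s failed: %v", media.Path, err)
	}
}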

View File

@ -5,124 +5,142 @@ import (
"time"
"github.com/barasher/go-exiftool"
"github.com/photoview/photoview/api/dataloader"
"github.com/photoview/photoview/api/graphql/models"
)
type externalExifParser struct{}
type externalExifParser struct {
et *exiftool.Exiftool
dataLoader *dataloader.ExiftoolLoader
}
func (p *externalExifParser) ParseExif(media_path string) (returnExif *models.MediaEXIF, returnErr error) {
// ExifTool - No print conversion mode
func newExiftoolParser() (*externalExifParser, error) {
et, err := exiftool.NewExiftool(exiftool.NoPrintConversion())
if err != nil {
log.Printf("Error initializing ExifTool: %s\n", err)
return nil, err
}
defer et.Close()
fileInfos := et.ExtractMetadata(media_path)
return &externalExifParser{
et: et,
dataLoader: dataloader.NewExiftoolLoader(et),
}, nil
}
func (p *externalExifParser) ParseExif(media_path string) (returnExif *models.MediaEXIF, returnErr error) {
// ExifTool - No print conversion mode
if p.et == nil {
et, err := exiftool.NewExiftool(exiftool.NoPrintConversion())
p.et = et
if err != nil {
log.Printf("Error initializing ExifTool: %s\n", err)
return nil, err
}
}
fileInfo, err := p.dataLoader.Load(media_path)
if err != nil {
return nil, err
}
newExif := models.MediaEXIF{}
found_exif := false
for _, fileInfo := range fileInfos {
if fileInfo.Err != nil {
log.Printf("Fileinfo error: %v\n", fileInfo.Err)
continue
}
// Get camera model
model, err := fileInfo.GetString("Model")
if err == nil {
found_exif = true
newExif.Camera = &model
}
// Get camera model
model, err := fileInfo.GetString("Model")
// Get Camera make
make, err := fileInfo.GetString("Make")
if err == nil {
found_exif = true
newExif.Maker = &make
}
// Get lens
lens, err := fileInfo.GetString("LensModel")
if err == nil {
found_exif = true
newExif.Lens = &lens
}
//Get time of photo
date, err := fileInfo.GetString("DateTimeOriginal")
if err == nil {
layout := "2006:01:02 15:04:05"
dateTime, err := time.Parse(layout, date)
if err == nil {
found_exif = true
newExif.Camera = &model
newExif.DateShot = &dateTime
}
}
// Get Camera make
make, err := fileInfo.GetString("Make")
if err == nil {
found_exif = true
newExif.Maker = &make
}
// Get exposure time
exposureTime, err := fileInfo.GetFloat("ExposureTime")
if err == nil {
found_exif = true
newExif.Exposure = &exposureTime
}
// Get lens
lens, err := fileInfo.GetString("LensModel")
if err == nil {
found_exif = true
newExif.Lens = &lens
}
// Get aperture
aperture, err := fileInfo.GetFloat("Aperture")
if err == nil {
found_exif = true
newExif.Aperture = &aperture
}
//Get time of photo
date, err := fileInfo.GetString("DateTimeOriginal")
if err == nil {
layout := "2006:01:02 15:04:05"
dateTime, err := time.Parse(layout, date)
if err == nil {
found_exif = true
newExif.DateShot = &dateTime
}
}
// Get ISO
iso, err := fileInfo.GetInt("ISO")
if err == nil {
found_exif = true
newExif.Iso = &iso
}
// Get exposure time
exposureTime, err := fileInfo.GetFloat("ExposureTime")
if err == nil {
found_exif = true
newExif.Exposure = &exposureTime
}
// Get focal length
focalLen, err := fileInfo.GetFloat("FocalLength")
if err == nil {
found_exif = true
newExif.FocalLength = &focalLen
}
// Get aperture
aperture, err := fileInfo.GetFloat("Aperture")
if err == nil {
found_exif = true
newExif.Aperture = &aperture
}
// Get flash info
flash, err := fileInfo.GetInt("Flash")
if err == nil {
found_exif = true
newExif.Flash = &flash
}
// Get ISO
iso, err := fileInfo.GetInt("ISO")
if err == nil {
found_exif = true
newExif.Iso = &iso
}
// Get orientation
orientation, err := fileInfo.GetInt("Orientation")
if err == nil {
found_exif = true
newExif.Orientation = &orientation
}
// Get focal length
focalLen, err := fileInfo.GetFloat("FocalLength")
if err == nil {
found_exif = true
newExif.FocalLength = &focalLen
}
// Get exposure program
expProgram, err := fileInfo.GetInt("ExposureProgram")
if err == nil {
found_exif = true
newExif.ExposureProgram = &expProgram
}
// Get flash info
flash, err := fileInfo.GetInt("Flash")
if err == nil {
found_exif = true
newExif.Flash = &flash
}
// GPS coordinates - longitude
longitudeRaw, err := fileInfo.GetFloat("GPSLongitude")
if err == nil {
found_exif = true
newExif.GPSLongitude = &longitudeRaw
}
// Get orientation
orientation, err := fileInfo.GetInt("Orientation")
if err == nil {
found_exif = true
newExif.Orientation = &orientation
}
// Get exposure program
expProgram, err := fileInfo.GetInt("ExposureProgram")
if err == nil {
found_exif = true
newExif.ExposureProgram = &expProgram
}
// GPS coordinates - longitude
longitudeRaw, err := fileInfo.GetFloat("GPSLongitude")
if err == nil {
found_exif = true
newExif.GPSLongitude = &longitudeRaw
}
// GPS coordinates - latitude
latitudeRaw, err := fileInfo.GetFloat("GPSLatitude")
if err == nil {
found_exif = true
newExif.GPSLatitude = &latitudeRaw
}
// GPS coordinates - latitude
latitudeRaw, err := fileInfo.GetFloat("GPSLatitude")
if err == nil {
found_exif = true
newExif.GPSLatitude = &latitudeRaw
}
if !found_exif {

View File

@ -28,10 +28,16 @@ func TestExifParsers(t *testing.T) {
name: "internal",
parser: &internalExifParser{},
},
{
}
if externalParser, err := newExiftoolParser(); err == nil {
parsers = append(parsers, struct {
name string
parser exifParser
}{
name: "external",
parser: &externalExifParser{},
},
parser: externalParser,
})
}
images := []struct {

View File

@ -1,13 +1,17 @@
package scanner
package media_encoding
import (
"context"
"image"
"image/jpeg"
"os"
"time"
"github.com/disintegration/imaging"
"github.com/photoview/photoview/api/graphql/models"
"github.com/photoview/photoview/api/scanner/image_helpers"
"github.com/photoview/photoview/api/scanner/media_encoding/executable_worker"
"github.com/photoview/photoview/api/scanner/media_encoding/media_utils"
"github.com/photoview/photoview/api/scanner/media_type"
"github.com/photoview/photoview/api/utils"
"github.com/pkg/errors"
"gopkg.in/vansante/go-ffprobe.v2"
@ -15,14 +19,14 @@ import (
_ "github.com/strukturag/libheif/go/heif"
)
func EncodeThumbnail(inputPath string, outputPath string) (*image_helpers.PhotoDimensions, error) {
func EncodeThumbnail(inputPath string, outputPath string) (*media_utils.PhotoDimensions, error) {
inputImage, err := imaging.Open(inputPath, imaging.AutoOrientation(true))
if err != nil {
return nil, err
}
dimensions := image_helpers.PhotoDimensionsFromRect(inputImage.Bounds())
dimensions := media_utils.PhotoDimensionsFromRect(inputImage.Bounds())
dimensions = dimensions.ThumbnailScale()
thumbImage := imaging.Resize(inputImage, dimensions.Width, dimensions.Height, imaging.NearestNeighbor)
@ -50,19 +54,19 @@ func encodeImageJPEG(image image.Image, outputPath string, jpegQuality int) erro
// EncodeMediaData is used to easily decode media data, with a cache so expensive operations are not repeated
type EncodeMediaData struct {
media *models.Media
Media *models.Media
_photoImage image.Image
_contentType *MediaType
_contentType *media_type.MediaType
_videoMetadata *ffprobe.ProbeData
}
// ContentType reads the image to determine its content type
func (img *EncodeMediaData) ContentType() (*MediaType, error) {
func (img *EncodeMediaData) ContentType() (*media_type.MediaType, error) {
if img._contentType != nil {
return img._contentType, nil
}
imgType, err := getMediaType(img.media.Path)
imgType, err := media_type.GetMediaType(img.Media.Path)
if err != nil {
return nil, err
}
@ -77,14 +81,14 @@ func (img *EncodeMediaData) EncodeHighRes(outputPath string) error {
return err
}
if !contentType.isSupported() {
if !contentType.IsSupported() {
return errors.New("could not convert photo as file format is not supported")
}
// Use darktable if there is no counterpart JPEG file to use instead
if contentType.isRaw() && img.media.CounterpartPath == nil {
if DarktableCli.IsInstalled() {
err := DarktableCli.EncodeJpeg(img.media.Path, outputPath, 70)
if contentType.IsRaw() && img.Media.CounterpartPath == nil {
if executable_worker.DarktableCli.IsInstalled() {
err := executable_worker.DarktableCli.EncodeJpeg(img.Media.Path, outputPath, 70)
if err != nil {
return err
}
@ -110,10 +114,10 @@ func (img *EncodeMediaData) photoImage() (image.Image, error) {
}
var photoPath string
if img.media.CounterpartPath != nil {
photoPath = *img.media.CounterpartPath
if img.Media.CounterpartPath != nil {
photoPath = *img.Media.CounterpartPath
} else {
photoPath = img.media.Path
photoPath = img.Media.Path
}
photoImg, err := img.decodeImage(photoPath)
@ -139,7 +143,7 @@ func (img *EncodeMediaData) decodeImage(imagePath string) (image.Image, error) {
var decodedImage image.Image
if *mediaType == TypeHeic {
if *mediaType == media_type.TypeHeic {
decodedImage, _, err = image.Decode(file)
if err != nil {
return nil, errors.Wrapf(err, "failed to decode HEIF image (%s)", imagePath)
@ -153,3 +157,20 @@ func (img *EncodeMediaData) decodeImage(imagePath string) (image.Image, error) {
return decodedImage, nil
}
func (enc *EncodeMediaData) VideoMetadata() (*ffprobe.ProbeData, error) {
if enc._videoMetadata != nil {
return enc._videoMetadata, nil
}
ctx, cancelFn := context.WithTimeout(context.Background(), 5*time.Second)
defer cancelFn()
data, err := ffprobe.ProbeURL(ctx, enc.Media.Path)
if err != nil {
return nil, errors.Wrapf(err, "could not read video metadata (%s)", enc.Media.Title)
}
enc._videoMetadata = data
return enc._videoMetadata, nil
}

View File

@ -1,4 +1,4 @@
package scanner
package executable_worker
import (
"fmt"
@ -9,6 +9,7 @@ import (
"strings"
"github.com/pkg/errors"
"gopkg.in/vansante/go-ffprobe.v2"
)
func InitializeExecutableWorkers() {
@ -126,14 +127,9 @@ func (worker *FfmpegWorker) EncodeMp4(inputPath string, outputPath string) error
return nil
}
func (worker *FfmpegWorker) EncodeVideoThumbnail(inputPath string, outputPath string, mediaData *EncodeMediaData) error {
func (worker *FfmpegWorker) EncodeVideoThumbnail(inputPath string, outputPath string, probeData *ffprobe.ProbeData) error {
metadata, err := mediaData.VideoMetadata()
if err != nil {
return errors.Wrapf(err, "get metadata to encode video thumbnail (%s)", inputPath)
}
thumbnailOffsetSeconds := fmt.Sprintf("%d", int(metadata.Format.DurationSeconds*0.25))
thumbnailOffsetSeconds := fmt.Sprintf("%d", int(probeData.Format.DurationSeconds*0.25))
args := []string{
"-i",

View File

@ -1,4 +1,4 @@
package image_helpers
package media_utils
import (
"image"

View File

@ -1,14 +1,15 @@
package scanner
package media_type
import (
"io"
"log"
"os"
"path"
"path/filepath"
"strings"
"github.com/h2non/filetype"
"github.com/photoview/photoview/api/scanner/media_encoding/executable_worker"
"github.com/photoview/photoview/api/scanner/scanner_utils"
"github.com/pkg/errors"
)
@ -197,7 +198,7 @@ var fileExtensions = map[string]MediaType{
".mts": TypeMTS,
}
func (imgType *MediaType) isRaw() bool {
func (imgType *MediaType) IsRaw() bool {
for _, raw_mime := range RawMimeTypes {
if raw_mime == *imgType {
return true
@ -207,7 +208,7 @@ func (imgType *MediaType) isRaw() bool {
return false
}
func (imgType *MediaType) isWebCompatible() bool {
func (imgType *MediaType) IsWebCompatible() bool {
for _, web_mime := range WebMimetypes {
if web_mime == *imgType {
return true
@ -223,7 +224,7 @@ func (imgType *MediaType) isWebCompatible() bool {
return false
}
func (imgType *MediaType) isVideo() bool {
func (imgType *MediaType) IsVideo() bool {
for _, video_mime := range VideoMimetypes {
if video_mime == *imgType {
return true
@ -233,7 +234,7 @@ func (imgType *MediaType) isVideo() bool {
return false
}
func (imgType *MediaType) isBasicTypeSupported() bool {
func (imgType *MediaType) IsBasicTypeSupported() bool {
for _, img_mime := range SupportedMimetypes {
if img_mime == *imgType {
return true
@ -243,31 +244,36 @@ func (imgType *MediaType) isBasicTypeSupported() bool {
return false
}
// isSupported determines if the given type can be processed
func (imgType *MediaType) isSupported() bool {
if imgType.isBasicTypeSupported() {
// IsSupported determines if the given type can be processed
func (imgType *MediaType) IsSupported() bool {
if imgType.IsBasicTypeSupported() {
return true
}
if DarktableCli.IsInstalled() && imgType.isRaw() {
if executable_worker.DarktableCli.IsInstalled() && imgType.IsRaw() {
return true
}
if FfmpegCli.IsInstalled() && imgType.isVideo() {
if executable_worker.FfmpegCli.IsInstalled() && imgType.IsVideo() {
return true
}
return false
}
func getMediaType(path string) (*MediaType, error) {
func GetExtensionMediaType(ext string) (MediaType, bool) {
result, found := fileExtensions[strings.ToLower(ext)]
return result, found
}
func GetMediaType(path string) (*MediaType, error) {
ext := filepath.Ext(path)
fileExtType, found := fileExtensions[strings.ToLower(ext)]
fileExtType, found := GetExtensionMediaType(ext)
if found {
if fileExtType.isSupported() {
if fileExtType.IsSupported() {
return &fileExtType, nil
} else {
return nil, nil
@ -296,39 +302,13 @@ func getMediaType(path string) (*MediaType, error) {
}
imgType := MediaType(_imgType.MIME.Value)
if imgType.isSupported() {
if imgType.IsSupported() {
return &imgType, nil
}
return nil, nil
}
func isPathMedia(mediaPath string, cache *AlbumScannerCache) bool {
mediaType, err := cache.GetMediaType(mediaPath)
if err != nil {
ScannerError("isPathMedia (%s): %s", mediaPath, err)
return false
}
// Ignore hidden files
if path.Base(mediaPath)[0:1] == "." {
return false
}
if mediaType != nil {
// Make sure file isn't empty
fileStats, err := os.Stat(mediaPath)
if err != nil || fileStats.Size() == 0 {
return false
}
return true
}
log.Printf("File is not a supported media %s\n", mediaPath)
return false
}
func (mediaType MediaType) FileExtensions() []string {
var extensions []string
@ -341,3 +321,18 @@ func (mediaType MediaType) FileExtensions() []string {
return extensions
}
func RawCounterpart(imagePath string) *string {
pathWithoutExt := strings.TrimSuffix(imagePath, path.Ext(imagePath))
for _, rawType := range RawMimeTypes {
for _, ext := range rawType.FileExtensions() {
testPath := pathWithoutExt + ext
if scanner_utils.FileExists(testPath) {
return &testPath
}
}
}
return nil
}
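
Because the type checks are now exported from the media_type package, other packages (scanner_cache, media_encoding, the sidecar scanner below) can classify files without importing the scanner root. A minimal sketch; classify and its log output are illustrative, while GetMediaType, IsVideo, IsRaw, and RawCounterpart are the functions shown above.

// classify assumes imports of log and the media_type package.
// GetMediaType returns (nil, nil) for files that are not supported media.
func classify(p string) {
	t, err := media_type.GetMediaType(p)
	if err != nil {
		log.Printf("could not determine media type of %s: %v", p, err)
		return
	}
	if t == nil {
		log.Printf("%s is not supported media", p)
		return
	}
	switch {
	case t.IsVideo():
		log.Printf("%s is a video", p)
	case t.IsRaw():
		log.Printf("%s is a RAW photo", p)
	default:
		if raw := media_type.RawCounterpart(p); raw != nil {
			log.Printf("%s has a RAW counterpart at %s", p, *raw)
		}
	}
}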

View File

@ -8,7 +8,9 @@ import (
"strconv"
"github.com/photoview/photoview/api/graphql/models"
"github.com/photoview/photoview/api/scanner/image_helpers"
"github.com/photoview/photoview/api/scanner/media_encoding"
"github.com/photoview/photoview/api/scanner/media_encoding/media_utils"
"github.com/photoview/photoview/api/scanner/media_type"
"github.com/photoview/photoview/api/utils"
"github.com/pkg/errors"
"gorm.io/gorm"
@ -61,8 +63,8 @@ func generateUniqueMediaName(mediaPath string) string {
}
func ProcessMedia(tx *gorm.DB, media *models.Media) (bool, error) {
imageData := EncodeMediaData{
media: media,
imageData := media_encoding.EncodeMediaData{
Media: media,
}
contentType, err := imageData.ContentType()
@ -76,16 +78,16 @@ func ProcessMedia(tx *gorm.DB, media *models.Media) (bool, error) {
return false, errors.Wrap(err, "cache directory error")
}
if contentType.isVideo() {
if contentType.IsVideo() {
return processVideo(tx, &imageData, mediaCachePath)
} else {
return processPhoto(tx, &imageData, mediaCachePath)
}
}
func processPhoto(tx *gorm.DB, imageData *EncodeMediaData, photoCachePath *string) (bool, error) {
func processPhoto(tx *gorm.DB, imageData *media_encoding.EncodeMediaData, photoCachePath *string) (bool, error) {
photo := imageData.media
photo := imageData.Media
log.Printf("Processing photo: %s\n", photo.Path)
@ -111,10 +113,10 @@ func processPhoto(tx *gorm.DB, imageData *EncodeMediaData, photoCachePath *strin
return false, errors.Wrap(err, "error processing photo highres")
}
var photoDimensions *image_helpers.PhotoDimensions
var photoDimensions *media_utils.PhotoDimensions
var baseImagePath string = photo.Path
mediaType, err := getMediaType(photo.Path)
mediaType, err := media_type.GetMediaType(photo.Path)
if err != nil {
return false, errors.Wrap(err, "could determine if media was photo or video")
}
@ -127,7 +129,7 @@ func processPhoto(tx *gorm.DB, imageData *EncodeMediaData, photoCachePath *strin
return false, err
}
if !contentType.isWebCompatible() {
if !contentType.IsWebCompatible() {
didProcess = true
highresName := generateUniqueMediaNamePrefixed("highres", photo.Path, ".jpg")
@ -162,7 +164,7 @@ func processPhoto(tx *gorm.DB, imageData *EncodeMediaData, photoCachePath *strin
// Make sure photo dimensions is set
if photoDimensions == nil {
photoDimensions, err = image_helpers.GetPhotoDimensions(baseImagePath)
photoDimensions, err = media_utils.GetPhotoDimensions(baseImagePath)
if err != nil {
return false, err
}
@ -193,14 +195,14 @@ func processPhoto(tx *gorm.DB, imageData *EncodeMediaData, photoCachePath *strin
didProcess = true
fmt.Printf("Thumbnail photo found in database but not in cache, re-encoding photo to cache: %s\n", thumbURL.MediaName)
_, err := EncodeThumbnail(baseImagePath, thumbPath)
_, err := media_encoding.EncodeThumbnail(baseImagePath, thumbPath)
if err != nil {
return false, errors.Wrap(err, "could not create thumbnail cached image")
}
}
}
if mediaType.isRaw() {
if mediaType.IsRaw() {
err = processRawSideCar(tx, imageData, highResURL, thumbURL, photoCachePath)
if err != nil {
return false, err
@ -243,7 +245,7 @@ func makeMediaCacheDir(media *models.Media) (*string, error) {
return &photoCachePath, nil
}
func saveOriginalPhotoToDB(tx *gorm.DB, photo *models.Media, imageData *EncodeMediaData, photoDimensions *image_helpers.PhotoDimensions) error {
func saveOriginalPhotoToDB(tx *gorm.DB, photo *models.Media, imageData *media_encoding.EncodeMediaData, photoDimensions *media_utils.PhotoDimensions) error {
originalImageName := generateUniqueMediaName(photo.Path)
contentType, err := imageData.ContentType()
@ -273,14 +275,14 @@ func saveOriginalPhotoToDB(tx *gorm.DB, photo *models.Media, imageData *EncodeMe
return nil
}
func generateSaveHighResJPEG(tx *gorm.DB, media *models.Media, imageData *EncodeMediaData, highres_name string, imagePath string, mediaURL *models.MediaURL) (*models.MediaURL, error) {
func generateSaveHighResJPEG(tx *gorm.DB, media *models.Media, imageData *media_encoding.EncodeMediaData, highres_name string, imagePath string, mediaURL *models.MediaURL) (*models.MediaURL, error) {
err := imageData.EncodeHighRes(imagePath)
if err != nil {
return nil, errors.Wrap(err, "creating high-res cached image")
}
photoDimensions, err := image_helpers.GetPhotoDimensions(imagePath)
photoDimensions, err := media_utils.GetPhotoDimensions(imagePath)
if err != nil {
return nil, err
}
@ -321,7 +323,7 @@ func generateSaveHighResJPEG(tx *gorm.DB, media *models.Media, imageData *Encode
func generateSaveThumbnailJPEG(tx *gorm.DB, media *models.Media, thumbnail_name string, photoCachePath *string, baseImagePath string, mediaURL *models.MediaURL) (*models.MediaURL, error) {
thumbOutputPath := path.Join(*photoCachePath, thumbnail_name)
thumbSize, err := EncodeThumbnail(baseImagePath, thumbOutputPath)
thumbSize, err := media_encoding.EncodeThumbnail(baseImagePath, thumbOutputPath)
if err != nil {
return nil, errors.Wrap(err, "could not create thumbnail cached image")
}
@ -359,8 +361,8 @@ func generateSaveThumbnailJPEG(tx *gorm.DB, media *models.Media, thumbnail_name
return mediaURL, nil
}
func processRawSideCar(tx *gorm.DB, imageData *EncodeMediaData, highResURL *models.MediaURL, thumbURL *models.MediaURL, photoCachePath *string) error {
photo := imageData.media
func processRawSideCar(tx *gorm.DB, imageData *media_encoding.EncodeMediaData, highResURL *models.MediaURL, thumbURL *models.MediaURL, photoCachePath *string) error {
photo := imageData.Media
sideCarFileHasChanged := false
var currentFileHash *string
currentSideCarPath := scanForSideCarFile(photo.Path)

View File

@ -10,15 +10,17 @@ import (
"time"
"github.com/photoview/photoview/api/graphql/models"
"github.com/photoview/photoview/api/scanner/image_helpers"
"github.com/photoview/photoview/api/scanner/media_encoding"
"github.com/photoview/photoview/api/scanner/media_encoding/executable_worker"
"github.com/photoview/photoview/api/scanner/media_encoding/media_utils"
"github.com/photoview/photoview/api/utils"
"github.com/pkg/errors"
"gopkg.in/vansante/go-ffprobe.v2"
"gorm.io/gorm"
)
func processVideo(tx *gorm.DB, mediaData *EncodeMediaData, videoCachePath *string) (bool, error) {
video := mediaData.media
func processVideo(tx *gorm.DB, mediaData *media_encoding.EncodeMediaData, videoCachePath *string) (bool, error) {
video := mediaData.Media
didProcess := false
log.Printf("Processing video: %s", video.Path)
@ -45,7 +47,7 @@ func processVideo(tx *gorm.DB, mediaData *EncodeMediaData, videoCachePath *strin
return false, errors.Wrap(err, "error getting video content type")
}
if videoOriginalURL == nil && videoType.isWebCompatible() {
if videoOriginalURL == nil && videoType.IsWebCompatible() {
didProcess = true
origVideoPath := video.Path
@ -77,7 +79,7 @@ func processVideo(tx *gorm.DB, mediaData *EncodeMediaData, videoCachePath *strin
}
if videoWebURL == nil && !videoType.isWebCompatible() {
if videoWebURL == nil && !videoType.IsWebCompatible() {
didProcess = true
web_video_name := fmt.Sprintf("web_video_%s_%s", path.Base(video.Path), utils.GenerateToken())
@ -87,7 +89,7 @@ func processVideo(tx *gorm.DB, mediaData *EncodeMediaData, videoCachePath *strin
webVideoPath := path.Join(*videoCachePath, web_video_name)
err = FfmpegCli.EncodeMp4(video.Path, webVideoPath)
err = executable_worker.FfmpegCli.EncodeMp4(video.Path, webVideoPath)
if err != nil {
return false, errors.Wrapf(err, "could not encode mp4 video (%s)", video.Path)
}
@ -117,6 +119,11 @@ func processVideo(tx *gorm.DB, mediaData *EncodeMediaData, videoCachePath *strin
}
}
probeData, err := mediaData.VideoMetadata()
if err != nil {
return false, err
}
if videoThumbnailURL == nil {
didProcess = true
@ -127,12 +134,12 @@ func processVideo(tx *gorm.DB, mediaData *EncodeMediaData, videoCachePath *strin
thumbImagePath := path.Join(*videoCachePath, video_thumb_name)
err = FfmpegCli.EncodeVideoThumbnail(video.Path, thumbImagePath, mediaData)
err = executable_worker.FfmpegCli.EncodeVideoThumbnail(video.Path, thumbImagePath, probeData)
if err != nil {
return false, errors.Wrapf(err, "failed to generate thumbnail for video (%s)", video.Title)
}
thumbDimensions, err := image_helpers.GetPhotoDimensions(thumbImagePath)
thumbDimensions, err := media_utils.GetPhotoDimensions(thumbImagePath)
if err != nil {
return false, errors.Wrap(err, "get dimensions of video thumbnail image")
}
@ -163,12 +170,12 @@ func processVideo(tx *gorm.DB, mediaData *EncodeMediaData, videoCachePath *strin
fmt.Printf("Video thumbnail found in database but not in cache, re-encoding photo to cache: %s\n", videoThumbnailURL.MediaName)
didProcess = true
err = FfmpegCli.EncodeVideoThumbnail(video.Path, thumbImagePath, mediaData)
err = executable_worker.FfmpegCli.EncodeVideoThumbnail(video.Path, thumbImagePath, probeData)
if err != nil {
return false, errors.Wrapf(err, "failed to generate thumbnail for video (%s)", video.Title)
}
thumbDimensions, err := image_helpers.GetPhotoDimensions(thumbImagePath)
thumbDimensions, err := media_utils.GetPhotoDimensions(thumbImagePath)
if err != nil {
return false, errors.Wrap(err, "get dimensions of video thumbnail image")
}
@ -191,23 +198,6 @@ func processVideo(tx *gorm.DB, mediaData *EncodeMediaData, videoCachePath *strin
return didProcess, nil
}
func (enc *EncodeMediaData) VideoMetadata() (*ffprobe.ProbeData, error) {
if enc._videoMetadata != nil {
return enc._videoMetadata, nil
}
ctx, cancelFn := context.WithTimeout(context.Background(), 5*time.Second)
defer cancelFn()
data, err := ffprobe.ProbeURL(ctx, enc.media.Path)
if err != nil {
return nil, errors.Wrapf(err, "could not read video metadata (%s)", enc.media.Title)
}
enc._videoMetadata = data
return enc._videoMetadata, nil
}
func readVideoMetadata(videoPath string) (*ffprobe.ProbeData, error) {
ctx, cancelFn := context.WithTimeout(context.Background(), 5*time.Second)
defer cancelFn()

View File

@ -8,6 +8,7 @@ import (
"github.com/photoview/photoview/api/graphql/models"
"github.com/photoview/photoview/api/graphql/notification"
"github.com/photoview/photoview/api/scanner/scanner_cache"
"github.com/photoview/photoview/api/utils"
"github.com/pkg/errors"
"gorm.io/gorm"
@ -15,7 +16,7 @@ import (
type ScannerJob struct {
album *models.Album
cache *AlbumScannerCache
cache *scanner_cache.AlbumScannerCache
}
func (job *ScannerJob) Run(db *gorm.DB) {
@ -197,7 +198,7 @@ func AddAllToQueue() error {
}
func AddUserToQueue(user *models.User) error {
album_cache := MakeAlbumCache()
album_cache := scanner_cache.MakeAlbumCache()
albums, album_errors := findAlbumsForUser(global_scanner_queue.db, user, album_cache)
for _, err := range album_errors {
return errors.Wrapf(err, "find albums for user (user_id: %d)", user.ID)

View File

@ -4,6 +4,7 @@ import (
"testing"
"github.com/photoview/photoview/api/graphql/models"
"github.com/photoview/photoview/api/scanner/scanner_cache"
)
func makeAlbumWithID(id int) *models.Album {
@ -16,8 +17,8 @@ func makeAlbumWithID(id int) *models.Album {
func TestScannerQueue_AddJob(t *testing.T) {
scannerJobs := []ScannerJob{
{album: makeAlbumWithID(100), cache: MakeAlbumCache()},
{album: makeAlbumWithID(20), cache: MakeAlbumCache()},
{album: makeAlbumWithID(100), cache: scanner_cache.MakeAlbumCache()},
{album: makeAlbumWithID(20), cache: scanner_cache.MakeAlbumCache()},
}
mockScannerQueue := ScannerQueue{
@ -28,7 +29,7 @@ func TestScannerQueue_AddJob(t *testing.T) {
}
t.Run("add new job to scanner queue", func(t *testing.T) {
newJob := ScannerJob{album: makeAlbumWithID(42), cache: MakeAlbumCache()}
newJob := ScannerJob{album: makeAlbumWithID(42), cache: scanner_cache.MakeAlbumCache()}
startingJobs := len(mockScannerQueue.up_next)
@ -48,7 +49,7 @@ func TestScannerQueue_AddJob(t *testing.T) {
t.Run("add existing job to scanner queue", func(t *testing.T) {
startingJobs := len(mockScannerQueue.up_next)
err := mockScannerQueue.addJob(&ScannerJob{album: makeAlbumWithID(20), cache: MakeAlbumCache()})
err := mockScannerQueue.addJob(&ScannerJob{album: makeAlbumWithID(20), cache: scanner_cache.MakeAlbumCache()})
if err != nil {
t.Errorf(".AddJob() returned an unexpected error: %s", err)
}
@ -64,8 +65,8 @@ func TestScannerQueue_AddJob(t *testing.T) {
func TestScannerQueue_JobOnQueue(t *testing.T) {
scannerJobs := []ScannerJob{
{album: makeAlbumWithID(100), cache: MakeAlbumCache()},
{album: makeAlbumWithID(20), cache: MakeAlbumCache()},
{album: makeAlbumWithID(100), cache: scanner_cache.MakeAlbumCache()},
{album: makeAlbumWithID(20), cache: scanner_cache.MakeAlbumCache()},
}
mockScannerQueue := ScannerQueue{
@ -81,10 +82,10 @@ func TestScannerQueue_JobOnQueue(t *testing.T) {
ScannerJob
}{
{"album which owner is already on the queue", true, ScannerJob{
album: makeAlbumWithID(100), cache: MakeAlbumCache(),
album: makeAlbumWithID(100), cache: scanner_cache.MakeAlbumCache(),
}},
{"album that is not on the queue", false, ScannerJob{
album: makeAlbumWithID(321), cache: MakeAlbumCache(),
album: makeAlbumWithID(321), cache: scanner_cache.MakeAlbumCache(),
}},
}

View File

@ -11,6 +11,8 @@ import (
"github.com/photoview/photoview/api/graphql/models"
"github.com/photoview/photoview/api/graphql/notification"
"github.com/photoview/photoview/api/scanner/face_detection"
"github.com/photoview/photoview/api/scanner/scanner_cache"
"github.com/photoview/photoview/api/scanner/scanner_utils"
"github.com/photoview/photoview/api/utils"
"github.com/pkg/errors"
ignore "github.com/sabhiram/go-gitignore"
@ -85,7 +87,7 @@ func ValidRootPath(rootPath string) bool {
return true
}
func scanAlbum(album *models.Album, cache *AlbumScannerCache, db *gorm.DB) {
func scanAlbum(album *models.Album, cache *scanner_cache.AlbumScannerCache, db *gorm.DB) {
album_notify_key := utils.GenerateToken()
notifyThrottle := utils.NewThrottle(500 * time.Millisecond)
@ -105,7 +107,7 @@ func scanAlbum(album *models.Album, cache *AlbumScannerCache, db *gorm.DB) {
}
})
if err != nil {
ScannerError("Failed to find media for album (%s): %s", album.Path, err)
scanner_utils.ScannerError("Failed to find media for album (%s): %s", album.Path, err)
}
album_has_changes := false
@ -134,13 +136,13 @@ func scanAlbum(album *models.Album, cache *AlbumScannerCache, db *gorm.DB) {
})
if transactionError != nil {
ScannerError("Failed to begin database transaction: %s", transactionError)
scanner_utils.ScannerError("Failed to begin database transaction: %s", transactionError)
}
if processing_was_needed && media.Type == models.MediaTypePhoto {
go func(media *models.Media) {
if err := face_detection.GlobalFaceDetector.DetectFaces(db, media); err != nil {
ScannerError("Error detecting faces in image (%s): %s", media.Path, err)
scanner_utils.ScannerError("Error detecting faces in image (%s): %s", media.Path, err)
}
}(media)
}
@ -148,7 +150,7 @@ func scanAlbum(album *models.Album, cache *AlbumScannerCache, db *gorm.DB) {
cleanup_errors := CleanupMedia(db, album.ID, albumMedia)
for _, err := range cleanup_errors {
ScannerError("Failed to delete old media: %s", err)
scanner_utils.ScannerError("Failed to delete old media: %s", err)
}
if album_has_changes {
@ -164,7 +166,7 @@ func scanAlbum(album *models.Album, cache *AlbumScannerCache, db *gorm.DB) {
}
}
func findMediaForAlbum(album *models.Album, cache *AlbumScannerCache, db *gorm.DB, onScanPhoto func(photo *models.Media, newPhoto bool)) ([]*models.Media, error) {
func findMediaForAlbum(album *models.Album, cache *scanner_cache.AlbumScannerCache, db *gorm.DB, onScanPhoto func(photo *models.Media, newPhoto bool)) ([]*models.Media, error) {
albumPhotos := make([]*models.Media, 0)
@ -179,7 +181,7 @@ func findMediaForAlbum(album *models.Album, cache *AlbumScannerCache, db *gorm.D
for _, item := range dirContent {
photoPath := path.Join(album.Path, item.Name())
if !item.IsDir() && isPathMedia(photoPath, cache) {
if !item.IsDir() && cache.IsPathMedia(photoPath) {
// Match file against ignore data
if albumIgnore.MatchesPath(item.Name()) {
log.Printf("File %s ignored\n", item.Name())
@ -206,7 +208,7 @@ func findMediaForAlbum(album *models.Album, cache *AlbumScannerCache, db *gorm.D
})
if err != nil {
ScannerError("Error scanning media for album (%d): %s\n", album.ID, err)
scanner_utils.ScannerError("Error scanning media for album (%d): %s\n", album.ID, err)
continue
}

View File

@ -1,15 +1,19 @@
package scanner
package scanner_cache
import (
"log"
"os"
"path"
"sync"
"github.com/photoview/photoview/api/scanner/media_type"
"github.com/photoview/photoview/api/scanner/scanner_utils"
"github.com/pkg/errors"
)
type AlbumScannerCache struct {
path_contains_photos map[string]bool
photo_types map[string]MediaType
photo_types map[string]media_type.MediaType
ignore_data map[string][]string
mutex sync.Mutex
}
@ -17,7 +21,7 @@ type AlbumScannerCache struct {
func MakeAlbumCache() *AlbumScannerCache {
return &AlbumScannerCache{
path_contains_photos: make(map[string]bool),
photo_types: make(map[string]MediaType),
photo_types: make(map[string]media_type.MediaType),
ignore_data: make(map[string][]string),
}
}
@ -65,7 +69,7 @@ func (c *AlbumScannerCache) AlbumContainsPhotos(path string) *bool {
// (c.photo_types)[path] = content_type
// }
func (c *AlbumScannerCache) GetMediaType(path string) (*MediaType, error) {
func (c *AlbumScannerCache) GetMediaType(path string) (*media_type.MediaType, error) {
c.mutex.Lock()
defer c.mutex.Unlock()
@ -75,7 +79,7 @@ func (c *AlbumScannerCache) GetMediaType(path string) (*MediaType, error) {
return &result, nil
}
mediaType, err := getMediaType(path)
mediaType, err := media_type.GetMediaType(path)
if err != nil {
return nil, errors.Wrapf(err, "get media type (%s)", path)
}
@ -105,3 +109,29 @@ func (c *AlbumScannerCache) InsertAlbumIgnore(path string, ignore_data []string)
c.ignore_data[path] = ignore_data
}
func (c *AlbumScannerCache) IsPathMedia(mediaPath string) bool {
mediaType, err := c.GetMediaType(mediaPath)
if err != nil {
scanner_utils.ScannerError("IsPathMedia (%s): %s", mediaPath, err)
return false
}
// Ignore hidden files
if path.Base(mediaPath)[0:1] == "." {
return false
}
if mediaType != nil {
// Make sure file isn't empty
fileStats, err := os.Stat(mediaPath)
if err != nil || fileStats.Size() == 0 {
return false
}
return true
}
log.Printf("File is not a supported media %s\n", mediaPath)
return false
}

View File

@ -12,27 +12,17 @@ import (
"github.com/photoview/photoview/api/graphql/models"
"github.com/photoview/photoview/api/scanner/exif"
"github.com/photoview/photoview/api/scanner/media_type"
"github.com/photoview/photoview/api/scanner/scanner_cache"
"github.com/photoview/photoview/api/scanner/scanner_utils"
"github.com/pkg/errors"
"gorm.io/gorm"
)
func fileExists(testPath string) bool {
_, err := os.Stat(testPath)
if os.IsNotExist(err) {
return false
} else if err != nil {
// unexpected error logging
log.Printf("Error: checking for file existence (%s): %s", testPath, err)
return false
}
return true
}
func scanForSideCarFile(path string) *string {
testPath := path + ".xmp"
if fileExists(testPath) {
if scanner_utils.FileExists(testPath) {
return &testPath
}
@ -41,23 +31,17 @@ func scanForSideCarFile(path string) *string {
func scanForRawCounterpartFile(imagePath string) *string {
ext := filepath.Ext(imagePath)
fileExtType, found := fileExtensions[strings.ToLower(ext)]
fileExtType, found := media_type.GetExtensionMediaType(ext)
if found {
if !fileExtType.isBasicTypeSupported() {
if !fileExtType.IsBasicTypeSupported() {
return nil
}
}
pathWithoutExt := strings.TrimSuffix(imagePath, path.Ext(imagePath))
for _, rawType := range RawMimeTypes {
for _, ext := range rawType.FileExtensions() {
testPath := pathWithoutExt + ext
if fileExists(testPath) {
return &testPath
}
}
rawPath := media_type.RawCounterpart(imagePath)
if rawPath != nil {
return rawPath
}
return nil
@ -65,18 +49,18 @@ func scanForRawCounterpartFile(imagePath string) *string {
func scanForCompressedCounterpartFile(imagePath string) *string {
ext := filepath.Ext(imagePath)
fileExtType, found := fileExtensions[strings.ToLower(ext)]
fileExtType, found := media_type.GetExtensionMediaType(ext)
if found {
if fileExtType.isBasicTypeSupported() {
if fileExtType.IsBasicTypeSupported() {
return nil
}
}
pathWithoutExt := strings.TrimSuffix(imagePath, path.Ext(imagePath))
for _, ext := range TypeJpeg.FileExtensions() {
for _, ext := range media_type.TypeJpeg.FileExtensions() {
testPath := pathWithoutExt + ext
if fileExists(testPath) {
if scanner_utils.FileExists(testPath) {
return &testPath
}
}
@ -103,7 +87,7 @@ func hashSideCarFile(path *string) *string {
return &hash
}
func ScanMedia(tx *gorm.DB, mediaPath string, albumId int, cache *AlbumScannerCache) (*models.Media, bool, error) {
func ScanMedia(tx *gorm.DB, mediaPath string, albumId int, cache *scanner_cache.AlbumScannerCache) (*models.Media, bool, error) {
mediaName := path.Base(mediaPath)
// Check if media already exists
@ -134,12 +118,12 @@ func ScanMedia(tx *gorm.DB, mediaPath string, albumId int, cache *AlbumScannerCa
var sideCarPath *string = nil
var sideCarHash *string = nil
if mediaType.isVideo() {
if mediaType.IsVideo() {
mediaTypeText = models.MediaTypeVideo
} else {
mediaTypeText = models.MediaTypePhoto
// search for sidecar files
if mediaType.isRaw() {
if mediaType.IsRaw() {
sideCarPath = scanForSideCarFile(mediaPath)
if sideCarPath != nil {
sideCarHash = hashSideCarFile(sideCarPath)

View File

@ -3,15 +3,14 @@ package scanner
import (
"bufio"
"container/list"
"fmt"
"io/ioutil"
"log"
"os"
"path"
"github.com/photoview/photoview/api/graphql/models"
"github.com/photoview/photoview/api/graphql/notification"
"github.com/photoview/photoview/api/utils"
"github.com/photoview/photoview/api/scanner/scanner_cache"
"github.com/photoview/photoview/api/scanner/scanner_utils"
"github.com/pkg/errors"
ignore "github.com/sabhiram/go-gitignore"
"gorm.io/gorm"
@ -42,7 +41,7 @@ func getPhotoviewIgnore(ignorePath string) ([]string, error) {
return photoviewIgnore, scanner.Err()
}
func findAlbumsForUser(db *gorm.DB, user *models.User, album_cache *AlbumScannerCache) ([]*models.Album, []error) {
func findAlbumsForUser(db *gorm.DB, user *models.User, album_cache *scanner_cache.AlbumScannerCache) ([]*models.Album, []error) {
if err := user.FillAlbums(db); err != nil {
return nil, []error{err}
@ -215,7 +214,7 @@ func findAlbumsForUser(db *gorm.DB, user *models.User, album_cache *AlbumScanner
return userAlbums, scanErrors
}
func directoryContainsPhotos(rootPath string, cache *AlbumScannerCache, albumIgnore []string) bool {
func directoryContainsPhotos(rootPath string, cache *scanner_cache.AlbumScannerCache, albumIgnore []string) bool {
if contains_image := cache.AlbumContainsPhotos(rootPath); contains_image != nil {
return *contains_image
@ -244,7 +243,7 @@ func directoryContainsPhotos(rootPath string, cache *AlbumScannerCache, albumIgn
dirContent, err := ioutil.ReadDir(dirPath)
if err != nil {
ScannerError("Could not read directory (%s): %s\n", dirPath, err.Error())
scanner_utils.ScannerError("Could not read directory (%s): %s\n", dirPath, err.Error())
return false
}
@ -253,7 +252,7 @@ func directoryContainsPhotos(rootPath string, cache *AlbumScannerCache, albumIgn
if fileInfo.IsDir() {
scanQueue.PushBack(filePath)
} else {
if isPathMedia(filePath, cache) {
if cache.IsPathMedia(filePath) {
if ignoreEntries.MatchesPath(fileInfo.Name()) {
log.Printf("Match found %s, continue search for media", fileInfo.Name())
continue
@ -273,16 +272,3 @@ func directoryContainsPhotos(rootPath string, cache *AlbumScannerCache, albumIgn
}
return false
}
func ScannerError(format string, args ...interface{}) {
message := fmt.Sprintf(format, args...)
log.Printf("ERROR: %s", message)
notification.BroadcastNotification(&models.Notification{
Key: utils.GenerateToken(),
Type: models.NotificationTypeMessage,
Header: "Scanner error",
Content: message,
Negative: true,
})
}

View File

@ -0,0 +1,19 @@
package scanner_utils
import (
"log"
"os"
)
func FileExists(testPath string) bool {
_, err := os.Stat(testPath)
if os.IsNotExist(err) {
return false
} else if err != nil {
// unexpected error logging
log.Printf("Error: checking for file existence (%s): %s", testPath, err)
return false
}
return true
}

View File

@ -0,0 +1,23 @@
package scanner_utils
import (
"fmt"
"log"
"github.com/photoview/photoview/api/graphql/models"
"github.com/photoview/photoview/api/graphql/notification"
"github.com/photoview/photoview/api/utils"
)
func ScannerError(format string, args ...interface{}) {
message := fmt.Sprintf(format, args...)
log.Printf("ERROR: %s", message)
notification.BroadcastNotification(&models.Notification{
Key: utils.GenerateToken(),
Type: models.NotificationTypeMessage,
Header: "Scanner error",
Content: message,
Negative: true,
})
}

View File

@ -12,12 +12,13 @@ import (
"github.com/joho/godotenv"
"github.com/photoview/photoview/api/database"
"github.com/photoview/photoview/api/dataloader"
"github.com/photoview/photoview/api/graphql/auth"
"github.com/photoview/photoview/api/graphql/dataloader"
"github.com/photoview/photoview/api/routes"
"github.com/photoview/photoview/api/scanner"
"github.com/photoview/photoview/api/scanner/exif"
"github.com/photoview/photoview/api/scanner/face_detection"
"github.com/photoview/photoview/api/scanner/media_encoding/executable_worker"
"github.com/photoview/photoview/api/server"
"github.com/photoview/photoview/api/utils"
@ -54,7 +55,7 @@ func main() {
log.Panicf("Could not initialize periodic scanner: %s", err)
}
scanner.InitializeExecutableWorkers()
executable_worker.InitializeExecutableWorkers()
exif.InitializeEXIFParser()