
Merge pull request #649 from photoview/rework-scanner-internals

Split up scanner into separate tasks + update dependencies for UI, API, Dockerfile
This commit is contained in:
Viktor Strate Kløvedal 2022-07-08 18:10:12 +02:00 committed by GitHub
commit 4177b4fb33
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
85 changed files with 16349 additions and 23093 deletions

View File

@ -19,6 +19,7 @@ jobs:
runs-on: ubuntu-20.04
strategy:
fail-fast: false
matrix:
target_platform:
- "linux/amd64"

View File

@ -120,7 +120,7 @@ jobs:
strategy:
matrix:
node-version: [15.x]
node-version: [18.x]
steps:
- uses: actions/checkout@v2

3
.gitignore vendored
View File

@ -1,7 +1,8 @@
# See https://help.github.com/ignore-files/ for more about ignoring files.
cache/
media_cache/
/media_cache/
/api/media_cache/
/photos_path
photoview.db
photoview.db-journal

View File

@ -1,5 +1,5 @@
### Build UI ###
FROM --platform=${BUILDPLATFORM:-linux/amd64} node:15 as ui
FROM --platform=${BUILDPLATFORM:-linux/amd64} node:18 as ui
ARG REACT_APP_API_ENDPOINT
ENV REACT_APP_API_ENDPOINT=${REACT_APP_API_ENDPOINT}
@ -29,7 +29,7 @@ RUN npm ci --omit=dev --ignore-scripts
# Build frontend
COPY ui /app
RUN npm run build -- --public-url $UI_PUBLIC_URL
RUN npm run build -- --base=$UI_PUBLIC_URL
### Build API ###
FROM --platform=${BUILDPLATFORM:-linux/amd64} debian:bookworm AS api
@ -75,7 +75,7 @@ COPY api/data /app/data
RUN apt update \
# Required dependencies
&& apt install -y curl gpg libdlib19 ffmpeg exiftool libheif1
&& apt install -y curl gpg libdlib19.1 ffmpeg exiftool libheif1
# Install Darktable if building for a supported architecture
RUN if [ "${TARGETPLATFORM}" = "linux/amd64" ] || [ "${TARGETPLATFORM}" = "linux/arm64" ]; then \
@ -87,7 +87,7 @@ RUN apt purge -y gpg \
&& apt clean \
&& rm -rf /var/lib/apt/lists/*
COPY --from=ui /app/build /ui
COPY --from=ui /app/dist /ui
COPY --from=api /app/photoview /app/photoview
ENV PHOTOVIEW_LISTEN_IP 127.0.0.1

View File

@ -30,9 +30,6 @@ type Media struct {
SideCarHash *string `gorm:"unique"`
Faces []*ImageFace `gorm:"constraint:OnDelete:CASCADE;"`
Blurhash *string `gorm:""`
// Only used internally
CounterpartPath *string `gorm:"-"`
}
func (Media) TableName() string {
@ -65,6 +62,25 @@ func (m *Media) GetThumbnail() (*MediaURL, error) {
return nil, nil
}
func (m *Media) GetHighRes() (*MediaURL, error) {
if len(m.MediaURL) == 0 {
return nil, errors.New("media.MediaURL is empty")
}
for _, url := range m.MediaURL {
if url.Purpose == PhotoHighRes {
url.Media = m
return &url, nil
}
}
return nil, nil
}
func (m *Media) CachePath() (string, error) {
return utils.CachePathForMedia(m.AlbumID, m.ID)
}
type MediaType string
const (
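The new Media helpers above (GetHighRes and CachePath) let callers locate a media item's cached high-res file directly from the model. A minimal illustrative sketch; the function name and error handling are assumptions and not code from this commit (imports of path and models omitted):

// Sketch only: resolve the cached high-res file for a media item.
// Assumes media.MediaURL has been preloaded from the database.
func cachedHighResPath(media *models.Media) (string, error) {
	highRes, err := media.GetHighRes()
	if err != nil || highRes == nil {
		// No high-res entry exists, e.g. the original is already web compatible.
		return "", err
	}
	cacheDir, err := media.CachePath()
	if err != nil {
		return "", err
	}
	return path.Join(cacheDir, highRes.MediaName), nil
}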

View File

@ -6,13 +6,14 @@ import (
"github.com/photoview/photoview/api/database/drivers"
"github.com/photoview/photoview/api/graphql/models"
"github.com/photoview/photoview/api/scanner"
"github.com/photoview/photoview/api/scanner/periodic_scanner"
"github.com/photoview/photoview/api/scanner/scanner_queue"
"github.com/pkg/errors"
"gorm.io/gorm"
)
func (r *mutationResolver) ScanAll(ctx context.Context) (*models.ScannerResult, error) {
err := scanner.AddAllToQueue()
err := scanner_queue.AddAllToQueue()
if err != nil {
return nil, err
}
@ -32,7 +33,7 @@ func (r *mutationResolver) ScanUser(ctx context.Context, userID int) (*models.Sc
return nil, errors.Wrap(err, "get user from database")
}
scanner.AddUserToQueue(&user)
scanner_queue.AddUserToQueue(&user)
startMessage := "Scanner started"
return &models.ScannerResult{
@ -57,7 +58,7 @@ func (r *mutationResolver) SetPeriodicScanInterval(ctx context.Context, interval
return 0, err
}
scanner.ChangePeriodicScanInterval(time.Duration(siteInfo.PeriodicScanInterval) * time.Second)
periodic_scanner.ChangePeriodicScanInterval(time.Duration(siteInfo.PeriodicScanInterval) * time.Second)
return siteInfo.PeriodicScanInterval, nil
}
@ -81,7 +82,7 @@ func (r *mutationResolver) SetScannerConcurrentWorkers(ctx context.Context, work
return 0, err
}
scanner.ChangeScannerConcurrentWorkers(siteInfo.ConcurrentWorkers)
scanner_queue.ChangeScannerConcurrentWorkers(siteInfo.ConcurrentWorkers)
return siteInfo.ConcurrentWorkers, nil
}

View File

@ -46,7 +46,7 @@ func RegisterPhotoRoutes(db *gorm.DB, router *mux.Router) {
if _, err := os.Stat(cachedPath); os.IsNotExist(err) {
err := db.Transaction(func(tx *gorm.DB) error {
if _, err = scanner.ProcessMedia(tx, media); err != nil {
if err = scanner.ProcessSingleMedia(tx, media); err != nil {
log.Printf("ERROR: processing image not found in cache (%s): %s\n", cachedPath, err)
return err
}

View File

@ -52,7 +52,7 @@ func RegisterVideoRoutes(db *gorm.DB, router *mux.Router) {
if _, err := os.Stat(cachedPath); err != nil {
if os.IsNotExist(err) {
err := db.Transaction(func(tx *gorm.DB) error {
if _, err := scanner.ProcessMedia(tx, media); err != nil {
if err := scanner.ProcessSingleMedia(tx, media); err != nil {
log.Printf("ERROR: processing video not found in cache: %s\n", err)
return err
}

View File

@ -55,11 +55,18 @@ func encodeImageJPEG(image image.Image, outputPath string, jpegQuality int) erro
// EncodeMediaData is used to easily decode media data, with a cache so expensive operations are not repeated
type EncodeMediaData struct {
Media *models.Media
CounterpartPath *string
_photoImage image.Image
_contentType *media_type.MediaType
_videoMetadata *ffprobe.ProbeData
}
func NewEncodeMediaData(media *models.Media) EncodeMediaData {
return EncodeMediaData{
Media: media,
}
}
// ContentType reads the image to determine its content type
func (img *EncodeMediaData) ContentType() (*media_type.MediaType, error) {
if img._contentType != nil {
@ -86,7 +93,7 @@ func (img *EncodeMediaData) EncodeHighRes(outputPath string) error {
}
// Use darktable if there is no counterpart JPEG file to use instead
if contentType.IsRaw() && img.Media.CounterpartPath == nil {
if contentType.IsRaw() && img.CounterpartPath == nil {
if executable_worker.DarktableCli.IsInstalled() {
err := executable_worker.DarktableCli.EncodeJpeg(img.Media.Path, outputPath, 70)
if err != nil {
@ -114,8 +121,8 @@ func (img *EncodeMediaData) photoImage() (image.Image, error) {
}
var photoPath string
if img.Media.CounterpartPath != nil {
photoPath = *img.Media.CounterpartPath
if img.CounterpartPath != nil {
photoPath = *img.CounterpartPath
} else {
photoPath = img.Media.Path
}
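With CounterpartPath now living on EncodeMediaData instead of models.Media, callers build the encoder first and attach any counterpart JPEG afterwards. An illustrative sketch; the wrapper function below is hypothetical and not part of this commit:

// Sketch only: encode a raw file, preferring its compressed counterpart if one was found.
func encodeRawWithCounterpart(media *models.Media, counterpart *string, outputPath string) error {
	data := media_encoding.NewEncodeMediaData(media)
	data.CounterpartPath = counterpart // nil means the raw file is encoded directly (e.g. via darktable)
	return data.EncodeHighRes(outputPath)
}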

View File

@ -270,7 +270,6 @@ func GetExtensionMediaType(ext string) (MediaType, bool) {
}
func GetMediaType(path string) (*MediaType, error) {
ext := filepath.Ext(path)
fileExtType, found := GetExtensionMediaType(ext)

View File

@ -1,4 +1,4 @@
package scanner
package periodic_scanner
import (
"log"
@ -6,6 +6,7 @@ import (
"time"
"github.com/photoview/photoview/api/graphql/models"
"github.com/photoview/photoview/api/scanner/scanner_queue"
"gorm.io/gorm"
)
@ -81,7 +82,7 @@ func scanIntervalRunner() {
log.Print("Scan interval runner: New ticker detected")
case <-mainPeriodicScanner.ticker.C:
log.Print("Scan interval runner: Starting periodic scan")
AddAllToQueue()
scanner_queue.AddAllToQueue()
}
} else {
<-mainPeriodicScanner.ticker_changed

View File

@ -1,411 +0,0 @@
package scanner
import (
"fmt"
"log"
"os"
"path"
"strconv"
"github.com/photoview/photoview/api/graphql/models"
"github.com/photoview/photoview/api/scanner/media_encoding"
"github.com/photoview/photoview/api/scanner/media_encoding/media_utils"
"github.com/photoview/photoview/api/scanner/media_type"
"github.com/photoview/photoview/api/utils"
"github.com/pkg/errors"
"gorm.io/gorm"
// Image decoders
_ "image/gif"
_ "image/png"
_ "golang.org/x/image/bmp"
_ "golang.org/x/image/tiff"
_ "golang.org/x/image/webp"
)
// Higher order function used to check if MediaURL for a given MediaPurpose exists
func makePhotoURLChecker(tx *gorm.DB, mediaID int) func(purpose models.MediaPurpose) (*models.MediaURL, error) {
return func(purpose models.MediaPurpose) (*models.MediaURL, error) {
var mediaURL []*models.MediaURL
result := tx.Where("purpose = ?", purpose).Where("media_id = ?", mediaID).Find(&mediaURL)
if result.Error != nil {
return nil, result.Error
}
if result.RowsAffected > 0 {
return mediaURL[0], nil
}
return nil, nil
}
}
func generateUniqueMediaNamePrefixed(prefix string, mediaPath string, extension string) string {
mediaName := fmt.Sprintf("%s_%s_%s", prefix, path.Base(mediaPath), utils.GenerateToken())
mediaName = models.SanitizeMediaName(mediaName)
mediaName = mediaName + extension
return mediaName
}
func generateUniqueMediaName(mediaPath string) string {
filename := path.Base(mediaPath)
baseName := filename[0 : len(filename)-len(path.Ext(filename))]
baseExt := path.Ext(filename)
mediaName := fmt.Sprintf("%s_%s", baseName, utils.GenerateToken())
mediaName = models.SanitizeMediaName(mediaName) + baseExt
return mediaName
}
func ProcessMedia(tx *gorm.DB, media *models.Media) (bool, error) {
imageData := media_encoding.EncodeMediaData{
Media: media,
}
contentType, err := imageData.ContentType()
if err != nil {
return false, errors.Wrapf(err, "get content-type of media (%s)", media.Path)
}
// Make sure media cache directory exists
mediaCachePath, err := makeMediaCacheDir(media)
if err != nil {
return false, errors.Wrap(err, "cache directory error")
}
if contentType.IsVideo() {
return processVideo(tx, &imageData, mediaCachePath)
} else {
return processPhoto(tx, &imageData, mediaCachePath)
}
}
func processPhoto(tx *gorm.DB, imageData *media_encoding.EncodeMediaData, photoCachePath *string) (bool, error) {
photo := imageData.Media
log.Printf("Processing photo: %s\n", photo.Path)
didProcess := false
photoURLFromDB := makePhotoURLChecker(tx, photo.ID)
// original photo url
origURL, err := photoURLFromDB(models.MediaOriginal)
if err != nil {
return false, err
}
// Thumbnail
thumbURL, err := photoURLFromDB(models.PhotoThumbnail)
if err != nil {
return false, errors.Wrap(err, "error processing photo thumbnail")
}
// Highres
highResURL, err := photoURLFromDB(models.PhotoHighRes)
if err != nil {
return false, errors.Wrap(err, "error processing photo highres")
}
var photoDimensions *media_utils.PhotoDimensions
var baseImagePath string = photo.Path
mediaType, err := media_type.GetMediaType(photo.Path)
if err != nil {
return false, errors.Wrap(err, "could determine if media was photo or video")
}
if mediaType.IsRaw() {
err = processRawSideCar(tx, imageData, highResURL, thumbURL, photoCachePath)
if err != nil {
return false, err
}
counterpartFile := scanForCompressedCounterpartFile(photo.Path)
if counterpartFile != nil {
imageData.Media.CounterpartPath = counterpartFile
}
}
// Generate high res jpeg
if highResURL == nil {
contentType, err := imageData.ContentType()
if err != nil {
return false, err
}
if !contentType.IsWebCompatible() {
didProcess = true
highresName := generateUniqueMediaNamePrefixed("highres", photo.Path, ".jpg")
baseImagePath = path.Join(*photoCachePath, highresName)
_, err := generateSaveHighResJPEG(tx, photo, imageData, highresName, baseImagePath, nil)
if err != nil {
return false, err
}
}
} else {
// Verify that highres photo still exists in cache
baseImagePath = path.Join(*photoCachePath, highResURL.MediaName)
if _, err := os.Stat(baseImagePath); os.IsNotExist(err) {
fmt.Printf("High-res photo found in database but not in cache, re-encoding photo to cache: %s\n", highResURL.MediaName)
didProcess = true
err = imageData.EncodeHighRes(baseImagePath)
if err != nil {
return false, errors.Wrap(err, "creating high-res cached image")
}
}
}
// Save original photo to database
if origURL == nil {
didProcess = true
// Make sure photo dimensions is set
if photoDimensions == nil {
photoDimensions, err = media_utils.GetPhotoDimensions(baseImagePath)
if err != nil {
return false, err
}
}
if err = saveOriginalPhotoToDB(tx, photo, imageData, photoDimensions); err != nil {
return false, errors.Wrap(err, "saving original photo to database")
}
}
// Save thumbnail to cache
if thumbURL == nil {
didProcess = true
thumbnailName := generateUniqueMediaNamePrefixed("thumbnail", photo.Path, ".jpg")
_, err := generateSaveThumbnailJPEG(tx, photo, thumbnailName, photoCachePath, baseImagePath, nil)
if err != nil {
return false, err
}
} else {
// Verify that thumbnail photo still exists in cache
thumbPath := path.Join(*photoCachePath, thumbURL.MediaName)
if _, err := os.Stat(thumbPath); os.IsNotExist(err) {
didProcess = true
fmt.Printf("Thumbnail photo found in database but not in cache, re-encoding photo to cache: %s\n", thumbURL.MediaName)
_, err := media_encoding.EncodeThumbnail(baseImagePath, thumbPath)
if err != nil {
return false, errors.Wrap(err, "could not create thumbnail cached image")
}
}
}
return didProcess, nil
}
func makeMediaCacheDir(media *models.Media) (*string, error) {
// Make root cache dir if not exists
if _, err := os.Stat(utils.MediaCachePath()); os.IsNotExist(err) {
if err := os.Mkdir(utils.MediaCachePath(), os.ModePerm); err != nil {
return nil, errors.Wrap(err, "could not make root image cache directory")
}
}
// Make album cache dir if not exists
albumCachePath := path.Join(utils.MediaCachePath(), strconv.Itoa(int(media.AlbumID)))
if _, err := os.Stat(albumCachePath); os.IsNotExist(err) {
if err := os.Mkdir(albumCachePath, os.ModePerm); err != nil {
return nil, errors.Wrap(err, "could not make album image cache directory")
}
}
// Make photo cache dir if not exists
photoCachePath := path.Join(albumCachePath, strconv.Itoa(int(media.ID)))
if _, err := os.Stat(photoCachePath); os.IsNotExist(err) {
if err := os.Mkdir(photoCachePath, os.ModePerm); err != nil {
return nil, errors.Wrap(err, "could not make photo image cache directory")
}
}
return &photoCachePath, nil
}
func saveOriginalPhotoToDB(tx *gorm.DB, photo *models.Media, imageData *media_encoding.EncodeMediaData, photoDimensions *media_utils.PhotoDimensions) error {
originalImageName := generateUniqueMediaName(photo.Path)
contentType, err := imageData.ContentType()
if err != nil {
return err
}
fileStats, err := os.Stat(photo.Path)
if err != nil {
return errors.Wrap(err, "reading file stats of original photo")
}
mediaURL := models.MediaURL{
Media: photo,
MediaName: originalImageName,
Width: photoDimensions.Width,
Height: photoDimensions.Height,
Purpose: models.MediaOriginal,
ContentType: string(*contentType),
FileSize: fileStats.Size(),
}
if err := tx.Create(&mediaURL).Error; err != nil {
return errors.Wrapf(err, "inserting original photo url: %d, %s", photo.ID, photo.Title)
}
return nil
}
func generateSaveHighResJPEG(tx *gorm.DB, media *models.Media, imageData *media_encoding.EncodeMediaData, highres_name string, imagePath string, mediaURL *models.MediaURL) (*models.MediaURL, error) {
err := imageData.EncodeHighRes(imagePath)
if err != nil {
return nil, errors.Wrap(err, "creating high-res cached image")
}
photoDimensions, err := media_utils.GetPhotoDimensions(imagePath)
if err != nil {
return nil, err
}
fileStats, err := os.Stat(imagePath)
if err != nil {
return nil, errors.Wrap(err, "reading file stats of highres photo")
}
if mediaURL == nil {
mediaURL = &models.MediaURL{
MediaID: media.ID,
MediaName: highres_name,
Width: photoDimensions.Width,
Height: photoDimensions.Height,
Purpose: models.PhotoHighRes,
ContentType: "image/jpeg",
FileSize: fileStats.Size(),
}
if err := tx.Create(&mediaURL).Error; err != nil {
return nil, errors.Wrapf(err, "could not insert highres media url (%d, %s)", media.ID, highres_name)
}
} else {
mediaURL.Width = photoDimensions.Width
mediaURL.Height = photoDimensions.Height
mediaURL.FileSize = fileStats.Size()
if err := tx.Save(&mediaURL).Error; err != nil {
return nil, errors.Wrapf(err, "could not update media url after side car changes (%d, %s)", media.ID, highres_name)
}
}
return mediaURL, nil
}
func generateSaveThumbnailJPEG(tx *gorm.DB, media *models.Media, thumbnail_name string, photoCachePath *string, baseImagePath string, mediaURL *models.MediaURL) (*models.MediaURL, error) {
thumbOutputPath := path.Join(*photoCachePath, thumbnail_name)
thumbSize, err := media_encoding.EncodeThumbnail(baseImagePath, thumbOutputPath)
if err != nil {
return nil, errors.Wrap(err, "could not create thumbnail cached image")
}
fileStats, err := os.Stat(thumbOutputPath)
if err != nil {
return nil, errors.Wrap(err, "reading file stats of thumbnail photo")
}
if mediaURL == nil {
mediaURL = &models.MediaURL{
MediaID: media.ID,
MediaName: thumbnail_name,
Width: thumbSize.Width,
Height: thumbSize.Height,
Purpose: models.PhotoThumbnail,
ContentType: "image/jpeg",
FileSize: fileStats.Size(),
}
if err := tx.Create(&mediaURL).Error; err != nil {
return nil, errors.Wrapf(err, "could not insert thumbnail media url (%d, %s)", media.ID, thumbnail_name)
}
} else {
mediaURL.Width = thumbSize.Width
mediaURL.Height = thumbSize.Height
mediaURL.FileSize = fileStats.Size()
if err := tx.Save(&mediaURL).Error; err != nil {
return nil, errors.Wrapf(err, "could not update media url after side car changes (%d, %s)", media.ID, thumbnail_name)
}
}
return mediaURL, nil
}
func processRawSideCar(tx *gorm.DB, imageData *media_encoding.EncodeMediaData, highResURL *models.MediaURL, thumbURL *models.MediaURL, photoCachePath *string) error {
photo := imageData.Media
sideCarFileHasChanged := false
var currentFileHash *string
currentSideCarPath := scanForSideCarFile(photo.Path)
if currentSideCarPath != nil {
currentFileHash = hashSideCarFile(currentSideCarPath)
if photo.SideCarHash == nil || *photo.SideCarHash != *currentFileHash {
sideCarFileHasChanged = true
}
} else if photo.SideCarPath != nil { // sidecar has been deleted since last scan
sideCarFileHasChanged = true
}
if sideCarFileHasChanged {
fmt.Printf("Detected changed sidecar file for %s recreating JPG's to reflect changes\n", photo.Path)
// update high res image may be cropped so dimentions and file size can change
baseImagePath := path.Join(*photoCachePath, highResURL.MediaName) // update base image path for thumbnail
tempHighResPath := baseImagePath + ".hold"
os.Rename(baseImagePath, tempHighResPath)
_, err := generateSaveHighResJPEG(tx, photo, imageData, highResURL.MediaName, baseImagePath, highResURL)
if err != nil {
os.Rename(tempHighResPath, baseImagePath)
return errors.Wrap(err, "recreating high-res cached image")
}
os.Remove(tempHighResPath)
// update thumbnail image; it may be cropped, so dimensions and file size can change
thumbPath := path.Join(*photoCachePath, thumbURL.MediaName)
tempThumbPath := thumbPath + ".hold" // hold onto the original image incase for some reason we fail to recreate one with the new settings
os.Rename(thumbPath, tempThumbPath)
_, err = generateSaveThumbnailJPEG(tx, photo, thumbURL.MediaName, photoCachePath, baseImagePath, thumbURL)
if err != nil {
os.Rename(tempThumbPath, thumbPath)
return errors.Wrap(err, "recreating thumbnail cached image")
}
os.Remove(tempThumbPath)
photo.SideCarHash = currentFileHash
photo.SideCarPath = currentSideCarPath
// save new side car hash
if err := tx.Save(&photo).Error; err != nil {
return errors.Wrapf(err, "could not update side car hash for media: %s", photo.Path)
}
}
return nil
}

View File

@ -6,16 +6,14 @@ import (
"log"
"os"
"path"
"time"
"github.com/photoview/photoview/api/graphql/models"
"github.com/photoview/photoview/api/graphql/notification"
"github.com/photoview/photoview/api/scanner/face_detection"
"github.com/photoview/photoview/api/scanner/scanner_cache"
"github.com/photoview/photoview/api/scanner/media_encoding"
"github.com/photoview/photoview/api/scanner/scanner_task"
"github.com/photoview/photoview/api/scanner/scanner_tasks"
"github.com/photoview/photoview/api/scanner/scanner_utils"
"github.com/photoview/photoview/api/utils"
"github.com/pkg/errors"
ignore "github.com/sabhiram/go-gitignore"
"gorm.io/gorm"
)
@ -56,7 +54,7 @@ func NewRootAlbum(db *gorm.DB, rootPath string, owner *models.User) (*models.Alb
}
if err := db.Model(&owner).Association("Albums").Append(&album); err != nil {
return nil, errors.Wrap(err, "failed to add owner to already existing album")
return nil, errors.Wrap(err, "add owner to already existing album")
}
return &album, nil
@ -87,142 +85,121 @@ func ValidRootPath(rootPath string) bool {
return true
}
func scanAlbum(album *models.Album, cache *scanner_cache.AlbumScannerCache, db *gorm.DB) {
func ScanAlbum(ctx scanner_task.TaskContext) error {
album_notify_key := utils.GenerateToken()
notifyThrottle := utils.NewThrottle(500 * time.Millisecond)
notifyThrottle.Trigger(nil)
newCtx, err := scanner_tasks.Tasks.BeforeScanAlbum(ctx)
if err != nil {
return errors.Wrapf(err, "before scan album (%s)", ctx.GetAlbum().Path)
}
ctx = newCtx
// Scan for photos
albumMedia, err := findMediaForAlbum(album, cache, db, func(photo *models.Media, newPhoto bool) {
if newPhoto {
notifyThrottle.Trigger(func() {
notification.BroadcastNotification(&models.Notification{
Key: album_notify_key,
Type: models.NotificationTypeMessage,
Header: fmt.Sprintf("Found new media in album '%s'", album.Title),
Content: fmt.Sprintf("Found %s", photo.Path),
})
})
}
})
albumMedia, err := findMediaForAlbum(ctx)
if err != nil {
scanner_utils.ScannerError("Failed to find media for album (%s): %s", album.Path, err)
return errors.Wrapf(err, "find media for album (%s): %s", ctx.GetAlbum().Path, err)
}
album_has_changes := false
for count, media := range albumMedia {
processing_was_needed := false
changedMedia := make([]*models.Media, 0)
for i, media := range albumMedia {
updatedURLs := []*models.MediaURL{}
transactionError := db.Transaction(func(tx *gorm.DB) error {
processing_was_needed, err = ProcessMedia(tx, media)
mediaData := media_encoding.NewEncodeMediaData(media)
// define new ctx for scope of for-loop
ctx, err := scanner_tasks.Tasks.BeforeProcessMedia(ctx, &mediaData)
if err != nil {
return errors.Wrapf(err, "failed to process photo (%s)", media.Path)
return err
}
if processing_was_needed {
album_has_changes = true
progress := float64(count) / float64(len(albumMedia)) * 100.0
notification.BroadcastNotification(&models.Notification{
Key: album_notify_key,
Type: models.NotificationTypeProgress,
Header: fmt.Sprintf("Processing media for album '%s'", album.Title),
Content: fmt.Sprintf("Processed media at %s", media.Path),
Progress: &progress,
})
transactionError := ctx.DatabaseTransaction(func(ctx scanner_task.TaskContext) error {
updatedURLs, err = processMedia(ctx, &mediaData)
if err != nil {
return errors.Wrapf(err, "process media (%s)", media.Path)
}
if len(updatedURLs) > 0 {
changedMedia = append(changedMedia, media)
}
return nil
})
if transactionError != nil {
scanner_utils.ScannerError("Failed to begin database transaction: %s", transactionError)
return errors.Wrap(err, "process media database transaction")
}
if processing_was_needed && media.Type == models.MediaTypePhoto {
go func(media *models.Media) {
if face_detection.GlobalFaceDetector == nil {
return
}
if err := face_detection.GlobalFaceDetector.DetectFaces(db, media); err != nil {
scanner_utils.ScannerError("Error detecting faces in image (%s): %s", media.Path, err)
}
}(media)
if err = scanner_tasks.Tasks.AfterProcessMedia(ctx, &mediaData, updatedURLs, i, len(albumMedia)); err != nil {
return errors.Wrap(err, "after process media")
}
}
cleanup_errors := CleanupMedia(db, album.ID, albumMedia)
for _, err := range cleanup_errors {
scanner_utils.ScannerError("Failed to delete old media: %s", err)
if err := scanner_tasks.Tasks.AfterScanAlbum(ctx, changedMedia, albumMedia); err != nil {
return errors.Wrap(err, "after scan album")
}
if album_has_changes {
timeoutDelay := 2000
notification.BroadcastNotification(&models.Notification{
Key: album_notify_key,
Type: models.NotificationTypeMessage,
Positive: true,
Header: fmt.Sprintf("Done processing media for album '%s'", album.Title),
Content: fmt.Sprintf("All media have been processed"),
Timeout: &timeoutDelay,
})
}
return nil
}
func findMediaForAlbum(album *models.Album, cache *scanner_cache.AlbumScannerCache, db *gorm.DB, onScanPhoto func(photo *models.Media, newPhoto bool)) ([]*models.Media, error) {
func findMediaForAlbum(ctx scanner_task.TaskContext) ([]*models.Media, error) {
albumPhotos := make([]*models.Media, 0)
albumMedia := make([]*models.Media, 0)
dirContent, err := ioutil.ReadDir(album.Path)
dirContent, err := ioutil.ReadDir(ctx.GetAlbum().Path)
if err != nil {
return nil, err
}
// Get ignore data
albumIgnore := ignore.CompileIgnoreLines(*cache.GetAlbumIgnore(album.Path)...)
for _, item := range dirContent {
photoPath := path.Join(album.Path, item.Name())
mediaPath := path.Join(ctx.GetAlbum().Path, item.Name())
isDirSymlink, err := utils.IsDirSymlink(photoPath)
isDirSymlink, err := utils.IsDirSymlink(mediaPath)
if err != nil {
log.Printf("Cannot detect whether %s is symlink to a directory. Pretending it is not", photoPath)
log.Printf("Cannot detect whether %s is symlink to a directory. Pretending it is not", mediaPath)
isDirSymlink = false
}
if !item.IsDir() && !isDirSymlink && cache.IsPathMedia(photoPath) {
// Match file against ignore data
if albumIgnore.MatchesPath(item.Name()) {
log.Printf("File %s ignored\n", item.Name())
continue
}
// Skip the JPEGs that are compressed versions of raw files
counterpartFile := scanForRawCounterpartFile(photoPath)
if counterpartFile != nil {
continue
}
err := db.Transaction(func(tx *gorm.DB) error {
media, isNewMedia, err := ScanMedia(tx, photoPath, album.ID, cache)
if !item.IsDir() && !isDirSymlink && ctx.GetCache().IsPathMedia(mediaPath) {
skip, err := scanner_tasks.Tasks.MediaFound(ctx, item, mediaPath)
if err != nil {
return errors.Wrapf(err, "Scanning media error (%s)", photoPath)
return nil, err
}
if skip {
continue
}
onScanPhoto(media, isNewMedia)
err = ctx.DatabaseTransaction(func(ctx scanner_task.TaskContext) error {
media, isNewMedia, err := ScanMedia(ctx.GetDB(), mediaPath, ctx.GetAlbum().ID, ctx.GetCache())
if err != nil {
return errors.Wrapf(err, "scanning media error (%s)", mediaPath)
}
albumPhotos = append(albumPhotos, media)
if err = scanner_tasks.Tasks.AfterMediaFound(ctx, media, isNewMedia); err != nil {
return err
}
albumMedia = append(albumMedia, media)
return nil
})
if err != nil {
scanner_utils.ScannerError("Error scanning media for album (%d): %s\n", album.ID, err)
scanner_utils.ScannerError("Error scanning media for album (%d): %s\n", ctx.GetAlbum().ID, err)
continue
}
}
}
return albumPhotos, nil
}
return albumMedia, nil
}
func processMedia(ctx scanner_task.TaskContext, mediaData *media_encoding.EncodeMediaData) ([]*models.MediaURL, error) {
// Make sure media cache directory exists
mediaCachePath, err := mediaData.Media.CachePath()
if err != nil {
return []*models.MediaURL{}, errors.Wrap(err, "cache directory error")
}
return scanner_tasks.Tasks.ProcessMedia(ctx, mediaData, mediaCachePath)
}

View File

@ -1,92 +1,20 @@
package scanner
import (
"crypto/md5"
"encoding/hex"
"io"
"context"
"log"
"os"
"path"
"path/filepath"
"strings"
"github.com/photoview/photoview/api/graphql/models"
"github.com/photoview/photoview/api/scanner/exif"
"github.com/photoview/photoview/api/scanner/media_type"
"github.com/photoview/photoview/api/scanner/media_encoding"
"github.com/photoview/photoview/api/scanner/scanner_cache"
"github.com/photoview/photoview/api/scanner/scanner_utils"
"github.com/photoview/photoview/api/scanner/scanner_task"
"github.com/photoview/photoview/api/scanner/scanner_tasks"
"github.com/pkg/errors"
"gorm.io/gorm"
)
func scanForSideCarFile(path string) *string {
testPath := path + ".xmp"
if scanner_utils.FileExists(testPath) {
return &testPath
}
return nil
}
func scanForRawCounterpartFile(imagePath string) *string {
ext := filepath.Ext(imagePath)
fileExtType, found := media_type.GetExtensionMediaType(ext)
if found {
if !fileExtType.IsBasicTypeSupported() {
return nil
}
}
rawPath := media_type.RawCounterpart(imagePath)
if rawPath != nil {
return rawPath
}
return nil
}
func scanForCompressedCounterpartFile(imagePath string) *string {
ext := filepath.Ext(imagePath)
fileExtType, found := media_type.GetExtensionMediaType(ext)
if found {
if fileExtType.IsBasicTypeSupported() {
return nil
}
}
pathWithoutExt := strings.TrimSuffix(imagePath, path.Ext(imagePath))
for _, ext := range media_type.TypeJpeg.FileExtensions() {
testPath := pathWithoutExt + ext
if scanner_utils.FileExists(testPath) {
return &testPath
}
}
return nil
}
func hashSideCarFile(path *string) *string {
if path == nil {
return nil
}
f, err := os.Open(*path)
if err != nil {
log.Printf("ERROR: %s", err)
}
defer f.Close()
h := md5.New()
if _, err := io.Copy(h, f); err != nil {
log.Printf("ERROR: %s", err)
}
hash := hex.EncodeToString(h.Sum(nil))
return &hash
}
func ScanMedia(tx *gorm.DB, mediaPath string, albumId int, cache *scanner_cache.AlbumScannerCache) (*models.Media, bool, error) {
mediaName := path.Base(mediaPath)
@ -115,20 +43,10 @@ func ScanMedia(tx *gorm.DB, mediaPath string, albumId int, cache *scanner_cache.
var mediaTypeText models.MediaType
var sideCarPath *string = nil
var sideCarHash *string = nil
if mediaType.IsVideo() {
mediaTypeText = models.MediaTypeVideo
} else {
mediaTypeText = models.MediaTypePhoto
// search for sidecar files
if mediaType.IsRaw() {
sideCarPath = scanForSideCarFile(mediaPath)
if sideCarPath != nil {
sideCarHash = hashSideCarFile(sideCarPath)
}
}
}
stat, err := os.Stat(mediaPath)
@ -139,8 +57,6 @@ func ScanMedia(tx *gorm.DB, mediaPath string, albumId int, cache *scanner_cache.
media := models.Media{
Title: mediaName,
Path: mediaPath,
SideCarPath: sideCarPath,
SideCarHash: sideCarHash,
AlbumID: albumId,
Type: mediaTypeText,
DateShot: stat.ModTime(),
@ -150,16 +66,41 @@ func ScanMedia(tx *gorm.DB, mediaPath string, albumId int, cache *scanner_cache.
return nil, false, errors.Wrap(err, "could not insert media into database")
}
_, err = exif.SaveEXIF(tx, &media)
if err != nil {
log.Printf("WARN: SaveEXIF for %s failed: %s\n", mediaName, err)
}
if media.Type == models.MediaTypeVideo {
if err = ScanVideoMetadata(tx, &media); err != nil {
log.Printf("WARN: ScanVideoMetadata for %s failed: %s\n", mediaName, err)
}
}
return &media, true, nil
}
// ProcessSingleMedia processes a single media item; it can be used to reprocess media with a corrupted cache.
// It waits for processing to finish before returning.
func ProcessSingleMedia(db *gorm.DB, media *models.Media) error {
album_cache := scanner_cache.MakeAlbumCache()
var album models.Album
if err := db.Model(media).Association("Album").Find(&album); err != nil {
return err
}
media_data := media_encoding.NewEncodeMediaData(media)
task_context := scanner_task.NewTaskContext(context.Background(), db, &album, album_cache)
new_ctx, err := scanner_tasks.Tasks.BeforeProcessMedia(task_context, &media_data)
if err != nil {
return err
}
mediaCachePath, err := media.CachePath()
if err != nil {
return err
}
updated_urls, err := scanner_tasks.Tasks.ProcessMedia(new_ctx, &media_data, mediaCachePath)
if err != nil {
return err
}
err = scanner_tasks.Tasks.AfterProcessMedia(new_ctx, &media_data, updated_urls, 0, 1)
if err != nil {
return err
}
return nil
}
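ProcessSingleMedia replaces the old ProcessMedia entry point used by the HTTP routes when a cached file has gone missing. A hedged usage sketch mirroring the route handlers shown earlier; the wrapper function name is illustrative only:

// Sketch only: re-run the processing pipeline for one media item if its cached file is gone.
func reprocessIfCacheMissing(db *gorm.DB, media *models.Media, cachedPath string) error {
	if _, err := os.Stat(cachedPath); !os.IsNotExist(err) {
		return err // file exists, or stat failed for another reason
	}
	return db.Transaction(func(tx *gorm.DB) error {
		return scanner.ProcessSingleMedia(tx, media)
	})
}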

View File

@ -1,6 +1,7 @@
package scanner
package scanner_queue
import (
"context"
"fmt"
"log"
"sync"
@ -8,20 +9,30 @@ import (
"github.com/photoview/photoview/api/graphql/models"
"github.com/photoview/photoview/api/graphql/notification"
"github.com/photoview/photoview/api/scanner"
"github.com/photoview/photoview/api/scanner/scanner_cache"
"github.com/photoview/photoview/api/scanner/scanner_task"
"github.com/photoview/photoview/api/scanner/scanner_utils"
"github.com/photoview/photoview/api/utils"
"github.com/pkg/errors"
"gorm.io/gorm"
)
// ScannerJob describes a job on the queue to be run by the scanner over a single album
type ScannerJob struct {
album *models.Album
cache *scanner_cache.AlbumScannerCache
ctx scanner_task.TaskContext
// album *models.Album
// cache *scanner_cache.AlbumScannerCache
}
func NewScannerJob(ctx scanner_task.TaskContext) ScannerJob {
return ScannerJob{
ctx,
}
}
func (job *ScannerJob) Run(db *gorm.DB) {
scanAlbum(job.album, job.cache, db)
scanner.ScanAlbum(job.ctx)
}
type ScannerQueueSettings struct {
@ -162,7 +173,7 @@ func (queue *ScannerQueue) processQueue(notifyThrottle *utils.Throttle) {
Positive: true,
})
if err := GenerateBlurhashes(queue.db); err != nil {
if err := scanner.GenerateBlurhashes(queue.db); err != nil {
scanner_utils.ScannerError("Failed to generate blurhashes: %v", err)
}
@ -212,9 +223,11 @@ func AddAllToQueue() error {
return nil
}
// AddUserToQueue finds all root albums owned by the given user and adds them to the scanner queue.
// Function does not block.
func AddUserToQueue(user *models.User) error {
album_cache := scanner_cache.MakeAlbumCache()
albums, album_errors := findAlbumsForUser(global_scanner_queue.db, user, album_cache)
albums, album_errors := scanner.FindAlbumsForUser(global_scanner_queue.db, user, album_cache)
for _, err := range album_errors {
return errors.Wrapf(err, "find albums for user (user_id: %d)", user.ID)
}
@ -222,8 +235,7 @@ func AddUserToQueue(user *models.User) error {
global_scanner_queue.mutex.Lock()
for _, album := range albums {
global_scanner_queue.addJob(&ScannerJob{
album: album,
cache: album_cache,
ctx: scanner_task.NewTaskContext(context.Background(), global_scanner_queue.db, album, album_cache),
})
}
global_scanner_queue.mutex.Unlock()
@ -248,7 +260,7 @@ func (queue *ScannerQueue) jobOnQueue(job *ScannerJob) (bool, error) {
scannerJobs := append(queue.in_progress, queue.up_next...)
for _, scannerJob := range scannerJobs {
if scannerJob.album.ID == job.album.ID {
if scannerJob.ctx.GetAlbum().ID == job.ctx.GetAlbum().ID {
return true, nil
}
}

View File

@ -1,12 +1,18 @@
package scanner
package scanner_queue
import (
"context"
"flag"
"testing"
"github.com/photoview/photoview/api/graphql/models"
"github.com/photoview/photoview/api/scanner/scanner_cache"
"github.com/photoview/photoview/api/scanner/scanner_task"
)
var _ = flag.Bool("database", false, "run database integration tests")
var _ = flag.Bool("filesystem", false, "run filesystem integration tests")
func makeAlbumWithID(id int) *models.Album {
var album models.Album
album.ID = id
@ -14,11 +20,15 @@ func makeAlbumWithID(id int) *models.Album {
return &album
}
func makeScannerJob(albumID int) ScannerJob {
return NewScannerJob(scanner_task.NewTaskContext(context.Background(), nil, makeAlbumWithID(albumID), scanner_cache.MakeAlbumCache()))
}
func TestScannerQueue_AddJob(t *testing.T) {
scannerJobs := []ScannerJob{
{album: makeAlbumWithID(100), cache: scanner_cache.MakeAlbumCache()},
{album: makeAlbumWithID(20), cache: scanner_cache.MakeAlbumCache()},
makeScannerJob(100),
makeScannerJob(20),
}
mockScannerQueue := ScannerQueue{
@ -29,7 +39,7 @@ func TestScannerQueue_AddJob(t *testing.T) {
}
t.Run("add new job to scanner queue", func(t *testing.T) {
newJob := ScannerJob{album: makeAlbumWithID(42), cache: scanner_cache.MakeAlbumCache()}
newJob := makeScannerJob(42)
startingJobs := len(mockScannerQueue.up_next)
@ -49,7 +59,8 @@ func TestScannerQueue_AddJob(t *testing.T) {
t.Run("add existing job to scanner queue", func(t *testing.T) {
startingJobs := len(mockScannerQueue.up_next)
err := mockScannerQueue.addJob(&ScannerJob{album: makeAlbumWithID(20), cache: scanner_cache.MakeAlbumCache()})
job := makeScannerJob(20)
err := mockScannerQueue.addJob(&job)
if err != nil {
t.Errorf(".AddJob() returned an unexpected error: %s", err)
}
@ -59,14 +70,13 @@ func TestScannerQueue_AddJob(t *testing.T) {
}
})
}
func TestScannerQueue_JobOnQueue(t *testing.T) {
scannerJobs := []ScannerJob{
{album: makeAlbumWithID(100), cache: scanner_cache.MakeAlbumCache()},
{album: makeAlbumWithID(20), cache: scanner_cache.MakeAlbumCache()},
makeScannerJob(100),
makeScannerJob(20),
}
mockScannerQueue := ScannerQueue{
@ -81,12 +91,8 @@ func TestScannerQueue_JobOnQueue(t *testing.T) {
bool
ScannerJob
}{
{"album which owner is already on the queue", true, ScannerJob{
album: makeAlbumWithID(100), cache: scanner_cache.MakeAlbumCache(),
}},
{"album that is not on the queue", false, ScannerJob{
album: makeAlbumWithID(321), cache: scanner_cache.MakeAlbumCache(),
}},
{"album which owner is already on the queue", true, makeScannerJob(100)},
{"album that is not on the queue", false, makeScannerJob(321)},
}
for _, test := range onQueueTests {

View File

@ -0,0 +1,102 @@
package scanner_task
import (
"context"
"database/sql"
"flag"
"io/fs"
"github.com/photoview/photoview/api/graphql/models"
"github.com/photoview/photoview/api/scanner/media_encoding"
"github.com/photoview/photoview/api/scanner/scanner_cache"
"gorm.io/gorm"
)
// ScannerTask is an interface for a task to be performed as a part of the scanner pipeline
type ScannerTask interface {
// BeforeScanAlbum will run at the beginning of the scan task.
// New values can be stored in the returned TaskContext that will live throughout the lifetime of the task.
BeforeScanAlbum(ctx TaskContext) (TaskContext, error)
// AfterScanAlbum will run at the end of the scan task.
AfterScanAlbum(ctx TaskContext, changedMedia []*models.Media, albumMedia []*models.Media) error
// MediaFound will run for each media file found on the filesystem.
// It will run even when the media is already present in the database.
// If the returned skip value is true, the media will be skipped and further steps will not be executed for the given file.
MediaFound(ctx TaskContext, fileInfo fs.FileInfo, mediaPath string) (skip bool, err error)
// AfterMediaFound will run for each media file after it has been saved to the database, but not yet processed.
// It will run even when the media was already present in the database; in that case `newMedia` will be false.
AfterMediaFound(ctx TaskContext, media *models.Media, newMedia bool) error
BeforeProcessMedia(ctx TaskContext, mediaData *media_encoding.EncodeMediaData) (TaskContext, error)
ProcessMedia(ctx TaskContext, mediaData *media_encoding.EncodeMediaData, mediaCachePath string) (updatedURLs []*models.MediaURL, err error)
AfterProcessMedia(ctx TaskContext, mediaData *media_encoding.EncodeMediaData, updatedURLs []*models.MediaURL, mediaIndex int, mediaTotal int) error
}
type TaskContext struct {
ctx context.Context
}
func NewTaskContext(parent context.Context, db *gorm.DB, album *models.Album, cache *scanner_cache.AlbumScannerCache) TaskContext {
ctx := TaskContext{ctx: parent}
ctx = ctx.WithValue(taskCtxKeyAlbum, album)
ctx = ctx.WithValue(taskCtxKeyAlbumCache, cache)
ctx = ctx.WithDB(db)
return ctx
}
type taskCtxKeyType string
const (
taskCtxKeyAlbum taskCtxKeyType = "task_album"
taskCtxKeyAlbumCache taskCtxKeyType = "task_album_cache"
taskCtxKeyDatabase taskCtxKeyType = "task_database"
)
func (c TaskContext) GetAlbum() *models.Album {
return c.ctx.Value(taskCtxKeyAlbum).(*models.Album)
}
func (c TaskContext) GetCache() *scanner_cache.AlbumScannerCache {
return c.ctx.Value(taskCtxKeyAlbumCache).(*scanner_cache.AlbumScannerCache)
}
func (c TaskContext) GetDB() *gorm.DB {
return c.ctx.Value(taskCtxKeyDatabase).(*gorm.DB)
}
func (c TaskContext) DatabaseTransaction(transFunc func(ctx TaskContext) error, opts ...*sql.TxOptions) error {
return c.GetDB().Transaction(func(tx *gorm.DB) error {
return transFunc(c.WithDB(tx))
}, opts...)
}
func (c TaskContext) WithValue(key, val interface{}) TaskContext {
return TaskContext{
ctx: context.WithValue(c.ctx, key, val),
}
}
func (c TaskContext) Value(key interface{}) interface{} {
return c.ctx.Value(key)
}
func (c TaskContext) WithDB(db *gorm.DB) TaskContext {
// Allow db to be nil in tests
if db == nil && flag.Lookup("test.v") != nil {
return c
}
return c.WithValue(taskCtxKeyDatabase, db.WithContext(c.ctx))
}
func (c TaskContext) Done() <-chan struct{} {
return c.ctx.Done()
}
func (c TaskContext) Err() error {
return c.ctx.Err()
}
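The ScannerTask interface and TaskContext above form the new pipeline contract: the scanner calls each hook in order (BeforeScanAlbum, MediaFound, AfterMediaFound, BeforeProcessMedia, ProcessMedia, AfterProcessMedia, AfterScanAlbum), threading shared state through the context. A minimal hypothetical task, assuming it embeds ScannerTaskBase (defined in the next file) to pick up no-op defaults:

// Hypothetical example task, not part of this commit: logs each new media file as it is found.
type LoggingTask struct {
	scanner_task.ScannerTaskBase // inherit no-op implementations of the remaining hooks
}

func (t LoggingTask) AfterMediaFound(ctx scanner_task.TaskContext, media *models.Media, newMedia bool) error {
	if !newMedia {
		return nil
	}
	log.Printf("album %q: found new media %s", ctx.GetAlbum().Title, media.Path)
	// Database work inside a hook should go through the context, for example:
	//   ctx.DatabaseTransaction(func(ctx scanner_task.TaskContext) error { ... use ctx.GetDB() ... })
	return nil
}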

View File

@ -0,0 +1,39 @@
package scanner_task
import (
"io/fs"
"github.com/photoview/photoview/api/graphql/models"
"github.com/photoview/photoview/api/scanner/media_encoding"
)
// ScannerTaskBase provides a default "empty" implementation of ScannerTask, so that concrete tasks only need to override the hooks they use.
type ScannerTaskBase struct{}
func (t ScannerTaskBase) BeforeScanAlbum(ctx TaskContext) (TaskContext, error) {
return ctx, nil
}
func (t ScannerTaskBase) AfterScanAlbum(ctx TaskContext, changedMedia []*models.Media, albumMedia []*models.Media) error {
return nil
}
func (t ScannerTaskBase) MediaFound(ctx TaskContext, fileInfo fs.FileInfo, mediaPath string) (skip bool, err error) {
return false, nil
}
func (t ScannerTaskBase) AfterMediaFound(ctx TaskContext, media *models.Media, newMedia bool) error {
return nil
}
func (t ScannerTaskBase) BeforeProcessMedia(ctx TaskContext, mediaData *media_encoding.EncodeMediaData) (TaskContext, error) {
return ctx, nil
}
func (t ScannerTaskBase) ProcessMedia(ctx TaskContext, mediaData *media_encoding.EncodeMediaData, mediaCachePath string) (updatedURLs []*models.MediaURL, err error) {
return []*models.MediaURL{}, nil
}
func (t ScannerTaskBase) AfterProcessMedia(ctx TaskContext, mediaData *media_encoding.EncodeMediaData, updatedURLs []*models.MediaURL, mediaIndex int, mediaTotal int) error {
return nil
}

View File

@ -1,4 +1,4 @@
package scanner
package cleanup_tasks
import (
"os"
@ -13,6 +13,7 @@ import (
"gorm.io/gorm"
)
// CleanupMedia removes media entries from the database that are no longer present on the filesystem
func CleanupMedia(db *gorm.DB, albumId int, albumMedia []*models.Media) []error {
albumMediaIds := make([]int, len(albumMedia))
for i, media := range albumMedia {
@ -63,8 +64,8 @@ func CleanupMedia(db *gorm.DB, albumId int, albumMedia []*models.Media) []error
return deleteErrors
}
// Find and delete old albums in the database and cache that does not exist on the filesystem anymore.
func deleteOldUserAlbums(db *gorm.DB, scannedAlbums []*models.Album, user *models.User) []error {
// DeleteOldUserAlbums finds and deletes old albums in the database and cache that no longer exist on the filesystem.
func DeleteOldUserAlbums(db *gorm.DB, scannedAlbums []*models.Album, user *models.User) []error {
if len(scannedAlbums) == 0 {
return nil
}

View File

@ -1,4 +1,4 @@
package scanner_test
package cleanup_tasks_test
import (
"os"
@ -13,6 +13,10 @@ import (
"github.com/stretchr/testify/assert"
)
func TestMain(m *testing.M) {
os.Exit(test_utils.IntegrationTestRun(m))
}
func TestCleanupMedia(t *testing.T) {
test_utils.FilesystemTest(t)
db := test_utils.DatabaseTest(t)
@ -27,7 +31,7 @@ func TestCleanupMedia(t *testing.T) {
}
test_dir := t.TempDir()
copy.Copy("./test_data", test_dir)
assert.NoError(t, copy.Copy("../../test_data", test_dir))
countAllMedia := func() int {
var all_media []*models.Media

View File

@ -0,0 +1,21 @@
package cleanup_tasks
import (
"github.com/photoview/photoview/api/graphql/models"
"github.com/photoview/photoview/api/scanner/scanner_task"
"github.com/photoview/photoview/api/scanner/scanner_utils"
)
type MediaCleanupTask struct {
scanner_task.ScannerTaskBase
}
func (t MediaCleanupTask) AfterScanAlbum(ctx scanner_task.TaskContext, changedMedia []*models.Media, albumMedia []*models.Media) error {
cleanup_errors := CleanupMedia(ctx.GetDB(), ctx.GetAlbum().ID, albumMedia)
for _, err := range cleanup_errors {
scanner_utils.ScannerError("delete old media: %s", err)
}
return nil
}

View File

@ -0,0 +1,27 @@
package scanner_tasks
import (
"log"
"github.com/photoview/photoview/api/graphql/models"
"github.com/photoview/photoview/api/scanner/exif"
"github.com/photoview/photoview/api/scanner/scanner_task"
)
type ExifTask struct {
scanner_task.ScannerTaskBase
}
func (t ExifTask) AfterMediaFound(ctx scanner_task.TaskContext, media *models.Media, newMedia bool) error {
if !newMedia {
return nil
}
_, err := exif.SaveEXIF(ctx.GetDB(), media)
if err != nil {
log.Printf("WARN: SaveEXIF for %s failed: %s\n", media.Title, err)
}
return nil
}

View File

@ -0,0 +1,30 @@
package scanner_tasks
import (
"github.com/photoview/photoview/api/graphql/models"
"github.com/photoview/photoview/api/scanner/face_detection"
"github.com/photoview/photoview/api/scanner/media_encoding"
"github.com/photoview/photoview/api/scanner/scanner_task"
"github.com/photoview/photoview/api/scanner/scanner_utils"
)
type FaceDetectionTask struct {
scanner_task.ScannerTaskBase
}
func (t FaceDetectionTask) AfterProcessMedia(ctx scanner_task.TaskContext, mediaData *media_encoding.EncodeMediaData, updatedURLs []*models.MediaURL, mediaIndex int, mediaTotal int) error {
didProcess := len(updatedURLs) > 0
if didProcess && mediaData.Media.Type == models.MediaTypePhoto {
go func(media *models.Media) {
if face_detection.GlobalFaceDetector == nil {
return
}
if err := face_detection.GlobalFaceDetector.DetectFaces(ctx.GetDB(), media); err != nil {
scanner_utils.ScannerError("Error detecting faces in image (%s): %s", media.Path, err)
}
}(mediaData.Media)
}
return nil
}

View File

@ -0,0 +1,37 @@
package scanner_tasks
import (
"io/fs"
"log"
"github.com/photoview/photoview/api/scanner/scanner_task"
ignore "github.com/sabhiram/go-gitignore"
)
type IgnorefileTask struct {
scanner_task.ScannerTaskBase
}
type ignorefileTaskKey string
const albumIgnoreKey ignorefileTaskKey = "album_ignore_key"
func getAlbumIgnore(ctx scanner_task.TaskContext) *ignore.GitIgnore {
return ctx.Value(albumIgnoreKey).(*ignore.GitIgnore)
}
func (t IgnorefileTask) BeforeScanAlbum(ctx scanner_task.TaskContext) (scanner_task.TaskContext, error) {
albumIgnore := ignore.CompileIgnoreLines(*ctx.GetCache().GetAlbumIgnore(ctx.GetAlbum().Path)...)
return ctx.WithValue(albumIgnoreKey, albumIgnore), nil
}
func (t IgnorefileTask) MediaFound(ctx scanner_task.TaskContext, fileInfo fs.FileInfo, mediaPath string) (bool, error) {
// Match file against ignore data
if getAlbumIgnore(ctx).MatchesPath(fileInfo.Name()) {
log.Printf("File %s ignored\n", fileInfo.Name())
return true, nil
}
return false, nil
}

View File

@ -0,0 +1,74 @@
package scanner_tasks
import (
"fmt"
"time"
"github.com/photoview/photoview/api/graphql/models"
"github.com/photoview/photoview/api/graphql/notification"
"github.com/photoview/photoview/api/scanner/media_encoding"
"github.com/photoview/photoview/api/scanner/scanner_task"
"github.com/photoview/photoview/api/utils"
)
type NotificationTask struct {
scanner_task.ScannerTaskBase
throttle utils.Throttle
albumKey string
}
func NewNotificationTask() NotificationTask {
notifyThrottle := utils.NewThrottle(500 * time.Millisecond)
notifyThrottle.Trigger(nil)
return NotificationTask{
albumKey: utils.GenerateToken(),
throttle: notifyThrottle,
}
}
func (t NotificationTask) AfterMediaFound(ctx scanner_task.TaskContext, media *models.Media, newMedia bool) error {
if newMedia {
t.throttle.Trigger(func() {
notification.BroadcastNotification(&models.Notification{
Key: t.albumKey,
Type: models.NotificationTypeMessage,
Header: fmt.Sprintf("Found new media in album '%s'", ctx.GetAlbum().Title),
Content: fmt.Sprintf("Found %s", media.Path),
})
})
}
return nil
}
func (t NotificationTask) AfterProcessMedia(ctx scanner_task.TaskContext, mediaData *media_encoding.EncodeMediaData, updatedURLs []*models.MediaURL, mediaIndex int, mediaTotal int) error {
if len(updatedURLs) > 0 {
progress := float64(mediaIndex) / float64(mediaTotal) * 100.0
notification.BroadcastNotification(&models.Notification{
Key: t.albumKey,
Type: models.NotificationTypeProgress,
Header: fmt.Sprintf("Processing media for album '%s'", ctx.GetAlbum().Title),
Content: fmt.Sprintf("Processed media at %s", mediaData.Media.Path),
Progress: &progress,
})
}
return nil
}
func (t NotificationTask) AfterScanAlbum(ctx scanner_task.TaskContext, changedMedia []*models.Media, albumMedia []*models.Media) error {
if len(changedMedia) > 0 {
timeoutDelay := 2000
notification.BroadcastNotification(&models.Notification{
Key: t.albumKey,
Type: models.NotificationTypeMessage,
Positive: true,
Header: fmt.Sprintf("Done processing media for album '%s'", ctx.GetAlbum().Title),
Content: "All media have been processed",
Timeout: &timeoutDelay,
})
}
return nil
}

View File

@ -0,0 +1,87 @@
package processing_tasks
import (
"io/fs"
"path"
"path/filepath"
"strings"
"github.com/photoview/photoview/api/scanner/media_encoding"
"github.com/photoview/photoview/api/scanner/media_type"
"github.com/photoview/photoview/api/scanner/scanner_task"
"github.com/photoview/photoview/api/scanner/scanner_utils"
"github.com/pkg/errors"
)
type CounterpartFilesTask struct {
scanner_task.ScannerTaskBase
}
func (t CounterpartFilesTask) MediaFound(ctx scanner_task.TaskContext, fileInfo fs.FileInfo, mediaPath string) (skip bool, err error) {
// Skip the JPEGs that are compressed versions of raw files
counterpartFile := scanForRawCounterpartFile(mediaPath)
if counterpartFile != nil {
return true, nil
}
return false, nil
}
func (t CounterpartFilesTask) BeforeProcessMedia(ctx scanner_task.TaskContext, mediaData *media_encoding.EncodeMediaData) (scanner_task.TaskContext, error) {
mediaType, err := ctx.GetCache().GetMediaType(mediaData.Media.Path)
if err != nil {
return ctx, errors.Wrap(err, "scan for counterpart file")
}
if !mediaType.IsRaw() {
return ctx, nil
}
counterpartFile := scanForCompressedCounterpartFile(mediaData.Media.Path)
if counterpartFile != nil {
mediaData.CounterpartPath = counterpartFile
}
return ctx, nil
}
func scanForCompressedCounterpartFile(imagePath string) *string {
ext := filepath.Ext(imagePath)
fileExtType, found := media_type.GetExtensionMediaType(ext)
if found {
if fileExtType.IsBasicTypeSupported() {
return nil
}
}
pathWithoutExt := strings.TrimSuffix(imagePath, path.Ext(imagePath))
for _, ext := range media_type.TypeJpeg.FileExtensions() {
testPath := pathWithoutExt + ext
if scanner_utils.FileExists(testPath) {
return &testPath
}
}
return nil
}
func scanForRawCounterpartFile(imagePath string) *string {
ext := filepath.Ext(imagePath)
fileExtType, found := media_type.GetExtensionMediaType(ext)
if found {
if !fileExtType.IsBasicTypeSupported() {
return nil
}
}
rawPath := media_type.RawCounterpart(imagePath)
if rawPath != nil {
return rawPath
}
return nil
}

View File

@ -0,0 +1,139 @@
package processing_tasks
import (
"fmt"
"log"
"os"
"path"
"github.com/photoview/photoview/api/graphql/models"
"github.com/photoview/photoview/api/scanner/media_encoding"
"github.com/photoview/photoview/api/scanner/media_encoding/media_utils"
"github.com/photoview/photoview/api/scanner/scanner_task"
"github.com/pkg/errors"
// Image decoders
_ "image/gif"
_ "image/png"
_ "golang.org/x/image/bmp"
_ "golang.org/x/image/tiff"
_ "golang.org/x/image/webp"
)
type ProcessPhotoTask struct {
scanner_task.ScannerTaskBase
}
func (t ProcessPhotoTask) ProcessMedia(ctx scanner_task.TaskContext, mediaData *media_encoding.EncodeMediaData, mediaCachePath string) ([]*models.MediaURL, error) {
if mediaData.Media.Type != models.MediaTypePhoto {
return []*models.MediaURL{}, nil
}
updatedURLs := make([]*models.MediaURL, 0)
photo := mediaData.Media
log.Printf("Processing photo: %s\n", photo.Path)
photoURLFromDB := makePhotoURLChecker(ctx.GetDB(), photo.ID)
// original photo url
origURL, err := photoURLFromDB(models.MediaOriginal)
if err != nil {
return []*models.MediaURL{}, err
}
// Thumbnail
thumbURL, err := photoURLFromDB(models.PhotoThumbnail)
if err != nil {
return []*models.MediaURL{}, errors.Wrap(err, "error processing photo thumbnail")
}
// Highres
highResURL, err := photoURLFromDB(models.PhotoHighRes)
if err != nil {
return []*models.MediaURL{}, errors.Wrap(err, "error processing photo highres")
}
var photoDimensions *media_utils.PhotoDimensions
var baseImagePath string = photo.Path
// Generate high res jpeg
if highResURL == nil {
contentType, err := mediaData.ContentType()
if err != nil {
return []*models.MediaURL{}, err
}
if !contentType.IsWebCompatible() {
highresName := generateUniqueMediaNamePrefixed("highres", photo.Path, ".jpg")
baseImagePath = path.Join(mediaCachePath, highresName)
highRes, err := generateSaveHighResJPEG(ctx.GetDB(), photo, mediaData, highresName, baseImagePath, nil)
if err != nil {
return []*models.MediaURL{}, err
}
updatedURLs = append(updatedURLs, highRes)
}
} else {
// Verify that highres photo still exists in cache
baseImagePath = path.Join(mediaCachePath, highResURL.MediaName)
if _, err := os.Stat(baseImagePath); os.IsNotExist(err) {
fmt.Printf("High-res photo found in database but not in cache, re-encoding photo to cache: %s\n", highResURL.MediaName)
updatedURLs = append(updatedURLs, highResURL)
err = mediaData.EncodeHighRes(baseImagePath)
if err != nil {
return []*models.MediaURL{}, errors.Wrap(err, "creating high-res cached image")
}
}
}
// Save original photo to database
if origURL == nil {
// Make sure photo dimensions is set
if photoDimensions == nil {
photoDimensions, err = media_utils.GetPhotoDimensions(baseImagePath)
if err != nil {
return []*models.MediaURL{}, err
}
}
original, err := saveOriginalPhotoToDB(ctx.GetDB(), photo, mediaData, photoDimensions)
if err != nil {
return []*models.MediaURL{}, errors.Wrap(err, "saving original photo to database")
}
updatedURLs = append(updatedURLs, original)
}
// Save thumbnail to cache
if thumbURL == nil {
thumbnailName := generateUniqueMediaNamePrefixed("thumbnail", photo.Path, ".jpg")
thumbnail, err := generateSaveThumbnailJPEG(ctx.GetDB(), photo, thumbnailName, mediaCachePath, baseImagePath, nil)
if err != nil {
return []*models.MediaURL{}, err
}
updatedURLs = append(updatedURLs, thumbnail)
} else {
// Verify that thumbnail photo still exists in cache
thumbPath := path.Join(mediaCachePath, thumbURL.MediaName)
if _, err := os.Stat(thumbPath); os.IsNotExist(err) {
updatedURLs = append(updatedURLs, thumbURL)
fmt.Printf("Thumbnail photo found in database but not in cache, re-encoding photo to cache: %s\n", thumbURL.MediaName)
_, err := media_encoding.EncodeThumbnail(baseImagePath, thumbPath)
if err != nil {
return []*models.MediaURL{}, errors.Wrap(err, "could not create thumbnail cached image")
}
}
}
return updatedURLs, nil
}

View File

@ -1,4 +1,4 @@
package scanner
package processing_tasks
import (
"context"
@ -13,54 +13,60 @@ import (
"github.com/photoview/photoview/api/scanner/media_encoding"
"github.com/photoview/photoview/api/scanner/media_encoding/executable_worker"
"github.com/photoview/photoview/api/scanner/media_encoding/media_utils"
"github.com/photoview/photoview/api/scanner/scanner_task"
"github.com/photoview/photoview/api/utils"
"github.com/pkg/errors"
"gopkg.in/vansante/go-ffprobe.v2"
"gorm.io/gorm"
)
func processVideo(tx *gorm.DB, mediaData *media_encoding.EncodeMediaData, videoCachePath *string) (bool, error) {
type ProcessVideoTask struct {
scanner_task.ScannerTaskBase
}
func (t ProcessVideoTask) ProcessMedia(ctx scanner_task.TaskContext, mediaData *media_encoding.EncodeMediaData, mediaCachePath string) ([]*models.MediaURL, error) {
if mediaData.Media.Type != models.MediaTypeVideo {
return []*models.MediaURL{}, nil
}
updatedURLs := make([]*models.MediaURL, 0)
video := mediaData.Media
didProcess := false
log.Printf("Processing video: %s", video.Path)
mediaURLFromDB := makePhotoURLChecker(tx, video.ID)
mediaURLFromDB := makePhotoURLChecker(ctx.GetDB(), video.ID)
videoOriginalURL, err := mediaURLFromDB(models.MediaOriginal)
if err != nil {
return false, errors.Wrap(err, "error processing video original format")
return []*models.MediaURL{}, errors.Wrap(err, "error processing video original format")
}
videoWebURL, err := mediaURLFromDB(models.VideoWeb)
if err != nil {
return false, errors.Wrap(err, "error processing video web-format")
return []*models.MediaURL{}, errors.Wrap(err, "error processing video web-format")
}
videoThumbnailURL, err := mediaURLFromDB(models.VideoThumbnail)
if err != nil {
return false, errors.Wrap(err, "error processing video thumbnail")
return []*models.MediaURL{}, errors.Wrap(err, "error processing video thumbnail")
}
videoType, err := mediaData.ContentType()
if err != nil {
return false, errors.Wrap(err, "error getting video content type")
return []*models.MediaURL{}, errors.Wrap(err, "error getting video content type")
}
if videoOriginalURL == nil && videoType.IsWebCompatible() {
didProcess = true
origVideoPath := video.Path
videoMediaName := generateUniqueMediaName(video.Path)
webMetadata, err := readVideoStreamMetadata(origVideoPath)
webMetadata, err := ReadVideoStreamMetadata(origVideoPath)
if err != nil {
return false, errors.Wrapf(err, "failed to read metadata for original video (%s)", video.Title)
return []*models.MediaURL{}, errors.Wrapf(err, "failed to read metadata for original video (%s)", video.Title)
}
fileStats, err := os.Stat(origVideoPath)
if err != nil {
return false, errors.Wrap(err, "reading file stats of original video")
return []*models.MediaURL{}, errors.Wrap(err, "reading file stats of original video")
}
mediaURL := models.MediaURL{
@ -73,35 +79,34 @@ func processVideo(tx *gorm.DB, mediaData *media_encoding.EncodeMediaData, videoC
FileSize: fileStats.Size(),
}
if err := tx.Create(&mediaURL).Error; err != nil {
return false, errors.Wrapf(err, "failed to insert original video into database (%s)", video.Title)
if err := ctx.GetDB().Create(&mediaURL).Error; err != nil {
return []*models.MediaURL{}, errors.Wrapf(err, "insert original video into database (%s)", video.Title)
}
updatedURLs = append(updatedURLs, &mediaURL)
}
if videoWebURL == nil && !videoType.IsWebCompatible() {
didProcess = true
web_video_name := fmt.Sprintf("web_video_%s_%s", path.Base(video.Path), utils.GenerateToken())
web_video_name = strings.ReplaceAll(web_video_name, ".", "_")
web_video_name = strings.ReplaceAll(web_video_name, " ", "_")
web_video_name = web_video_name + ".mp4"
webVideoPath := path.Join(*videoCachePath, web_video_name)
webVideoPath := path.Join(mediaCachePath, web_video_name)
err = executable_worker.FfmpegCli.EncodeMp4(video.Path, webVideoPath)
if err != nil {
return false, errors.Wrapf(err, "could not encode mp4 video (%s)", video.Path)
return []*models.MediaURL{}, errors.Wrapf(err, "could not encode mp4 video (%s)", video.Path)
}
webMetadata, err := readVideoStreamMetadata(webVideoPath)
webMetadata, err := ReadVideoStreamMetadata(webVideoPath)
if err != nil {
return false, errors.Wrapf(err, "failed to read metadata for encoded web-video (%s)", video.Title)
return []*models.MediaURL{}, errors.Wrapf(err, "failed to read metadata for encoded web-video (%s)", video.Title)
}
fileStats, err := os.Stat(webVideoPath)
if err != nil {
return false, errors.Wrap(err, "reading file stats of web-optimized video")
return []*models.MediaURL{}, errors.Wrap(err, "reading file stats of web-optimized video")
}
mediaURL := models.MediaURL{
@ -114,39 +119,39 @@ func processVideo(tx *gorm.DB, mediaData *media_encoding.EncodeMediaData, videoC
FileSize: fileStats.Size(),
}
if err := tx.Create(&mediaURL).Error; err != nil {
return false, errors.Wrapf(err, "failed to insert encoded web-video into database (%s)", video.Title)
if err := ctx.GetDB().Create(&mediaURL).Error; err != nil {
return []*models.MediaURL{}, errors.Wrapf(err, "failed to insert encoded web-video into database (%s)", video.Title)
}
updatedURLs = append(updatedURLs, &mediaURL)
}
probeData, err := mediaData.VideoMetadata()
if err != nil {
return false, err
return []*models.MediaURL{}, err
}
if videoThumbnailURL == nil {
didProcess = true
video_thumb_name := fmt.Sprintf("video_thumb_%s_%s", path.Base(video.Path), utils.GenerateToken())
video_thumb_name = strings.ReplaceAll(video_thumb_name, ".", "_")
video_thumb_name = strings.ReplaceAll(video_thumb_name, " ", "_")
video_thumb_name = video_thumb_name + ".jpg"
thumbImagePath := path.Join(*videoCachePath, video_thumb_name)
thumbImagePath := path.Join(mediaCachePath, video_thumb_name)
err = executable_worker.FfmpegCli.EncodeVideoThumbnail(video.Path, thumbImagePath, probeData)
if err != nil {
return false, errors.Wrapf(err, "failed to generate thumbnail for video (%s)", video.Title)
return []*models.MediaURL{}, errors.Wrapf(err, "failed to generate thumbnail for video (%s)", video.Title)
}
thumbDimensions, err := media_utils.GetPhotoDimensions(thumbImagePath)
if err != nil {
return false, errors.Wrap(err, "get dimensions of video thumbnail image")
return []*models.MediaURL{}, errors.Wrap(err, "get dimensions of video thumbnail image")
}
fileStats, err := os.Stat(thumbImagePath)
if err != nil {
return false, errors.Wrap(err, "reading file stats of video thumbnail")
return []*models.MediaURL{}, errors.Wrap(err, "reading file stats of video thumbnail")
}
thumbMediaURL := models.MediaURL{
@ -159,46 +164,48 @@ func processVideo(tx *gorm.DB, mediaData *media_encoding.EncodeMediaData, videoC
FileSize: fileStats.Size(),
}
if err := tx.Create(&thumbMediaURL).Error; err != nil {
return false, errors.Wrapf(err, "failed to insert video thumbnail image into database (%s)", video.Title)
if err := ctx.GetDB().Create(&thumbMediaURL).Error; err != nil {
return []*models.MediaURL{}, errors.Wrapf(err, "failed to insert video thumbnail image into database (%s)", video.Title)
}
updatedURLs = append(updatedURLs, &thumbMediaURL)
} else {
// Verify that video thumbnail still exists in cache
thumbImagePath := path.Join(*videoCachePath, videoThumbnailURL.MediaName)
thumbImagePath := path.Join(mediaCachePath, videoThumbnailURL.MediaName)
if _, err := os.Stat(thumbImagePath); os.IsNotExist(err) {
fmt.Printf("Video thumbnail found in database but not in cache, re-encoding photo to cache: %s\n", videoThumbnailURL.MediaName)
didProcess = true
updatedURLs = append(updatedURLs, videoThumbnailURL)
err = executable_worker.FfmpegCli.EncodeVideoThumbnail(video.Path, thumbImagePath, probeData)
if err != nil {
return false, errors.Wrapf(err, "failed to generate thumbnail for video (%s)", video.Title)
return []*models.MediaURL{}, errors.Wrapf(err, "failed to generate thumbnail for video (%s)", video.Title)
}
thumbDimensions, err := media_utils.GetPhotoDimensions(thumbImagePath)
if err != nil {
return false, errors.Wrap(err, "get dimensions of video thumbnail image")
return []*models.MediaURL{}, errors.Wrap(err, "get dimensions of video thumbnail image")
}
fileStats, err := os.Stat(thumbImagePath)
if err != nil {
return false, errors.Wrap(err, "reading file stats of video thumbnail")
return []*models.MediaURL{}, errors.Wrap(err, "reading file stats of video thumbnail")
}
videoThumbnailURL.Width = thumbDimensions.Width
videoThumbnailURL.Height = thumbDimensions.Height
videoThumbnailURL.FileSize = fileStats.Size()
if err := tx.Save(videoThumbnailURL).Error; err != nil {
return false, errors.Wrap(err, "updating video thumbnail url in database after re-encoding")
if err := ctx.GetDB().Save(videoThumbnailURL).Error; err != nil {
return []*models.MediaURL{}, errors.Wrap(err, "updating video thumbnail url in database after re-encoding")
}
}
}
return didProcess, nil
return updatedURLs, nil
}
func readVideoMetadata(videoPath string) (*ffprobe.ProbeData, error) {
func ReadVideoMetadata(videoPath string) (*ffprobe.ProbeData, error) {
ctx, cancelFn := context.WithTimeout(context.Background(), 5*time.Second)
defer cancelFn()
@ -210,8 +217,8 @@ func readVideoMetadata(videoPath string) (*ffprobe.ProbeData, error) {
return data, nil
}
func readVideoStreamMetadata(videoPath string) (*ffprobe.Stream, error) {
data, err := readVideoMetadata(videoPath)
func ReadVideoStreamMetadata(videoPath string) (*ffprobe.Stream, error) {
data, err := ReadVideoMetadata(videoPath)
if err != nil {
return nil, errors.Wrap(err, "read video stream metadata")
}
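For orientation, a minimal usage sketch of the newly exported helper when called from another package; the file path and the printed fields (taken from go-ffprobe's Stream type) are illustrative assumptions, not part of this commit.

    package main

    import (
    	"fmt"
    	"log"

    	"github.com/photoview/photoview/api/scanner/scanner_tasks/processing_tasks"
    )

    func main() {
    	// Probe the primary video stream of a file; ReadVideoStreamMetadata wraps
    	// ffprobe with the 5-second timeout shown above.
    	stream, err := processing_tasks.ReadVideoStreamMetadata("/photos/example/clip.mp4")
    	if err != nil {
    		log.Fatalf("probe failed: %v", err)
    	}
    	fmt.Printf("codec=%s resolution=%dx%d\n", stream.CodecName, stream.Width, stream.Height)
    }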

View File

@ -0,0 +1,98 @@
package processing_tasks
import (
"os"
"path"
"github.com/photoview/photoview/api/graphql/models"
"github.com/photoview/photoview/api/scanner/media_encoding"
"github.com/photoview/photoview/api/scanner/media_encoding/media_utils"
"github.com/pkg/errors"
"gorm.io/gorm"
)
func generateSaveHighResJPEG(tx *gorm.DB, media *models.Media, imageData *media_encoding.EncodeMediaData, highres_name string, imagePath string, mediaURL *models.MediaURL) (*models.MediaURL, error) {
err := imageData.EncodeHighRes(imagePath)
if err != nil {
return nil, errors.Wrap(err, "creating high-res cached image")
}
photoDimensions, err := media_utils.GetPhotoDimensions(imagePath)
if err != nil {
return nil, err
}
fileStats, err := os.Stat(imagePath)
if err != nil {
return nil, errors.Wrap(err, "reading file stats of highres photo")
}
if mediaURL == nil {
mediaURL = &models.MediaURL{
MediaID: media.ID,
MediaName: highres_name,
Width: photoDimensions.Width,
Height: photoDimensions.Height,
Purpose: models.PhotoHighRes,
ContentType: "image/jpeg",
FileSize: fileStats.Size(),
}
if err := tx.Create(&mediaURL).Error; err != nil {
return nil, errors.Wrapf(err, "could not insert highres media url (%d, %s)", media.ID, highres_name)
}
} else {
mediaURL.Width = photoDimensions.Width
mediaURL.Height = photoDimensions.Height
mediaURL.FileSize = fileStats.Size()
if err := tx.Save(&mediaURL).Error; err != nil {
return nil, errors.Wrapf(err, "could not update media url after side car changes (%d, %s)", media.ID, highres_name)
}
}
return mediaURL, nil
}
func generateSaveThumbnailJPEG(tx *gorm.DB, media *models.Media, thumbnail_name string, photoCachePath string, baseImagePath string, mediaURL *models.MediaURL) (*models.MediaURL, error) {
thumbOutputPath := path.Join(photoCachePath, thumbnail_name)
thumbSize, err := media_encoding.EncodeThumbnail(baseImagePath, thumbOutputPath)
if err != nil {
return nil, errors.Wrap(err, "could not create thumbnail cached image")
}
fileStats, err := os.Stat(thumbOutputPath)
if err != nil {
return nil, errors.Wrap(err, "reading file stats of thumbnail photo")
}
if mediaURL == nil {
mediaURL = &models.MediaURL{
MediaID: media.ID,
MediaName: thumbnail_name,
Width: thumbSize.Width,
Height: thumbSize.Height,
Purpose: models.PhotoThumbnail,
ContentType: "image/jpeg",
FileSize: fileStats.Size(),
}
if err := tx.Create(&mediaURL).Error; err != nil {
return nil, errors.Wrapf(err, "could not insert thumbnail media url (%d, %s)", media.ID, thumbnail_name)
}
} else {
mediaURL.Width = thumbSize.Width
mediaURL.Height = thumbSize.Height
mediaURL.FileSize = fileStats.Size()
if err := tx.Save(&mediaURL).Error; err != nil {
return nil, errors.Wrapf(err, "could not update media url after side car changes (%d, %s)", media.ID, thumbnail_name)
}
}
return mediaURL, nil
}

View File

@ -0,0 +1,82 @@
package processing_tasks
import (
"fmt"
"os"
"path"
"github.com/photoview/photoview/api/graphql/models"
"github.com/photoview/photoview/api/scanner/media_encoding"
"github.com/photoview/photoview/api/scanner/media_encoding/media_utils"
"github.com/photoview/photoview/api/utils"
"github.com/pkg/errors"
"gorm.io/gorm"
)
// Higher-order function used to check whether a MediaURL for a given MediaPurpose exists
func makePhotoURLChecker(tx *gorm.DB, mediaID int) func(purpose models.MediaPurpose) (*models.MediaURL, error) {
return func(purpose models.MediaPurpose) (*models.MediaURL, error) {
var mediaURL []*models.MediaURL
result := tx.Where("purpose = ?", purpose).Where("media_id = ?", mediaID).Find(&mediaURL)
if result.Error != nil {
return nil, result.Error
}
if result.RowsAffected > 0 {
return mediaURL[0], nil
}
return nil, nil
}
}
func generateUniqueMediaNamePrefixed(prefix string, mediaPath string, extension string) string {
mediaName := fmt.Sprintf("%s_%s_%s", prefix, path.Base(mediaPath), utils.GenerateToken())
mediaName = models.SanitizeMediaName(mediaName)
mediaName = mediaName + extension
return mediaName
}
func generateUniqueMediaName(mediaPath string) string {
filename := path.Base(mediaPath)
baseName := filename[0 : len(filename)-len(path.Ext(filename))]
baseExt := path.Ext(filename)
mediaName := fmt.Sprintf("%s_%s", baseName, utils.GenerateToken())
mediaName = models.SanitizeMediaName(mediaName) + baseExt
return mediaName
}
func saveOriginalPhotoToDB(tx *gorm.DB, photo *models.Media, imageData *media_encoding.EncodeMediaData, photoDimensions *media_utils.PhotoDimensions) (*models.MediaURL, error) {
originalImageName := generateUniqueMediaName(photo.Path)
contentType, err := imageData.ContentType()
if err != nil {
return nil, err
}
fileStats, err := os.Stat(photo.Path)
if err != nil {
return nil, errors.Wrap(err, "reading file stats of original photo")
}
mediaURL := models.MediaURL{
Media: photo,
MediaName: originalImageName,
Width: photoDimensions.Width,
Height: photoDimensions.Height,
Purpose: models.MediaOriginal,
ContentType: string(*contentType),
FileSize: fileStats.Size(),
}
if err := tx.Create(&mediaURL).Error; err != nil {
return nil, errors.Wrapf(err, "inserting original photo url: %d, %s", photo.ID, photo.Title)
}
return &mediaURL, nil
}
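A hedged sketch of how the checker returned by makePhotoURLChecker is intended to be used: build the closure once per media item, then query it for each MediaPurpose of interest. The wrapper function exampleURLLookup is invented for illustration and is not part of the commit.

    package processing_tasks

    import (
    	"github.com/photoview/photoview/api/graphql/models"
    	"gorm.io/gorm"
    )

    // exampleURLLookup is an illustrative sketch, not code from the commit.
    func exampleURLLookup(tx *gorm.DB, mediaID int) (*models.MediaURL, *models.MediaURL, error) {
    	urlFromDB := makePhotoURLChecker(tx, mediaID)

    	thumb, err := urlFromDB(models.PhotoThumbnail)
    	if err != nil {
    		return nil, nil, err
    	}
    	highRes, err := urlFromDB(models.PhotoHighRes)
    	if err != nil {
    		return nil, nil, err
    	}
    	// A nil result means no MediaURL with that purpose has been generated yet.
    	return thumb, highRes, nil
    }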

View File

@ -0,0 +1,159 @@
package processing_tasks
import (
"crypto/md5"
"encoding/hex"
"fmt"
"io"
"log"
"os"
"path"
"github.com/photoview/photoview/api/graphql/models"
"github.com/photoview/photoview/api/scanner/media_encoding"
"github.com/photoview/photoview/api/scanner/scanner_task"
"github.com/photoview/photoview/api/scanner/scanner_utils"
"github.com/pkg/errors"
)
type SidecarTask struct {
scanner_task.ScannerTaskBase
}
func (t SidecarTask) AfterMediaFound(ctx scanner_task.TaskContext, media *models.Media, newMedia bool) error {
if media.Type != models.MediaTypePhoto || !newMedia {
return nil
}
mediaType, err := ctx.GetCache().GetMediaType(media.Path)
if err != nil {
return errors.Wrap(err, "scan for sidecar file")
}
if !mediaType.IsRaw() {
return nil
}
var sideCarPath *string = nil
var sideCarHash *string = nil
sideCarPath = scanForSideCarFile(media.Path)
if sideCarPath != nil {
sideCarHash = hashSideCarFile(sideCarPath)
}
// Add sidecar data to media
media.SideCarPath = sideCarPath
media.SideCarHash = sideCarHash
if err := ctx.GetDB().Save(media).Error; err != nil {
return errors.Wrapf(err, "update media sidecar info (%s)", *sideCarPath)
}
return nil
}
func (t SidecarTask) ProcessMedia(ctx scanner_task.TaskContext, mediaData *media_encoding.EncodeMediaData, mediaCachePath string) (updatedURLs []*models.MediaURL, err error) {
mediaType, err := mediaData.ContentType()
if err != nil {
return []*models.MediaURL{}, errors.Wrap(err, "sidecar task, process media")
}
if !mediaType.IsRaw() {
return []*models.MediaURL{}, nil
}
photo := mediaData.Media
sideCarFileHasChanged := false
var currentFileHash *string
currentSideCarPath := scanForSideCarFile(photo.Path)
if currentSideCarPath != nil {
currentFileHash = hashSideCarFile(currentSideCarPath)
if photo.SideCarHash == nil || *photo.SideCarHash != *currentFileHash {
sideCarFileHasChanged = true
}
} else if photo.SideCarPath != nil { // sidecar has been deleted since last scan
sideCarFileHasChanged = true
}
if !sideCarFileHasChanged {
return []*models.MediaURL{}, nil
}
fmt.Printf("Detected changed sidecar file for %s recreating JPG's to reflect changes\n", photo.Path)
highResURL, err := photo.GetHighRes()
if err != nil {
return []*models.MediaURL{}, errors.Wrap(err, "sidecar task, get high-res media_url")
}
thumbURL, err := photo.GetThumbnail()
if err != nil {
return []*models.MediaURL{}, errors.Wrap(err, "sidecar task, get high-res media_url")
}
// regenerate the high-res image: it may be cropped differently, so dimensions and file size can change
baseImagePath := path.Join(mediaCachePath, highResURL.MediaName) // update base image path for thumbnail
tempHighResPath := baseImagePath + ".hold"
os.Rename(baseImagePath, tempHighResPath)
updatedHighRes, err := generateSaveHighResJPEG(ctx.GetDB(), photo, mediaData, highResURL.MediaName, baseImagePath, highResURL)
if err != nil {
os.Rename(tempHighResPath, baseImagePath)
return []*models.MediaURL{}, errors.Wrap(err, "sidecar task, recreating high-res cached image")
}
os.Remove(tempHighResPath)
// regenerate the thumbnail image: it may be cropped differently, so dimensions and file size can change
thumbPath := path.Join(mediaCachePath, thumbURL.MediaName)
tempThumbPath := thumbPath + ".hold" // hold onto the original image incase for some reason we fail to recreate one with the new settings
os.Rename(thumbPath, tempThumbPath)
updatedThumbnail, err := generateSaveThumbnailJPEG(ctx.GetDB(), photo, thumbURL.MediaName, mediaCachePath, baseImagePath, thumbURL)
if err != nil {
os.Rename(tempThumbPath, thumbPath)
return []*models.MediaURL{}, errors.Wrap(err, "recreating thumbnail cached image")
}
os.Remove(tempThumbPath)
photo.SideCarHash = currentFileHash
photo.SideCarPath = currentSideCarPath
// persist the new sidecar hash and path
if err := ctx.GetDB().Save(&photo).Error; err != nil {
return []*models.MediaURL{}, errors.Wrapf(err, "could not update side car hash for media: %s", photo.Path)
}
return []*models.MediaURL{
updatedThumbnail,
updatedHighRes,
}, nil
}
func scanForSideCarFile(path string) *string {
testPath := path + ".xmp"
if scanner_utils.FileExists(testPath) {
return &testPath
}
return nil
}
func hashSideCarFile(path *string) *string {
if path == nil {
return nil
}
f, err := os.Open(*path)
if err != nil {
log.Printf("ERROR: %s", err)
return nil
}
defer f.Close()
h := md5.New()
if _, err := io.Copy(h, f); err != nil {
log.Printf("ERROR: %s", err)
}
hash := hex.EncodeToString(h.Sum(nil))
return &hash
}
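The rename/regenerate/remove sequence in ProcessMedia above follows a small "hold" pattern: the previous cached file is parked under a .hold name until its replacement has been generated successfully. A minimal sketch of that pattern, with an invented helper name and signature:

    package processing_tasks

    import "os"

    // regenerateWithHold is an illustrative sketch, not part of the commit.
    func regenerateWithHold(cachedPath string, regenerate func(outputPath string) error) error {
    	holdPath := cachedPath + ".hold"
    	if err := os.Rename(cachedPath, holdPath); err != nil {
    		return err
    	}
    	if err := regenerate(cachedPath); err != nil {
    		// Regeneration failed, restore the original file.
    		os.Rename(holdPath, cachedPath)
    		return err
    	}
    	return os.Remove(holdPath)
    }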

View File

@ -0,0 +1,143 @@
package scanner_tasks
import (
"io/fs"
"github.com/photoview/photoview/api/graphql/models"
"github.com/photoview/photoview/api/scanner/media_encoding"
"github.com/photoview/photoview/api/scanner/scanner_task"
"github.com/photoview/photoview/api/scanner/scanner_tasks/cleanup_tasks"
"github.com/photoview/photoview/api/scanner/scanner_tasks/processing_tasks"
)
var allTasks []scanner_task.ScannerTask = []scanner_task.ScannerTask{
NotificationTask{},
IgnorefileTask{},
processing_tasks.CounterpartFilesTask{},
processing_tasks.SidecarTask{},
processing_tasks.ProcessPhotoTask{},
processing_tasks.ProcessVideoTask{},
FaceDetectionTask{},
ExifTask{},
VideoMetadataTask{},
cleanup_tasks.MediaCleanupTask{},
}
type scannerTasks struct {
scanner_task.ScannerTaskBase
}
var Tasks scannerTasks = scannerTasks{}
func simpleCombinedTasks(ctx scanner_task.TaskContext, doTask func(ctx scanner_task.TaskContext, task scanner_task.ScannerTask) error) error {
for _, task := range allTasks {
select {
case <-ctx.Done():
return ctx.Err()
default:
}
err := doTask(ctx, task)
if err != nil {
return err
}
}
return nil
}
func (t scannerTasks) BeforeScanAlbum(ctx scanner_task.TaskContext) (scanner_task.TaskContext, error) {
for _, task := range allTasks {
var err error
ctx, err = task.BeforeScanAlbum(ctx)
if err != nil {
return ctx, err
}
select {
case <-ctx.Done():
return ctx, ctx.Err()
default:
}
}
return ctx, nil
}
func (t scannerTasks) MediaFound(ctx scanner_task.TaskContext, fileInfo fs.FileInfo, mediaPath string) (bool, error) {
for _, task := range allTasks {
select {
case <-ctx.Done():
return false, ctx.Err()
default:
}
skip, err := task.MediaFound(ctx, fileInfo, mediaPath)
if err != nil {
return false, err
}
if skip {
return true, nil
}
}
return false, nil
}
func (t scannerTasks) AfterScanAlbum(ctx scanner_task.TaskContext, changedMedia []*models.Media, albumMedia []*models.Media) error {
return simpleCombinedTasks(ctx, func(ctx scanner_task.TaskContext, task scanner_task.ScannerTask) error {
return task.AfterScanAlbum(ctx, changedMedia, albumMedia)
})
}
func (t scannerTasks) AfterMediaFound(ctx scanner_task.TaskContext, media *models.Media, newMedia bool) error {
return simpleCombinedTasks(ctx, func(ctx scanner_task.TaskContext, task scanner_task.ScannerTask) error {
return task.AfterMediaFound(ctx, media, newMedia)
})
}
func (t scannerTasks) BeforeProcessMedia(ctx scanner_task.TaskContext, mediaData *media_encoding.EncodeMediaData) (scanner_task.TaskContext, error) {
for _, task := range allTasks {
select {
case <-ctx.Done():
return ctx, ctx.Err()
default:
}
var err error
ctx, err = task.BeforeProcessMedia(ctx, mediaData)
if err != nil {
return ctx, err
}
}
return ctx, nil
}
func (t scannerTasks) ProcessMedia(ctx scanner_task.TaskContext, mediaData *media_encoding.EncodeMediaData, mediaCachePath string) ([]*models.MediaURL, error) {
allNewMedia := make([]*models.MediaURL, 0)
for _, task := range allTasks {
select {
case <-ctx.Done():
return nil, ctx.Err()
default:
}
newMedia, err := task.ProcessMedia(ctx, mediaData, mediaCachePath)
if err != nil {
return []*models.MediaURL{}, err
}
allNewMedia = append(allNewMedia, newMedia...)
}
return allNewMedia, nil
}
func (t scannerTasks) AfterProcessMedia(ctx scanner_task.TaskContext, mediaData *media_encoding.EncodeMediaData, updatedURLs []*models.MediaURL, mediaIndex int, mediaTotal int) error {
return simpleCombinedTasks(ctx, func(ctx scanner_task.TaskContext, task scanner_task.ScannerTask) error {
return task.AfterProcessMedia(ctx, mediaData, updatedURLs, mediaIndex, mediaTotal)
})
}
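Taken together, a new task plugs into this dispatcher by embedding ScannerTaskBase to inherit no-op defaults, overriding only the hooks it needs, and being added to allTasks. A hedged sketch with an invented LoggingTask (not part of the commit):

    package mytasks

    import (
    	"log"

    	"github.com/photoview/photoview/api/graphql/models"
    	"github.com/photoview/photoview/api/scanner/scanner_task"
    )

    // LoggingTask is an illustrative sketch, not part of the commit.
    type LoggingTask struct {
    	scanner_task.ScannerTaskBase
    }

    func (t LoggingTask) AfterMediaFound(ctx scanner_task.TaskContext, media *models.Media, newMedia bool) error {
    	if newMedia {
    		log.Printf("scanner found new media: %s", media.Path)
    	}
    	return nil
    }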

View File

@ -1,18 +1,39 @@
package scanner
package scanner_tasks
import (
"fmt"
"log"
"strconv"
"strings"
"github.com/photoview/photoview/api/graphql/models"
"github.com/photoview/photoview/api/scanner/scanner_task"
"github.com/photoview/photoview/api/scanner/scanner_tasks/processing_tasks"
"github.com/pkg/errors"
"gorm.io/gorm"
)
type VideoMetadataTask struct {
scanner_task.ScannerTaskBase
}
func (t VideoMetadataTask) AfterMediaFound(ctx scanner_task.TaskContext, media *models.Media, newMedia bool) error {
if !newMedia || media.Type != models.MediaTypeVideo {
return nil
}
err := ScanVideoMetadata(ctx.GetDB(), media)
if err != nil {
log.Printf("WARN: ScanVideoMetadata for %s failed: %s\n", media.Title, err)
}
return nil
}
func ScanVideoMetadata(tx *gorm.DB, video *models.Media) error {
data, err := readVideoMetadata(video.Path)
data, err := processing_tasks.ReadVideoMetadata(video.Path)
if err != nil {
return errors.Wrapf(err, "scan video metadata failed (%s)", video.Title)
}

View File

@ -10,6 +10,7 @@ import (
"github.com/photoview/photoview/api/graphql/models"
"github.com/photoview/photoview/api/scanner/scanner_cache"
"github.com/photoview/photoview/api/scanner/scanner_tasks/cleanup_tasks"
"github.com/photoview/photoview/api/scanner/scanner_utils"
"github.com/photoview/photoview/api/utils"
"github.com/pkg/errors"
@ -42,7 +43,7 @@ func getPhotoviewIgnore(ignorePath string) ([]string, error) {
return photoviewIgnore, scanner.Err()
}
func findAlbumsForUser(db *gorm.DB, user *models.User, album_cache *scanner_cache.AlbumScannerCache) ([]*models.Album, []error) {
func FindAlbumsForUser(db *gorm.DB, user *models.User, album_cache *scanner_cache.AlbumScannerCache) ([]*models.Album, []error) {
if err := user.FillAlbums(db); err != nil {
return nil, []error{err}
@ -215,7 +216,7 @@ func findAlbumsForUser(db *gorm.DB, user *models.User, album_cache *scanner_cach
}
}
deleteErrors := deleteOldUserAlbums(db, userAlbums, user)
deleteErrors := cleanup_tasks.DeleteOldUserAlbums(db, userAlbums, user)
scanErrors = append(scanErrors, deleteErrors...)
return userAlbums, scanErrors

View File

@ -15,10 +15,11 @@ import (
"github.com/photoview/photoview/api/graphql/auth"
graphql_endpoint "github.com/photoview/photoview/api/graphql/endpoint"
"github.com/photoview/photoview/api/routes"
"github.com/photoview/photoview/api/scanner"
"github.com/photoview/photoview/api/scanner/exif"
"github.com/photoview/photoview/api/scanner/face_detection"
"github.com/photoview/photoview/api/scanner/media_encoding/executable_worker"
"github.com/photoview/photoview/api/scanner/periodic_scanner"
"github.com/photoview/photoview/api/scanner/scanner_queue"
"github.com/photoview/photoview/api/server"
"github.com/photoview/photoview/api/utils"
@ -45,11 +46,11 @@ func main() {
log.Panicf("Could not migrate database: %s\n", err)
}
if err := scanner.InitializeScannerQueue(db); err != nil {
if err := scanner_queue.InitializeScannerQueue(db); err != nil {
log.Panicf("Could not initialize scanner queue: %s\n", err)
}
if err := scanner.InitializePeriodicScanner(db); err != nil {
if err := periodic_scanner.InitializePeriodicScanner(db); err != nil {
log.Panicf("Could not initialize periodic scanner: %s", err)
}

View File

@ -4,33 +4,33 @@ import (
"testing"
"github.com/photoview/photoview/api/graphql/models"
"github.com/photoview/photoview/api/scanner"
"github.com/photoview/photoview/api/scanner/scanner_queue"
"github.com/stretchr/testify/assert"
"gorm.io/gorm"
)
func RunScannerOnUser(t *testing.T, db *gorm.DB, user *models.User) {
if !assert.NoError(t, scanner.InitializeScannerQueue(db)) {
if !assert.NoError(t, scanner_queue.InitializeScannerQueue(db)) {
return
}
if !assert.NoError(t, scanner.AddUserToQueue(user)) {
if !assert.NoError(t, scanner_queue.AddUserToQueue(user)) {
return
}
// wait for all jobs to finish
scanner.CloseScannerQueue()
scanner_queue.CloseScannerQueue()
}
func RunScannerAll(t *testing.T, db *gorm.DB) {
if !assert.NoError(t, scanner.InitializeScannerQueue(db)) {
if !assert.NoError(t, scanner_queue.InitializeScannerQueue(db)) {
return
}
if !assert.NoError(t, scanner.AddAllToQueue()) {
if !assert.NoError(t, scanner_queue.AddAllToQueue()) {
return
}
// wait for all jobs to finish
scanner.CloseScannerQueue()
scanner_queue.CloseScannerQueue()
}

58
api/utils/media_cache.go Normal file
View File

@ -0,0 +1,58 @@
package utils
import (
"os"
"path"
"strconv"
"github.com/pkg/errors"
)
// CachePathForMedia is a low-level implementation for Media.CachePath()
func CachePathForMedia(albumID int, mediaID int) (string, error) {
// Create the root cache dir if it does not exist
if _, err := os.Stat(MediaCachePath()); os.IsNotExist(err) {
if err := os.Mkdir(MediaCachePath(), os.ModePerm); err != nil {
return "", errors.Wrap(err, "could not make root image cache directory")
}
}
// Create the album cache dir if it does not exist
albumCachePath := path.Join(MediaCachePath(), strconv.Itoa(int(albumID)))
if _, err := os.Stat(albumCachePath); os.IsNotExist(err) {
if err := os.Mkdir(albumCachePath, os.ModePerm); err != nil {
return "", errors.Wrap(err, "could not make album image cache directory")
}
}
// Create the photo cache dir if it does not exist
photoCachePath := path.Join(albumCachePath, strconv.Itoa(int(mediaID)))
if _, err := os.Stat(photoCachePath); os.IsNotExist(err) {
if err := os.Mkdir(photoCachePath, os.ModePerm); err != nil {
return "", errors.Wrap(err, "could not make photo image cache directory")
}
}
return photoCachePath, nil
}
var test_cache_path string = ""
func ConfigureTestCache(tmp_dir string) {
test_cache_path = tmp_dir
}
// MediaCachePath returns the path for where the media cache is located on the file system
func MediaCachePath() string {
if test_cache_path != "" {
return test_cache_path
}
photoCache := EnvMediaCachePath.GetValue()
if photoCache == "" {
photoCache = "./media_cache"
}
return photoCache
}
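A short usage sketch of the new helper; the album and media IDs and the output file name are illustrative assumptions, not values from the commit.

    package main

    import (
    	"fmt"
    	"log"
    	"path"

    	"github.com/photoview/photoview/api/utils"
    )

    func main() {
    	// Resolve (and create, if needed) the cache directory for media 345 in
    	// album 12, then build a path for a generated file inside it.
    	cacheDir, err := utils.CachePathForMedia(12, 345)
    	if err != nil {
    		log.Fatalf("resolve cache path: %v", err)
    	}
    	fmt.Println(path.Join(cacheDir, "thumbnail_example.jpg"))
    }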

View File

@ -47,26 +47,6 @@ func HandleError(message string, err error) PhotoviewError {
}
}
var test_cache_path string = ""
func ConfigureTestCache(tmp_dir string) {
test_cache_path = tmp_dir
}
// MediaCachePath returns the path for where the media cache is located on the file system
func MediaCachePath() string {
if test_cache_path != "" {
return test_cache_path
}
photoCache := EnvMediaCachePath.GetValue()
if photoCache == "" {
photoCache = "./media_cache"
}
return photoCache
}
var test_face_recognition_models_path string = ""
func ConfigureTestFaceRecognitionModelsPath(path string) {

View File

@ -43,24 +43,24 @@ module.exports = {
},
// parser: 'babel-eslint',
overrides: [
Object.assign(require('eslint-plugin-jest').configs.recommended, {
files: ['**/*.test.js', '**/*.test.ts', '**/*.test.tsx'],
env: { jest: true },
plugins: ['jest', 'jest-dom'],
rules: Object.assign(
require('eslint-plugin-jest').configs.recommended.rules,
{
'no-import-assign': 'off',
'react/prop-types': 'off',
'jest/valid-title': 'off',
}
),
settings: {
jest: {
version: 26,
},
},
}),
// Object.assign(require('eslint-plugin-jest').configs.recommended, {
// files: ['**/*.test.js', '**/*.test.ts', '**/*.test.tsx'],
// env: { jest: true },
// plugins: ['jest', 'jest-dom'],
// rules: Object.assign(
// require('eslint-plugin-jest').configs.recommended.rules,
// {
// 'no-import-assign': 'off',
// 'react/prop-types': 'off',
// 'jest/valid-title': 'off',
// }
// ),
// settings: {
// jest: {
// version: 26,
// },
// },
// }),
{
files: ['**/*.js'],
rules: {

View File

@ -1,7 +0,0 @@
module.exports = {
style: {
postcss: {
plugins: [require('tailwindcss'), require('autoprefixer')],
},
},
}

22
ui/index.html Normal file
View File

@ -0,0 +1,22 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8" />
<link rel="icon" href="/photoview-logo.svg" type="image/svg+xml" />
<link rel="apple-touch-icon" href="/logo192.png" />
<meta name="viewport" content="width=device-width, initial-scale=1" />
<meta name="theme-color" content="#000000" />
<link rel="manifest" href="/manifest.json" />
<meta name="apple-mobile-web-app-title" content="Photoview" />
<meta name="apple-mobile-web-app-capable" content="yes" />
<meta name="apple-mobile-web-app-status-bar-style" content="white" />
</head>
<body>
<noscript>You need to enable JavaScript to run Photoview.</noscript>
<div id="root"></div>
<script type="module" src="/src/index.tsx"></script>
</body>
</html>

36794
ui/package-lock.json generated

File diff suppressed because it is too large

View File

@ -8,81 +8,74 @@
},
"license": "GPL-3.0",
"description": "UI app for Photoview",
"scripts": {
"start": "vite",
"build": "vite build",
"lint": "eslint ./src --max-warnings 0 --cache --config .eslintrc.js",
"test": "vitest",
"test:ci": "CI=true vitest --reporter verbose --run --coverage",
"genSchemaTypes": "apollo client:codegen --target=typescript --globalTypesFile=src/__generated__/globalTypes.ts && prettier --write */**/__generated__/*.ts",
"extractTranslations": "i18next -c i18next-parser.config.js",
"prepare": "(cd .. && ./ui/node_modules/.bin/husky install)"
},
"dependencies": {
"@apollo/client": "^3.5.8",
"@babel/preset-typescript": "^7.16.7",
"@craco/craco": "^6.4.3",
"@headlessui/react": "^1.4.3",
"@react-aria/focus": "^3.5.0",
"@rollup/plugin-babel": "^5.3.0",
"@apollo/client": "^3.6.9",
"@babel/preset-typescript": "^7.18.6",
"@headlessui/react": "^1.6.6",
"@types/geojson": "^7946.0.8",
"@types/jest": "^27.4.0",
"@types/mapbox-gl": "^2.6.0",
"@types/react": "^17.0.38",
"@types/react-dom": "^17.0.11",
"@types/jest": "^28.1.4",
"@types/mapbox-gl": "^2.7.3",
"@types/react": "^18.0.15",
"@types/react-dom": "^18.0.6",
"@types/react-helmet": "^6.1.5",
"@types/react-router-dom": "^5.3.3",
"@types/styled-components": "^5.1.21",
"@types/styled-components": "^5.1.25",
"@types/url-join": "^4.0.1",
"autoprefixer": "^9.8.6",
"babel-plugin-graphql-tag": "^3.3.0",
"blurhash": "^1.1.4",
"@vitejs/plugin-react": "^1.3.2",
"autoprefixer": "^10.4.7",
"blurhash": "^1.1.5",
"classnames": "^2.3.1",
"connect-history-api-fallback": "^1.6.0",
"connect-history-api-fallback": "^2.0.0",
"copy-to-clipboard": "^3.3.1",
"eslint-config-prettier": "^8.3.0",
"eslint-plugin-jest-dom": "^4.0.1",
"fs-extra": "^10.0.0",
"i18next": "^21.6.10",
"mapbox-gl": "^2.6.1",
"postcss": "^7.0.36",
"prettier": "^2.5.1",
"prop-types": "^15.8.1",
"react": "^17.0.2",
"i18next": "^21.8.13",
"mapbox-gl": "^2.9.1",
"postcss": "^8.4.14",
"prettier": "^2.7.1",
"react": "^18.2.0",
"react-blurhash": "^0.1.3",
"react-dom": "^17.0.2",
"react-dom": "^18.2.0",
"react-helmet": "^6.1.0",
"react-hook-form": "^7.25.3",
"react-i18next": "^11.15.3",
"react-router-dom": "^6.2.1",
"react-router-prop-types": "^1.0.5",
"react-scripts": "^4.0.3",
"react-spring": "^8.0.27",
"react-test-renderer": "^17.0.2",
"styled-components": "^5.3.3",
"react-hook-form": "^7.33.1",
"react-i18next": "^11.18.0",
"react-router-dom": "^6.3.0",
"react-scripts": "^5.0.1",
"react-test-renderer": "^18.2.0",
"styled-components": "^5.3.5",
"subscriptions-transport-ws": "^0.11.0",
"tailwind-override": "^0.6.1",
"tailwindcss": "npm:@tailwindcss/postcss7-compat@^2.2.17",
"typescript": "^4.5.5",
"url-join": "^4.0.1"
},
"scripts": {
"start": "BROWSER=none PORT=1234 craco start",
"build": "craco build",
"test": "npm run lint && npm run jest -- --watchAll=false",
"test:ci": "npm run lint && npm run jest:ci",
"lint": "npm run lint:types & npm run lint:eslint",
"lint:eslint": "eslint ./src --max-warnings 0 --cache --config .eslintrc.js",
"lint:types": "tsc --noemit",
"jest": "craco test --setupFilesAfterEnv ./testing/setupTests.ts",
"jest:ci": "CI=true craco test --setupFilesAfterEnv ./testing/setupTests.ts --verbose --ci --coverage",
"genSchemaTypes": "apollo client:codegen --target=typescript --globalTypesFile=src/__generated__/globalTypes.ts && prettier --write */**/__generated__/*.ts",
"extractTranslations": "i18next -c i18next-parser.config.js",
"prepare": "(cd .. && npx husky install)"
"typescript": "^4.7.4",
"url-join": "^5.0.0",
"vite": "^2.9.13",
"vite-plugin-svgr": "^2.2.0"
},
"devDependencies": {
"@testing-library/jest-dom": "^5.16.1",
"@testing-library/react": "^12.1.2",
"@testing-library/user-event": "^13.5.0",
"apollo": "2.33.9",
"apollo-language-server": "1.26.7",
"husky": "^7.0.4",
"i18next-parser": "^5.4.0",
"lint-staged": "^12.3.2",
"tsc-files": "1.1.2"
"@testing-library/jest-dom": "^5.16.4",
"@testing-library/react": "^13.3.0",
"@testing-library/user-event": "^14.2.1",
"@vitest/ui": "^0.17.1",
"apollo": "2.34.0",
"apollo-language-server": "1.26.9",
"c8": "^7.11.3",
"eslint": "^8.19.0",
"eslint-config-prettier": "^8.5.0",
"husky": "^8.0.1",
"i18next-parser": "^6.5.0",
"lint-staged": "^13.0.3",
"vitest": "^0.17.1"
},
"overrides": {
"graphql": "^15.0.0"
"graphql": "^15.5.0"
},
"prettier": {
"trailingComma": "es5",
@ -93,8 +86,7 @@
},
"lint-staged": {
"*.{ts,tsx,js,json,css,md,graphql}": "prettier --write",
"*.{js,ts,tsx}": "eslint --cache --fix --max-warnings 0",
"*.{ts,tsx}": "tsc-files --noEmit"
"*.{js,ts,tsx}": "eslint --cache --fix --max-warnings 0"
},
"browserslist": {
"production": [

6
ui/postcss.config.js Normal file
View File

@ -0,0 +1,6 @@
module.exports = {
plugins: {
tailwindcss: {},
autoprefixer: {},
},
}

View File

@ -1,42 +0,0 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8" />
<link rel="icon" href="%PUBLIC_URL%/photoview-logo.svg" type="image/svg+xml" />
<link rel="apple-touch-icon" href="%PUBLIC_URL%/logo192.png" />
<meta name="viewport" content="width=device-width, initial-scale=1" />
<meta name="theme-color" content="#000000" />
<!--
manifest.json provides metadata used when your web app is installed on a
user's mobile device or desktop. See https://developers.google.com/web/fundamentals/web-app-manifest/
-->
<link rel="manifest" href="%PUBLIC_URL%/manifest.json" />
<meta name="apple-mobile-web-app-title" content="Photoview" />
<meta name="apple-mobile-web-app-capable" content="yes" />
<meta name="apple-mobile-web-app-status-bar-style" content="white" />
<!--
Notice the use of %PUBLIC_URL% in the tags above.
It will be replaced with the URL of the `public` folder during the build.
Only files inside the `public` folder can be referenced from the HTML.
Unlike "/favicon.ico" or "favicon.ico", "%PUBLIC_URL%/favicon.ico" will
work correctly both with client-side routing and a non-root public URL.
Learn how to configure a non-root public URL by running `npm run build`.
-->
</head>
<body>
<noscript>You need to enable JavaScript to run Photoview.</noscript>
<div id="root"></div>
<!--
This HTML file is a template.
If you open it directly in the browser, you will see an empty page.
You can add webfonts, meta tags, or analytics to this file.
The build step will place the bundled scripts into the <body> tag.
To begin the development, run `npm start` or `yarn start`.
To create a production bundle, use `npm run build` or `yarn build`.
-->
</body>
</html>

View File

@ -4,7 +4,7 @@ import React from 'react'
import { MemoryRouter, Route, Routes } from 'react-router-dom'
import AlbumPage from './AlbumPage'
jest.mock('../../hooks/useScrollPagination')
vi.mock('../../hooks/useScrollPagination')
test('AlbumPage renders', () => {
render(

View File

@ -100,7 +100,7 @@ function AlbumPage() {
})
const toggleFavorites = useCallback(
onlyFavorites => {
(onlyFavorites: boolean) => {
if (
(refetchNeededAll && !onlyFavorites) ||
(refetchNeededFavorites && onlyFavorites)

View File

@ -7,11 +7,9 @@ import * as authentication from '../../helpers/authentication'
import InitialSetupPage from './InitialSetupPage'
import { mockInitialSetupGraphql } from './loginTestHelpers'
jest.mock('../../helpers/authentication.ts')
vi.mock('../../helpers/authentication.ts')
const authToken = authentication.authToken as jest.Mock<
ReturnType<typeof authentication.authToken>
>
const authToken = authentication.authToken // as vi.Mock<ReturnType<typeof authentication.authToken>>
describe('Initial setup page', () => {
test('Render initial setup form', async () => {

View File

@ -7,11 +7,9 @@ import { createMemoryHistory } from 'history'
import { MockedProvider } from '@apollo/client/testing'
import { mockInitialSetupGraphql } from './loginTestHelpers'
jest.mock('../../helpers/authentication.ts')
vi.mock('../../helpers/authentication.ts')
const authToken = authentication.authToken as jest.Mock<
ReturnType<typeof authentication.authToken>
>
const authToken = authentication.authToken // as vi.Mock<ReturnType<typeof authentication.authToken>>
describe('Login page redirects', () => {
test('Auth token redirect', async () => {

View File

@ -29,7 +29,7 @@ const LogoHeader = () => {
<div className="flex justify-center flex-col mb-14 mt-20">
<img
className="h-24"
src={process.env.PUBLIC_URL + '/photoview-logo.svg'}
src={import.meta.env.BASE_URL + 'photoview-logo.svg'}
alt="photoview logo"
/>
<h1 className="text-3xl text-center mt-4">

View File

@ -11,7 +11,7 @@ import { MockedProvider } from '@apollo/client/testing'
import { MemoryRouter } from 'react-router'
import { myFaces_myFaceGroups } from './__generated__/myFaces'
jest.mock('../../hooks/useScrollPagination')
vi.mock('../../hooks/useScrollPagination')
describe('PeoplePage component', () => {
const graphqlMocks = [
@ -142,7 +142,7 @@ describe('FaceDetails component', () => {
<MockedProvider mocks={[]} addTypename={false}>
<FaceDetails
editLabel={false}
setEditLabel={jest.fn()}
setEditLabel={vi.fn()}
group={emptyFaceGroup}
/>
</MockedProvider>
@ -161,7 +161,7 @@ describe('FaceDetails component', () => {
<MockedProvider mocks={[]} addTypename={false}>
<FaceDetails
editLabel={false}
setEditLabel={jest.fn()}
setEditLabel={vi.fn()}
group={labeledFaceGroup}
/>
</MockedProvider>
@ -181,7 +181,7 @@ describe('FaceDetails component', () => {
label: 'John Doe',
},
},
newData: jest.fn(() => ({
newData: vi.fn(() => ({
data: {
setFaceGroupLabel: {
__typename: 'FaceGroup',

View File

@ -4,7 +4,7 @@ import { MockedProvider } from '@apollo/client/testing'
import SingleFaceGroup, { SINGLE_FACE_GROUP } from './SingleFaceGroup'
import { MemoryRouter } from 'react-router-dom'
jest.mock('../../../hooks/useScrollPagination')
vi.mock('../../../hooks/useScrollPagination')
test('single face group', async () => {
const graphqlMocks = [
@ -92,9 +92,6 @@ test('single face group', async () => {
)
await waitFor(() => {
// expect(screen.queryByText('Loading more media')).not.toHaveClass('active')
expect(screen.queryByText('Face Group Name')).toBeInTheDocument()
})
expect(screen.getAllByRole('img')).toHaveLength(2)
})
})

View File

@ -33,8 +33,8 @@ const gqlMock = [
]
test('Add user with username and path', async () => {
const userAdded = jest.fn()
const setShow = jest.fn()
const userAdded = vi.fn()
const setShow = vi.fn()
render(
<MockedProvider addTypename={true} mocks={gqlMock}>
@ -62,8 +62,8 @@ test('Add user with username and path', async () => {
})
test('Add user with only username', async () => {
const userAdded = jest.fn()
const setShow = jest.fn()
const userAdded = vi.fn()
const setShow = vi.fn()
render(
<MockedProvider addTypename={true} mocks={gqlMock}>

View File

@ -40,7 +40,7 @@ const ChangePasswordModal = ({
title={t('settings.users.password_reset.title', 'Change password')}
description={
<Trans t={t} i18nKey="settings.users.password_reset.description">
Change password for <b>{{ username: user.username }}</b>
Change password for <b>{user.username}</b>
</Trans>
}
actions={[

View File

@ -7,10 +7,12 @@ import {
SectionTitle,
} from './SettingsPage'
const VERSION = process.env.REACT_APP_BUILD_VERSION ?? 'undefined'
const BUILD_DATE = process.env.REACT_APP_BUILD_DATE ?? 'undefined'
const VERSION = import.meta.env.REACT_APP_BUILD_VERSION ?? 'undefined'
const BUILD_DATE = import.meta.env.REACT_APP_BUILD_DATE ?? 'undefined'
const COMMIT_SHA = process.env.REACT_APP_BUILD_COMMIT_SHA as string | undefined
const COMMIT_SHA = import.meta.env.REACT_APP_BUILD_COMMIT_SHA as
| string
| undefined
let commitLink: ReactElement
if (COMMIT_SHA) {

View File

@ -35,7 +35,7 @@ const PasswordProtectedShare = ({
'share_page.wrong_password',
'Wrong password, please try again.'
)
} else if (errors.password?.type === 'required') {
} else if (errors.password) {
errorMessage = t(
'share_page.protected_share.password_required_error',
'Password is required'

View File

@ -17,7 +17,7 @@ import {
import { SIDEBAR_DOWNLOAD_QUERY } from '../../components/sidebar/SidebarDownloadMedia'
import { SHARE_ALBUM_QUERY } from './AlbumSharePage'
jest.mock('../../hooks/useScrollPagination')
vi.mock('../../hooks/useScrollPagination')
describe('load correct share page, based on graphql query', () => {
const token = 'TOKEN123'
@ -96,7 +96,7 @@ describe('load correct share page, based on graphql query', () => {
>
<MemoryRouter initialEntries={historyMock}>
<Routes>
<Route path="/share/:token" element={<TokenRoute />} />
<Route path="/share/:token/*" element={<TokenRoute />} />
</Routes>
</MemoryRouter>
</MockedProvider>
@ -173,7 +173,7 @@ describe('load correct share page, based on graphql query', () => {
>
<MemoryRouter initialEntries={historyMock}>
<Routes>
<Route path="/share/:token" element={<TokenRoute />} />
<Route path="/share/:token/*" element={<TokenRoute />} />
</Routes>
</MemoryRouter>
</MockedProvider>

View File

@ -17,8 +17,8 @@ import { MessageState } from './components/messages/Messages'
import { Message } from './components/messages/SubscriptionsHook'
import { NotificationType } from './__generated__/globalTypes'
export const API_ENDPOINT = process.env.REACT_APP_API_ENDPOINT
? (process.env.REACT_APP_API_ENDPOINT as string)
export const API_ENDPOINT = import.meta.env.REACT_APP_API_ENDPOINT
? (import.meta.env.REACT_APP_API_ENDPOINT as string)
: urlJoin(location.origin, '/api')
export const GRAPHQL_ENDPOINT = urlJoin(API_ENDPOINT, '/graphql')

View File

@ -18,7 +18,7 @@ const Header = () => {
<h1 className="mr-4 lg:mr-8 flex-shrink-0 flex items-center">
<img
className="h-12 lg:h-10"
src={process.env.PUBLIC_URL + '/photoview-logo.svg'}
src={import.meta.env.BASE_URL + 'photoview-logo.svg'}
alt="logo"
/>
<span className="hidden lg:block ml-2 text-2xl font-light">

View File

@ -4,7 +4,7 @@ import Layout from './Layout'
test('Layout component', async () => {
render(
<Layout>
<Layout title="Test title">
<p>layout_content</p>
</Layout>
)

View File

@ -1,5 +1,4 @@
import { gql } from '@apollo/client'
import PropTypes from 'prop-types'
import React, { useContext } from 'react'
import { Helmet } from 'react-helmet'
import Header from '../header/Header'
@ -41,7 +40,6 @@ const Layout = ({ children, title, ...otherProps }: LayoutProps) => {
id="layout-content"
>
{children}
{/* <div className="h-6"></div> */}
</div>
</div>
<Sidebar />
@ -50,9 +48,4 @@ const Layout = ({ children, title, ...otherProps }: LayoutProps) => {
)
}
Layout.propTypes = {
children: PropTypes.any.isRequired,
title: PropTypes.string,
}
export default Layout

View File

@ -7,11 +7,9 @@ import { ADMIN_QUERY } from './Layout'
import { MemoryRouter } from 'react-router-dom'
import MainMenu, { MAPBOX_QUERY } from './MainMenu'
jest.mock('../../helpers/authentication.ts')
vi.mock('../../helpers/authentication.ts')
const authTokenMock = authentication.authToken as jest.MockedFunction<
typeof authentication.authToken
>
const authTokenMock = authentication.authToken // as vi.MockedFunction<typeof authentication.authToken>
afterEach(() => {
authTokenMock.mockClear()

View File

@ -1,5 +1,4 @@
import React, { useState } from 'react'
import { animated, useTransition } from 'react-spring'
import styled from 'styled-components'
import { authToken } from '../../helpers/authentication'
import MessageProgress from './MessageProgress'
@ -83,29 +82,30 @@ const Messages = () => {
}
}
const transitions = useTransition(messages.slice().reverse(), x => x.key, {
from: {
opacity: 0,
height: '0px',
},
enter: {
opacity: 1,
height: `100px`,
},
leave: { opacity: 0, height: '0px' },
// const transitions = useTransition(messages.slice().reverse(), x => x.key, {
// from: {
// opacity: 0,
// height: '0px',
// },
// enter: {
// opacity: 1,
// height: `100px`,
// },
// leave: { opacity: 0, height: '0px' },
// })
const messageElems = messages.map(msg => {
const Elem = getMessageElement(msg)
return (
<div key={msg.key}>
<Elem />
</div>
)
})
return (
<Container>
{transitions.map(({ item, props: style, key }) => {
const MessageElement = getMessageElement(item)
return (
<animated.div key={key} style={style}>
<MessageElement />
</animated.div>
)
})}
{messageElems}
{authToken() && (
<SubscriptionsHook messages={messages} setMessages={setMessages} />
)}

View File

@ -1,5 +1,4 @@
import { notificationSubscription } from './__generated__/notificationSubscription'
import PropTypes from 'prop-types'
import { useEffect } from 'react'
import { useSubscription, gql } from '@apollo/client'
import { authToken } from '../../helpers/authentication'
@ -122,9 +121,4 @@ const SubscriptionsHook = ({
return null
}
SubscriptionsHook.propTypes = {
messages: PropTypes.array.isRequired,
setMessages: PropTypes.func.isRequired,
}
export default SubscriptionsHook

View File

@ -5,12 +5,12 @@ import { MediaType } from '../../__generated__/globalTypes'
import PhotoGallery from './PhotoGallery'
import { PhotoGalleryState } from './photoGalleryReducer'
jest.mock('./photoGalleryMutations', () => ({
useMarkFavoriteMutation: () => [jest.fn()],
vi.mock('./photoGalleryMutations', () => ({
useMarkFavoriteMutation: () => [vi.fn()],
}))
test('photo gallery with media', () => {
const dispatchMedia = jest.fn()
const dispatchMedia = vi.fn()
const mediaState: PhotoGalleryState = {
activeIndex: 0,
@ -68,7 +68,7 @@ test('photo gallery with media', () => {
})
describe('photo gallery presenting', () => {
const dispatchMedia = jest.fn()
const dispatchMedia = vi.fn()
test('not presenting', () => {
const mediaStateNoPresent: PhotoGalleryState = {

View File

@ -20,7 +20,7 @@ test('render present image', () => {
expect(screen.getByTestId('present-img-thumbnail')).toHaveAttribute(
'src',
'http://localhost/sample_image.jpg'
'http://localhost:3000/sample_image.jpg'
)
expect(screen.getByTestId('present-img-highres')).toHaveStyle({
display: 'none',
@ -47,10 +47,10 @@ test('render present video', () => {
expect(screen.getByTestId('present-video')).toHaveAttribute(
'poster',
'http://localhost/sample_video_thumb.jpg'
'http://localhost:3000/sample_video_thumb.jpg'
)
expect(
screen.getByTestId('present-video').querySelector('source')
).toHaveAttribute('src', 'http://localhost/sample_video.mp4')
).toHaveAttribute('src', 'http://localhost:3000/sample_video.mp4')
})

View File

@ -2,11 +2,11 @@ import React from 'react'
import PresentNavigationOverlay from './PresentNavigationOverlay'
import { fireEvent, render, screen, act } from '@testing-library/react'
jest.useFakeTimers('modern')
vi.useFakeTimers()
describe('PresentNavigationOverlay component', () => {
test('simple render', () => {
const dispatchMedia = jest.fn()
const dispatchMedia = vi.fn()
render(<PresentNavigationOverlay dispatchMedia={dispatchMedia} />)
expect(screen.getByLabelText('Previous image')).toBeInTheDocument()
@ -15,7 +15,7 @@ describe('PresentNavigationOverlay component', () => {
})
test('click buttons', () => {
const dispatchMedia = jest.fn()
const dispatchMedia = vi.fn()
render(<PresentNavigationOverlay dispatchMedia={dispatchMedia} />)
expect(dispatchMedia).not.toHaveBeenCalled()
@ -28,7 +28,7 @@ describe('PresentNavigationOverlay component', () => {
})
test('mouse move, show and hide', () => {
const dispatchMedia = jest.fn()
const dispatchMedia = vi.fn()
const { container } = render(
<PresentNavigationOverlay dispatchMedia={dispatchMedia} />
)
@ -39,7 +39,7 @@ describe('PresentNavigationOverlay component', () => {
expect(screen.getByLabelText('Next image')).not.toHaveClass('hide')
act(() => {
jest.advanceTimersByTime(3000)
vi.advanceTimersByTime(3000)
})
expect(screen.getByLabelText('Next image')).toHaveClass('hide')

View File

@ -67,7 +67,7 @@ const PresentView = ({
})
return (
<StyledContainer {...className}>
<StyledContainer className={className}>
<PreventScroll />
<PresentNavigationOverlay
dispatchMedia={dispatchMedia}

View File

@ -7,11 +7,9 @@ import * as authentication from '../../helpers/authentication'
import { MockedProvider } from '@apollo/client/testing'
import { ADMIN_QUERY } from '../layout/Layout'
jest.mock('../../helpers/authentication.ts')
vi.mock('../../helpers/authentication.ts')
const authToken = authentication.authToken as jest.Mock<
ReturnType<typeof authentication.authToken>
>
const authToken = vi.mocked(authentication.authToken)
describe('AuthorizedRoute component', () => {
const AuthorizedComponent = () => <div>authorized content</div>

View File

@ -8,12 +8,14 @@ import {
} from '@testing-library/react'
import { MemoryRouter } from 'react-router-dom'
jest.mock('../../Pages/LoginPage/LoginPage.tsx', () => () => (
vi.mock('../../Pages/LoginPage/LoginPage.tsx', () => () => (
<div>mocked login page</div>
))
describe('routes', () => {
test('unauthorized root path should navigate to login page', async () => {
// vitest does not support this yet
// https://github.com/vitest-dev/vitest/issues/960
test.skip('unauthorized root path should navigate to login page', async () => {
render(
<MemoryRouter initialEntries={['/']}>
<Routes />

View File

@ -7,11 +7,9 @@ import { MemoryRouter } from 'react-router'
import * as authentication from '../../../helpers/authentication'
jest.mock('../../../helpers/authentication.ts')
vi.mock('../../../helpers/authentication.ts')
const authToken = authentication.authToken as jest.Mock<
ReturnType<typeof authentication.authToken>
>
const authToken = vi.mocked(authentication.authToken)
describe('MediaSidebar', () => {
const media: MediaSidebarMedia = {

View File

@ -5,7 +5,7 @@ import { MemoryRouter } from 'react-router-dom'
import TimelineGallery, { MY_TIMELINE_QUERY } from './TimelineGallery'
import { timelineData } from './timelineTestData'
jest.mock('../../hooks/useScrollPagination')
vi.mock('../../hooks/useScrollPagination')
test('timeline with media', async () => {
const graphqlMocks = [

View File

@ -1,5 +1,4 @@
import React, { useRef, useEffect, useReducer } from 'react'
import PropTypes from 'prop-types'
import { useQuery, gql } from '@apollo/client'
import TimelineGroupDate from './TimelineGroupDate'
import PresentView from '../photoGallery/presentView/PresentView'
@ -197,9 +196,4 @@ const TimelineGallery = () => {
)
}
TimelineGallery.propTypes = {
favorites: PropTypes.bool,
setFavorites: PropTypes.func,
}
export default TimelineGallery

View File

@ -1,4 +1,4 @@
export default () => ({
finished: true,
containerElem: jest.fn(),
containerElem: vi.fn(),
})

View File

@ -1,7 +1,7 @@
import 'regenerator-runtime/runtime'
import React from 'react'
import ReactDOM from 'react-dom'
import { createRoot } from 'react-dom/client'
import App from './App'
import client from './apolloClient'
import { ApolloProvider } from '@apollo/client'
@ -26,6 +26,7 @@ const Main = () => (
</ApolloProvider>
)
ReactDOM.render(<Main />, document.getElementById('root'))
const root = createRoot(document.getElementById('root')!)
root.render(<Main />)
serviceWorkerRegistration.register()

View File

@ -21,7 +21,7 @@ export function setupLocalization(): void {
},
react: {
useSuspense: process.env.NODE_ENV == 'production',
useSuspense: import.meta.env.PROD,
},
})
}

View File

@ -1 +0,0 @@
/// <reference types="react-scripts" />

View File

@ -55,7 +55,7 @@ registerRoute(
// Return true to signal that we want to use the handler.
return true
},
createHandlerBoundToURL(process.env.PUBLIC_URL + '/index.html')
createHandlerBoundToURL(import.meta.env.BASE_URL + 'index.html')
)
// An example runtime caching route for requests that aren't handled by the

View File

@ -26,9 +26,9 @@ type Config = {
}
export function register(config?: Config) {
if (process.env.NODE_ENV === 'production' && 'serviceWorker' in navigator) {
if (import.meta.env.PROD && 'serviceWorker' in navigator) {
// The URL constructor is available in all browsers that support SW.
const publicUrl = new URL(process.env.PUBLIC_URL, window.location.href)
const publicUrl = new URL(import.meta.env.BASE_URL, window.location.href)
if (publicUrl.origin !== window.location.origin) {
// Our service worker won't work if PUBLIC_URL is on a different origin
// from what our page is served on. This might happen if a CDN is used to
@ -37,7 +37,7 @@ export function register(config?: Config) {
}
window.addEventListener('load', () => {
const swUrl = `${process.env.PUBLIC_URL}/service-worker.js`
const swUrl = `${import.meta.env.BASE_URL}service-worker.js`
if (isLocalhost) {
// This is running on localhost. Let's check if a service worker still exists or not.

1
ui/src/vite.env.d.ts vendored Normal file
View File

@ -0,0 +1 @@
/// <reference types="vite/client" />

View File

@ -1,5 +1,6 @@
module.exports = {
mode: 'jit',
content: ['./index.html', './src/**/*.{js,ts,jsx,tsx}'],
purge: ['./index.html', './src/**/*.{js,ts,jsx,tsx}'],
darkMode: 'class',
theme: {

View File

@ -5,5 +5,7 @@
import '@testing-library/jest-dom'
import '@testing-library/user-event'
import { setupLocalization } from '../src/localization'
// setup localization to make it easier to select elements by text
require('../src/localization').setupLocalization()
setupLocalization()

View File

@ -1,21 +1,21 @@
{
"compilerOptions": {
"target": "es5",
"lib": ["dom", "dom.iterable", "esnext"],
"allowJs": true,
"target": "ESNext",
"useDefineForClassFields": true,
"lib": ["DOM", "DOM.Iterable", "ESNext"],
"allowJs": false,
"skipLibCheck": true,
"esModuleInterop": true,
"esModuleInterop": false,
"allowSyntheticDefaultImports": true,
"strict": true,
"forceConsistentCasingInFileNames": true,
"noFallthroughCasesInSwitch": true,
"module": "esnext",
"moduleResolution": "node",
"module": "ESNext",
"moduleResolution": "Node",
"resolveJsonModule": true,
"isolatedModules": true,
"noEmit": true,
"jsx": "react-jsx"
"jsx": "react-jsx",
"types": ["vitest/globals", "@testing-library/jest-dom"]
},
"include": ["src"],
"exclude": ["./node_modules", "./build"]
"include": ["src", "vite.config.ts"]
}

25
ui/vite.config.js Normal file
View File

@ -0,0 +1,25 @@
/// <reference types="vitest" />
/// <reference types="vite/client" />
import { defineConfig } from 'vite'
import svgr from 'vite-plugin-svgr'
import react from '@vitejs/plugin-react'
export default defineConfig({
plugins: [react(), svgr()],
envPrefix: ['VITE_', 'REACT_APP_'],
server: {
port: 1234,
},
esbuild: {
logOverride: { 'this-is-undefined-in-esm': 'silent' },
},
test: {
globals: true,
environment: 'jsdom',
setupFiles: './testing/setupTests.ts',
coverage: {
reporter: ['text', 'json', 'html'],
},
},
})