
Merge branch 'master' into add_swipe_gesture

Commit 4169a2c102 by PJ-Watson, 2022-08-12 15:42:18 +10:00, committed by GitHub
173 changed files with 24442 additions and 26791 deletions

View File

@ -19,6 +19,7 @@ jobs:
runs-on: ubuntu-20.04
strategy:
fail-fast: false
matrix:
target_platform:
- "linux/amd64"

View File

@ -120,7 +120,7 @@ jobs:
strategy:
matrix:
node-version: [15.x]
node-version: [18.x]
steps:
- uses: actions/checkout@v2

.gitignore
View File

@ -1,7 +1,8 @@
# See https://help.github.com/ignore-files/ for more about ignoring files.
cache/
media_cache/
/media_cache/
/api/media_cache/
/photos_path
photoview.db
photoview.db-journal

View File

@ -1,5 +1,5 @@
### Build UI ###
FROM --platform=${BUILDPLATFORM:-linux/amd64} node:15 as ui
FROM --platform=${BUILDPLATFORM:-linux/amd64} node:18 as ui
ARG REACT_APP_API_ENDPOINT
ENV REACT_APP_API_ENDPOINT=${REACT_APP_API_ENDPOINT}
@ -29,7 +29,7 @@ RUN npm ci --omit=dev --ignore-scripts
# Build frontend
COPY ui /app
RUN npm run build -- --public-url $UI_PUBLIC_URL
RUN npm run build -- --base=$UI_PUBLIC_URL
### Build API ###
FROM --platform=${BUILDPLATFORM:-linux/amd64} debian:bookworm AS api
@ -75,7 +75,7 @@ COPY api/data /app/data
RUN apt update \
# Required dependencies
&& apt install -y curl gpg libdlib19 ffmpeg exiftool libheif1
&& apt install -y curl gpg libdlib19.1 ffmpeg exiftool libheif1
# Install Darktable if building for a supported architecture
RUN if [ "${TARGETPLATFORM}" = "linux/amd64" ] || [ "${TARGETPLATFORM}" = "linux/arm64" ]; then \
@ -87,7 +87,7 @@ RUN apt purge -y gpg \
&& apt clean \
&& rm -rf /var/lib/apt/lists/*
COPY --from=ui /app/build /ui
COPY --from=ui /app/dist /ui
COPY --from=api /app/photoview /app/photoview
ENV PHOTOVIEW_LISTEN_IP 127.0.0.1

View File

@ -195,4 +195,10 @@ And the graphql playground at [localhost:4001](http://localhost:4001)
<b>@FKrauss</b>
</a>
</td>
<td>
<a href="https://github.com/jupblb">
<img src="https://avatars.githubusercontent.com/u/3370617?v=4" height="auto" width="100" style="border-radius:50%"><br/>
<b>@jupblb</b>
</a>
</td>
</table>

View File

@ -118,6 +118,15 @@ func migrate_exif_fields_flash(db *gorm.DB) error {
err := db.Transaction(func(tx *gorm.DB) error {
var data_type string
if err := tx.Raw("SELECT data_type FROM information_schema.columns WHERE table_name = 'media_exif' AND column_name = 'flash';").Find(&data_type).Error; err != nil {
return errors.Wrapf(err, "read data_type of column media_exif.flash")
}
if data_type == "bigint" {
return nil
}
if err := tx.Exec("UPDATE media_exif SET flash = NULL WHERE flash = ''").Error; err != nil {
return errors.Wrapf(err, "convert flash attribute empty values to NULL")
}
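The added guard reads the column's data type from information_schema and turns the migration into a no-op once media_exif.flash is already a bigint; empty strings are cleared to NULL first so a later conversion to an integer type cannot fail on the cast. A minimal standalone sketch of the same check-then-normalize pattern with GORM (Postgres assumed, DSN hypothetical; the project's real migration continues beyond this hunk):

package main

import (
	"log"

	"gorm.io/driver/postgres"
	"gorm.io/gorm"
)

// migrateFlashColumn mirrors the guard shown above: skip if the column is
// already a bigint, otherwise clear empty strings so a later ALTER COLUMN
// to an integer type cannot fail on the cast.
func migrateFlashColumn(db *gorm.DB) error {
	return db.Transaction(func(tx *gorm.DB) error {
		var dataType string
		err := tx.Raw(
			"SELECT data_type FROM information_schema.columns WHERE table_name = 'media_exif' AND column_name = 'flash'",
		).Scan(&dataType).Error
		if err != nil {
			return err
		}
		if dataType == "bigint" {
			return nil // already migrated
		}
		return tx.Exec("UPDATE media_exif SET flash = NULL WHERE flash = ''").Error
	})
}

func main() {
	// Hypothetical DSN; any GORM dialect exposing information_schema works.
	db, err := gorm.Open(postgres.Open("host=localhost user=photoview dbname=photoview"), &gorm.Config{})
	if err != nil {
		log.Fatal(err)
	}
	if err := migrateFlashColumn(db); err != nil {
		log.Fatal(err)
	}
}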

View File

@ -1,39 +1,54 @@
module github.com/photoview/photoview/api
go 1.16
go 1.18
require (
github.com/99designs/gqlgen v0.16.0
github.com/99designs/gqlgen v0.17.12
github.com/Kagami/go-face v0.0.0-20210630145111-0c14797b4d0e
github.com/agnivade/levenshtein v1.1.1 // indirect
github.com/barasher/go-exiftool v1.7.0
github.com/barasher/go-exiftool v1.8.0
github.com/buckket/go-blurhash v1.1.0
github.com/disintegration/imaging v1.6.2
github.com/felixge/httpsnoop v1.0.2 // indirect
github.com/go-sql-driver/mysql v1.6.0
github.com/gorilla/handlers v1.5.1
github.com/gorilla/mux v1.8.0
github.com/gorilla/websocket v1.4.2
github.com/gorilla/websocket v1.5.0
github.com/h2non/filetype v1.1.3
github.com/hashicorp/golang-lru v0.5.4 // indirect
github.com/jackc/pgx/v4 v4.14.1 // indirect
github.com/joho/godotenv v1.4.0
github.com/mattn/go-sqlite3 v1.14.11 // indirect
github.com/mitchellh/mapstructure v1.4.3 // indirect
github.com/otiai10/copy v1.7.0
github.com/pkg/errors v0.9.1
github.com/sabhiram/go-gitignore v0.0.0-20210923224102-525f6e181f06
github.com/stretchr/testify v1.7.0
github.com/stretchr/testify v1.8.0
github.com/strukturag/libheif v1.12.0
github.com/vektah/gqlparser/v2 v2.3.1
github.com/vektah/gqlparser/v2 v2.4.6
github.com/wsxiaoys/terminal v0.0.0-20160513160801-0940f3fc43a0
github.com/xor-gate/goexif2 v1.1.0
golang.org/x/crypto v0.0.0-20220128200615-198e4374d7ed
golang.org/x/image v0.0.0-20211028202545-6944b10bf410
golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d
golang.org/x/image v0.0.0-20220617043117-41969df76e82
gopkg.in/vansante/go-ffprobe.v2 v2.0.3
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b // indirect
gorm.io/driver/mysql v1.2.3
gorm.io/driver/postgres v1.2.3
gorm.io/driver/sqlite v1.2.6
gorm.io/gorm v1.22.5
gorm.io/driver/mysql v1.3.4
gorm.io/driver/postgres v1.3.8
gorm.io/driver/sqlite v1.3.5
gorm.io/gorm v1.23.7
)
require (
github.com/agnivade/levenshtein v1.1.1 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/felixge/httpsnoop v1.0.3 // indirect
github.com/hashicorp/golang-lru v0.5.4 // indirect
github.com/jackc/chunkreader/v2 v2.0.1 // indirect
github.com/jackc/pgconn v1.12.1 // indirect
github.com/jackc/pgio v1.0.0 // indirect
github.com/jackc/pgpassfile v1.0.0 // indirect
github.com/jackc/pgproto3/v2 v2.3.0 // indirect
github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b // indirect
github.com/jackc/pgtype v1.11.0 // indirect
github.com/jackc/pgx/v4 v4.16.1 // indirect
github.com/jinzhu/inflection v1.0.0 // indirect
github.com/jinzhu/now v1.1.5 // indirect
github.com/mattn/go-sqlite3 v1.14.14 // indirect
github.com/mitchellh/mapstructure v1.5.0 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
golang.org/x/text v0.3.7 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
)

View File

@ -1,27 +1,26 @@
github.com/99designs/gqlgen v0.16.0 h1:7Qc4Ll3mfN3doAyUWOgtGLcBGu+KDgK48HdkBGLZVFs=
github.com/99designs/gqlgen v0.16.0/go.mod h1:nbeSjFkqphIqpZsYe1ULVz0yfH8hjpJdJIQoX/e0G2I=
github.com/99designs/gqlgen v0.17.12 h1:lH/H5dTYCY5eLNRKXeq22l0wFMavpOnN6v9GAIw+fxY=
github.com/99designs/gqlgen v0.17.12/go.mod h1:w1brbeOdqVyNJI553BGwtwdVcYu1LKeYE1opLWN9RgQ=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/BurntSushi/toml v1.1.0/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ=
github.com/Kagami/go-face v0.0.0-20210630145111-0c14797b4d0e h1:lqIUFzxaqyYqUn4MhzAvSAh4wIte/iLNcIEWxpT/qbc=
github.com/Kagami/go-face v0.0.0-20210630145111-0c14797b4d0e/go.mod h1:9wdDJkRgo3SGTcFwbQ7elVIQhIr2bbBjecuY7VoqmPU=
github.com/Masterminds/semver/v3 v3.1.1 h1:hLg3sBzpNErnxhQtUy/mmLR2I9foDujNK030IGemrRc=
github.com/Masterminds/semver/v3 v3.1.1/go.mod h1:VPu/7SZ7ePZ3QOrcuXROw5FAcLl4a0cBrbBpGY/8hQs=
github.com/agnivade/levenshtein v1.0.1/go.mod h1:CURSv5d9Uaml+FovSIICkLbAUZ9S4RqaHDIsdSBg7lM=
github.com/agnivade/levenshtein v1.1.0/go.mod h1:veldBMzWxcCG2ZvUTKD2kJNRdCk5hVbJomOvKkmgYbo=
github.com/agnivade/levenshtein v1.1.1 h1:QY8M92nrzkmr798gCo3kmMyqXFzdQVpxLlGPRBij0P8=
github.com/agnivade/levenshtein v1.1.1/go.mod h1:veldBMzWxcCG2ZvUTKD2kJNRdCk5hVbJomOvKkmgYbo=
github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883 h1:bvNMNQO63//z+xNgfBlViaCIJKLlCJ6/fmUseuG0wVQ=
github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo29Kk6CurOXKm700vrz8f0KW0JNfpkRJY/8=
github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0 h1:jfIu9sQUG6Ig+0+Ap1h4unLjW6YQJpKZVmUzxsD4E/Q=
github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0/go.mod h1:t2tdKJDJF9BV14lnkjHmOQgcvEKgtqs5a1N3LNdJhGE=
github.com/barasher/go-exiftool v1.7.0 h1:EOGb5D6TpWXmqsnEjJ0ai6+tIW2gZFwIoS9O/33Nixs=
github.com/barasher/go-exiftool v1.7.0/go.mod h1:F9s/a3uHSM8YniVfwF+sbQUtP8Gmh9nyzigNF+8vsWo=
github.com/barasher/go-exiftool v1.8.0 h1:u8bEi1mhLtpVC5aG/ZJlRS/r+SkK+rcgbZQwcKUb424=
github.com/barasher/go-exiftool v1.8.0/go.mod h1:F9s/a3uHSM8YniVfwF+sbQUtP8Gmh9nyzigNF+8vsWo=
github.com/buckket/go-blurhash v1.1.0 h1:X5M6r0LIvwdvKiUtiNcRL2YlmOfMzYobI3VCKCZc9Do=
github.com/buckket/go-blurhash v1.1.0/go.mod h1:aT2iqo5W9vu9GpyoLErKfTHwgODsZp3bQfXjXJUxNb8=
github.com/cockroachdb/apd v1.1.0 h1:3LFP3629v+1aKXU5Q37mxmRxX/pIu1nijXydLShEq5I=
github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMeY4+DwBQ=
github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4=
github.com/coreos/go-systemd v0.0.0-20190719114852-fd7a80b32e1f/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4=
github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
github.com/cpuguy83/go-md2man/v2 v2.0.1/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
@ -32,8 +31,8 @@ github.com/dgryski/trifles v0.0.0-20200323201526-dd97f9abfb48/go.mod h1:if7Fbed8
github.com/disintegration/imaging v1.6.2 h1:w1LecBlG2Lnp8B3jk5zSuNqd7b4DXhcjwek1ei82L+c=
github.com/disintegration/imaging v1.6.2/go.mod h1:44/5580QXChDfwIclfc/PCwrr44amcmDAg8hxG0Ewe4=
github.com/felixge/httpsnoop v1.0.1/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
github.com/felixge/httpsnoop v1.0.2 h1:+nS9g82KMXccJ/wp0zyRW9ZBHFETmMGtkk+2CTTrW4o=
github.com/felixge/httpsnoop v1.0.2/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
github.com/felixge/httpsnoop v1.0.3 h1:s/nj+GCswXYzN5v2DpNMuMQYe+0DDwt5WVCU6CWBdXk=
github.com/felixge/httpsnoop v1.0.3/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY=
github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A=
github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE=
@ -41,19 +40,19 @@ github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LB
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
github.com/gofrs/uuid v4.0.0+incompatible h1:1SD/1F5pU8p29ybwgQSwpQk+mwdRrXCYuPhW6m+TnJw=
github.com/gofrs/uuid v4.0.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM=
github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
github.com/gorilla/handlers v1.5.1 h1:9lRY6j8DEeeBT10CvO9hGW0gmky0BprnvDI5vfhUHH4=
github.com/gorilla/handlers v1.5.1/go.mod h1:t8XrUpc4KVXb7HGyJ4/cEnwQiaxrX/hz1Zv/4g96P1Q=
github.com/gorilla/mux v1.8.0 h1:i40aqfkR1h2SlN9hojwV5ZA91wcXFOvkdNIeFDP5koI=
github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So=
github.com/gorilla/websocket v1.4.2 h1:+/TMaTYc4QFitKJxsQ7Yye35DkWvkdLcvGKqM+x0Ufc=
github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
github.com/gorilla/websocket v1.5.0 h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWmnc=
github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
github.com/h2non/filetype v1.1.3 h1:FKkx9QbD7HR/zjK1Ia5XiBsq9zdLi5Kf3zGyFTAFkGg=
github.com/h2non/filetype v1.1.3/go.mod h1:319b3zT68BvV+WRj7cwy856M2ehB3HqNOt6sy1HndBY=
github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
github.com/hashicorp/golang-lru v0.5.4 h1:YDjusn29QI/Das2iO9M0BHnIbxPeyuCHsjMW+lJfyTc=
github.com/hashicorp/golang-lru v0.5.4/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4=
github.com/jackc/chunkreader v1.0.0 h1:4s39bBR8ByfqH+DKm8rQA3E1LHZWB9XWcrz8fqaZbe0=
github.com/jackc/chunkreader v1.0.0/go.mod h1:RT6O25fNZIuasFJRyZ4R/Y2BbhasbmZXF9QQ7T3kePo=
github.com/jackc/chunkreader/v2 v2.0.0/go.mod h1:odVSm741yZoC3dpHEUXIqA9tQRhFrgOHwnPIn9lDKlk=
github.com/jackc/chunkreader/v2 v2.0.1 h1:i+RDz65UE+mmpjTfyz0MoVTnzeYxroil2G82ki7MGG8=
@ -64,8 +63,8 @@ github.com/jackc/pgconn v0.0.0-20190831204454-2fabfa3c18b7/go.mod h1:ZJKsE/KZfsU
github.com/jackc/pgconn v1.8.0/go.mod h1:1C2Pb36bGIP9QHGBYCjnyhqu7Rv3sGshaQUvmfGIB/o=
github.com/jackc/pgconn v1.9.0/go.mod h1:YctiPyvzfU11JFxoXokUOOKQXQmDMoJL9vJzHH8/2JY=
github.com/jackc/pgconn v1.9.1-0.20210724152538-d89c8390a530/go.mod h1:4z2w8XhRbP1hYxkpTuBjTS3ne3J48K83+u0zoyvg2pI=
github.com/jackc/pgconn v1.10.1 h1:DzdIHIjG1AxGwoEEqS+mGsURyjt4enSmqzACXvVzOT8=
github.com/jackc/pgconn v1.10.1/go.mod h1:4z2w8XhRbP1hYxkpTuBjTS3ne3J48K83+u0zoyvg2pI=
github.com/jackc/pgconn v1.12.1 h1:rsDFzIpRk7xT4B8FufgpCCeyjdNpKyghZeSefViE5W8=
github.com/jackc/pgconn v1.12.1/go.mod h1:ZkhRC59Llhrq3oSfrikvwQ5NaxYExr6twkdkMLaKono=
github.com/jackc/pgio v1.0.0 h1:g12B9UwVnzGhueNavwioyEEpAmqMe1E/BN9ES+8ovkE=
github.com/jackc/pgio v1.0.0/go.mod h1:oP+2QK2wFfUWgr+gxjoBH9KGBb31Eio69xUb0w5bYf8=
github.com/jackc/pgmock v0.0.0-20190831213851-13a1b77aafa2/go.mod h1:fGZlG77KXmcq05nJLRkk0+p82V8B8Dw8KN2/V9c/OAE=
@ -74,7 +73,6 @@ github.com/jackc/pgmock v0.0.0-20210724152146-4ad1a8207f65 h1:DadwsjnMwFjfWc9y5W
github.com/jackc/pgmock v0.0.0-20210724152146-4ad1a8207f65/go.mod h1:5R2h2EEX+qri8jOWMbJCtaPWkrrNc7OHwsp2TCqp7ak=
github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM=
github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg=
github.com/jackc/pgproto3 v1.1.0 h1:FYYE4yRw+AgI8wXIinMlNjBbp/UitDJwfj5LqqewP1A=
github.com/jackc/pgproto3 v1.1.0/go.mod h1:eR5FA3leWg7p9aeAqi37XOTgTIbkABlvcPB3E5rlc78=
github.com/jackc/pgproto3/v2 v2.0.0-alpha1.0.20190420180111-c116219b62db/go.mod h1:bhq50y+xrl9n5mRYyCBFKkpRVTLYJVWeCc+mEAI3yXA=
github.com/jackc/pgproto3/v2 v2.0.0-alpha1.0.20190609003834-432c2951c711/go.mod h1:uH0AWtUmuShn0bcesswc4aBTWGvw0cAxIJp+6OB//Wg=
@ -82,34 +80,31 @@ github.com/jackc/pgproto3/v2 v2.0.0-rc3/go.mod h1:ryONWYqW6dqSg1Lw6vXNMXoBJhpzvW
github.com/jackc/pgproto3/v2 v2.0.0-rc3.0.20190831210041-4c03ce451f29/go.mod h1:ryONWYqW6dqSg1Lw6vXNMXoBJhpzvWKnT95C46ckYeM=
github.com/jackc/pgproto3/v2 v2.0.6/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA=
github.com/jackc/pgproto3/v2 v2.1.1/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA=
github.com/jackc/pgproto3/v2 v2.2.0 h1:r7JypeP2D3onoQTCxWdTpCtJ4D+qpKr0TxvoyMhZ5ns=
github.com/jackc/pgproto3/v2 v2.2.0/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA=
github.com/jackc/pgproto3/v2 v2.3.0 h1:brH0pCGBDkBW07HWlN/oSBXrmo3WB0UvZd1pIuDcL8Y=
github.com/jackc/pgproto3/v2 v2.3.0/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA=
github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b h1:C8S2+VttkHFdOOCXJe+YGfa4vHYwlt4Zx+IVXQ97jYg=
github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b/go.mod h1:vsD4gTJCa9TptPL8sPkXrLZ+hDuNrZCnj29CQpr4X1E=
github.com/jackc/pgtype v0.0.0-20190421001408-4ed0de4755e0/go.mod h1:hdSHsc1V01CGwFsrv11mJRHWJ6aifDLfdV3aVjFF0zg=
github.com/jackc/pgtype v0.0.0-20190824184912-ab885b375b90/go.mod h1:KcahbBH1nCMSo2DXpzsoWOAfFkdEtEJpPbVLq8eE+mc=
github.com/jackc/pgtype v0.0.0-20190828014616-a8802b16cc59/go.mod h1:MWlu30kVJrUS8lot6TQqcg7mtthZ9T0EoIBFiJcmcyw=
github.com/jackc/pgtype v1.8.1-0.20210724151600-32e20a603178/go.mod h1:C516IlIV9NKqfsMCXTdChteoXmwgUceqaLfjg2e3NlM=
github.com/jackc/pgtype v1.9.0/go.mod h1:LUMuVrfsFfdKGLw+AFFVv6KtHOFMwRgDDzBt76IqCA4=
github.com/jackc/pgtype v1.9.1 h1:MJc2s0MFS8C3ok1wQTdQxWuXQcB6+HwAm5x1CzW7mf0=
github.com/jackc/pgtype v1.9.1/go.mod h1:LUMuVrfsFfdKGLw+AFFVv6KtHOFMwRgDDzBt76IqCA4=
github.com/jackc/pgtype v1.11.0 h1:u4uiGPz/1hryuXzyaBhSk6dnIyyG2683olG2OV+UUgs=
github.com/jackc/pgtype v1.11.0/go.mod h1:LUMuVrfsFfdKGLw+AFFVv6KtHOFMwRgDDzBt76IqCA4=
github.com/jackc/pgx/v4 v4.0.0-20190420224344-cc3461e65d96/go.mod h1:mdxmSJJuR08CZQyj1PVQBHy9XOp5p8/SHH6a0psbY9Y=
github.com/jackc/pgx/v4 v4.0.0-20190421002000-1b8f0016e912/go.mod h1:no/Y67Jkk/9WuGR0JG/JseM9irFbnEPbuWV2EELPNuM=
github.com/jackc/pgx/v4 v4.0.0-pre1.0.20190824185557-6972a5742186/go.mod h1:X+GQnOEnf1dqHGpw7JmHqHc1NxDoalibchSk9/RWuDc=
github.com/jackc/pgx/v4 v4.12.1-0.20210724153913-640aa07df17c/go.mod h1:1QD0+tgSXP7iUjYm9C1NxKhny7lq6ee99u/z+IHFcgs=
github.com/jackc/pgx/v4 v4.14.0/go.mod h1:jT3ibf/A0ZVCp89rtCIN0zCJxcE74ypROmHEZYsG/j8=
github.com/jackc/pgx/v4 v4.14.1 h1:71oo1KAGI6mXhLiTMn6iDFcp3e7+zon/capWjl2OEFU=
github.com/jackc/pgx/v4 v4.14.1/go.mod h1:RgDuE4Z34o7XE92RpLsvFiOEfrAUT0Xt2KxvX73W06M=
github.com/jackc/pgx/v4 v4.16.1 h1:JzTglcal01DrghUqt+PmzWsZx/Yh7SC/CTQmSBMTd0Y=
github.com/jackc/pgx/v4 v4.16.1/go.mod h1:SIhx0D5hoADaiXZVyv+3gSm3LCIIINTVO0PficsvWGQ=
github.com/jackc/puddle v0.0.0-20190413234325-e4ced69a3a2b/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk=
github.com/jackc/puddle v0.0.0-20190608224051-11cab39313c9/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk=
github.com/jackc/puddle v1.1.3/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk=
github.com/jackc/puddle v1.2.0/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk=
github.com/jackc/puddle v1.2.1/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk=
github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E=
github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc=
github.com/jinzhu/now v1.1.2/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8=
github.com/jinzhu/now v1.1.3/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8=
github.com/jinzhu/now v1.1.4 h1:tHnRBy1i5F2Dh8BAFxqFzxKqqvezXrL2OW1TnX+Mlas=
github.com/jinzhu/now v1.1.4/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8=
github.com/jinzhu/now v1.1.5 h1:/o9tlHleP7gOFmsnYNz3RGnqzefHA47wQpKrrdTIwXQ=
github.com/jinzhu/now v1.1.5/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8=
github.com/joho/godotenv v1.4.0 h1:3l4+N6zfMWnkbPEXKng2o2/MR5mSwTrBih4ZEkkz1lg=
github.com/joho/godotenv v1.4.0/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4=
github.com/kevinmbeaulieu/eq-go v1.0.0/go.mod h1:G3S8ajA56gKBZm4UB9AOyoOS37JO3roToPzKNM8dtdM=
@ -128,20 +123,20 @@ github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/lib/pq v1.10.2 h1:AqzbZs4ZoCBp+GtejcpCpcxM3zlSMx29dXbUSeVtJb8=
github.com/lib/pq v1.10.2/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
github.com/logrusorgru/aurora/v3 v3.0.0/go.mod h1:vsR12bk5grlLvLXAYrBsb5Oc/N+LxAlxggSjiwMnCUc=
github.com/matryer/moq v0.2.3/go.mod h1:9RtPYjTnH1bSBIkpvtHkFN7nbWAnO7oRpdJkEIn6UtE=
github.com/matryer/moq v0.2.7/go.mod h1:kITsx543GOENm48TUAQyJ9+SAvFSr7iGQXPoth/VUBk=
github.com/mattn/go-colorable v0.1.1/go.mod h1:FuOcm+DKB9mbwrcAfNl7/TZVBZ6rcnceauSikq3lYCQ=
github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE=
github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc=
github.com/mattn/go-colorable v0.1.12/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4=
github.com/mattn/go-isatty v0.0.5/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
github.com/mattn/go-isatty v0.0.7/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=
github.com/mattn/go-sqlite3 v1.14.9/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU=
github.com/mattn/go-sqlite3 v1.14.11 h1:gt+cp9c0XGqe9S/wAHTL3n/7MqY+siPWgWJgqdsFrzQ=
github.com/mattn/go-sqlite3 v1.14.11/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU=
github.com/mitchellh/mapstructure v1.2.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
github.com/mitchellh/mapstructure v1.4.3 h1:OVowDSCllw/YjdLkam3/sm7wEtOy59d8ndGgCcyj8cs=
github.com/mitchellh/mapstructure v1.4.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94=
github.com/mattn/go-sqlite3 v1.14.12/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU=
github.com/mattn/go-sqlite3 v1.14.14 h1:qZgc/Rwetq+MtyE18WhzjokPD93dNqLGNT3QJuLvBGw=
github.com/mattn/go-sqlite3 v1.14.14/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU=
github.com/mitchellh/mapstructure v1.3.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY=
github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
github.com/otiai10/copy v1.7.0 h1:hVoPiN+t+7d2nzzwMiDHPSOogsWAStewq3TwU05+clE=
github.com/otiai10/copy v1.7.0/go.mod h1:rmRl6QPdJj6EiUqXQ/4Nn2lLXoNQjFCQbbNrxgc/t3U=
github.com/otiai10/curr v0.0.0-20150429015615-9b4961190c95/go.mod h1:9qAhocn7zKJG+0mI8eUu6xqkFDYS2kb2saOteoSB3cE=
@ -158,7 +153,6 @@ github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFR
github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ=
github.com/rs/zerolog v1.13.0/go.mod h1:YbFCdg8HfsridGWAh22vktObvhZbQsZXe4/zB0OKkWU=
github.com/rs/zerolog v1.15.0/go.mod h1:xYTKnLHcpfU2225ny5qZjxnj9NvkumZYjJHlAThCjNc=
github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/sabhiram/go-gitignore v0.0.0-20210923224102-525f6e181f06 h1:OkMGxebDjyw0ULyrTYWeN0UNCCkmCWfjPnIA2W6oviI=
github.com/sabhiram/go-gitignore v0.0.0-20210923224102-525f6e181f06/go.mod h1:+ePHsJ1keEjQtpvf9HHw0f4ZeJ0TLRsxhunSI2hYJSs=
@ -168,31 +162,32 @@ github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNX
github.com/shopspring/decimal v0.0.0-20180709203117-cd690d0c9e24/go.mod h1:M+9NzErvs504Cn4c5DxATwIqPbtswREoFCre64PpcG4=
github.com/shopspring/decimal v1.2.0 h1:abSATXmQEYyShuxI4/vyW3tV1MrKAJzCZ/0zLUXYbsQ=
github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o=
github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc=
github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q=
github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.0 h1:pSgiaMZlXftHpm5L7V1+rVB+AZJydKsMxsQBIJw4PKk=
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
github.com/strukturag/libheif v1.12.0 h1:Z5V5lCC5xbI59V77b4kGXcHsOohA6tQIvzLGIjhLsfY=
github.com/strukturag/libheif v1.12.0/go.mod h1:E/PNRlmVtrtj9j2AvBZlrO4dsBDu6KfwDZn7X1Ce8Ks=
github.com/urfave/cli/v2 v2.3.0/go.mod h1:LJmUH05zAU44vOAcrfzZQKsZbVcdbOG8rtL3/XcUArI=
github.com/vektah/gqlparser/v2 v2.2.0/go.mod h1:i3mQIGIrbK2PD1RrCeMTlVbkF2FJ6WkU1KJlJlC+3F4=
github.com/vektah/gqlparser/v2 v2.3.1 h1:blIC0fCxGIr9pVjsc+BVI8XjYUtc2nCFRfnmP7FuFMk=
github.com/vektah/gqlparser/v2 v2.3.1/go.mod h1:i3mQIGIrbK2PD1RrCeMTlVbkF2FJ6WkU1KJlJlC+3F4=
github.com/urfave/cli/v2 v2.8.1/go.mod h1:Z41J9TPoffeoqP0Iza0YbAhGvymRdZAd2uPmZ5JxRdY=
github.com/vektah/gqlparser/v2 v2.4.6 h1:Yjzp66g6oVq93Jihbi0qhGnf/6zIWjcm8H6gA27zstE=
github.com/vektah/gqlparser/v2 v2.4.6/go.mod h1:flJWIR04IMQPGz+BXLrORkrARBxv/rtyIAFvd/MceW0=
github.com/wsxiaoys/terminal v0.0.0-20160513160801-0940f3fc43a0 h1:3UeQBvD0TFrlVjOeLOBz+CPAI8dnbqNSVwUwRrkp7vQ=
github.com/wsxiaoys/terminal v0.0.0-20160513160801-0940f3fc43a0/go.mod h1:IXCdmsXIht47RaVFLEdVnh1t+pgYtTAhQGj73kz+2DM=
github.com/xor-gate/goexif2 v1.1.0 h1:OvTZ5iEvsDhRWFjV5xY3wT7uHFna28nSSP7ucau+cXQ=
github.com/xor-gate/goexif2 v1.1.0/go.mod h1:eRjn3VSkAwpNpxEx/CGmd0zg0JFGL3akrSMxnJ581AY=
github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
github.com/xrash/smetrics v0.0.0-20201216005158-039620a65673/go.mod h1:N3UwUGtsrSj3ccvlPHLoLsHnpR27oXr4ZE984MbSER8=
github.com/yuin/goldmark v1.4.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
github.com/zenazn/goji v0.9.0/go.mod h1:7S9M489iMyHBNxwZnk9/EHS098H4/F6TATF2mIxtB1Q=
go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE=
go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE=
@ -215,26 +210,23 @@ golang.org/x/crypto v0.0.0-20201203163018-be400aefbc4c/go.mod h1:jdWPYTVW3xRLrWP
golang.org/x/crypto v0.0.0-20210616213533-5ff15b29337e/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.0.0-20220128200615-198e4374d7ed h1:YoWVYYAfvQ4ddHv3OKmIvX7NCAhFGTj62VP2l2kfBbA=
golang.org/x/crypto v0.0.0-20220128200615-198e4374d7ed/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d h1:sK3txAijHtOK88l68nt020reeT1ZdKLIYetKl95FzVY=
golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/image v0.0.0-20211028202545-6944b10bf410 h1:hTftEOvwiOq2+O8k2D5/Q7COC7k5Qcrgc2TFURJYnvQ=
golang.org/x/image v0.0.0-20211028202545-6944b10bf410/go.mod h1:023OzeP/+EPmXeapQh35lcL3II3LrY8Ic+EFFKVhULM=
golang.org/x/image v0.0.0-20220617043117-41969df76e82 h1:KpZB5pUSBvrHltNEdK/tw0xlPeD13M6M6aGP32gKqiw=
golang.org/x/image v0.0.0-20220617043117-41969df76e82/go.mod h1:doUCurBvlfPMKfmIpRIywoHmhN3VyhnoFDbvIEWF4hY=
golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc=
golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.5.1/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro=
golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3/go.mod h1:3p9vT2HGsQu2K1YbXdKPJLVgG5VJdoTa1poYQBtP1AY=
golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20190813141303-74dc4d7220e7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM=
golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
@ -246,12 +238,12 @@ golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20211019181941-9d821ace8654/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
@ -262,7 +254,6 @@ golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7 h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190125232054-d66bd3c5d5a6/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190425163242-31fd60d6bfdc/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
@ -271,14 +262,16 @@ golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtn
golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20200103221440-774c71fcf114/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200815165600-90abf76919f3/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/tools v0.1.9/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU=
golang.org/x/tools v0.1.10/go.mod h1:Uh6Zz+xoGYZom868N8YTex3t7RhtHDBrE8Gzo9bV56E=
golang.org/x/xerrors v0.0.0-20190410155217-1f06c39b4373/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20190513163551-3ee3066db522/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo=
@ -288,20 +281,21 @@ gopkg.in/inconshreveable/log15.v2 v2.0.0-20180818164646-67afb5ed74ec/go.mod h1:a
gopkg.in/vansante/go-ffprobe.v2 v2.0.3 h1:nmR7yLalb5p5UmXhXUYnrsIkbb8j7h2OYMDoLETCQ5U=
gopkg.in/vansante/go-ffprobe.v2 v2.0.3/go.mod h1:qF0AlAjk7Nqzqf3y333Ly+KxN3cKF2JqA3JT5ZheUGE=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.4 h1:/eiJrUcujPVeJ3xlSWaiNi3uSVmDGBK1pDHUHAnao1I=
gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b h1:h8qDotaEPuJATrMmW04NCwg7v22aHH28wwpauUhK9Oo=
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gorm.io/driver/mysql v1.2.3 h1:cZqzlOfg5Kf1VIdLC1D9hT6Cy9BgxhExLj/2tIgUe7Y=
gorm.io/driver/mysql v1.2.3/go.mod h1:qsiz+XcAyMrS6QY+X3M9R6b/lKM1imKmcuK9kac5LTo=
gorm.io/driver/postgres v1.2.3 h1:f4t0TmNMy9gh3TU2PX+EppoA6YsgFnyq8Ojtddb42To=
gorm.io/driver/postgres v1.2.3/go.mod h1:pJV6RgYQPG47aM1f0QeOzFH9HxQc8JcmAgjRCgS0wjs=
gorm.io/driver/sqlite v1.2.6 h1:SStaH/b+280M7C8vXeZLz/zo9cLQmIGwwj3cSj7p6l4=
gorm.io/driver/sqlite v1.2.6/go.mod h1:gyoX0vHiiwi0g49tv+x2E7l8ksauLK0U/gShcdUsjWY=
gorm.io/gorm v1.22.3/go.mod h1:F+OptMscr0P2F2qU97WT1WimdH9GaQPoDW7AYd5i2Y0=
gorm.io/gorm v1.22.4/go.mod h1:1aeVC+pe9ZmvKZban/gW4QPra7PRoTEssyc922qCAkk=
gorm.io/gorm v1.22.5 h1:lYREBgc02Be/5lSCTuysZZDb6ffL2qrat6fg9CFbvXU=
gorm.io/gorm v1.22.5/go.mod h1:l2lP/RyAtc1ynaTjFksBde/O8v9oOGIApu2/xRitmZk=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gorm.io/driver/mysql v1.3.4 h1:/KoBMgsUHC3bExsekDcmNYaBnfH2WNeFuXqqrqMc98Q=
gorm.io/driver/mysql v1.3.4/go.mod h1:s4Tq0KmD0yhPGHbZEwg1VPlH0vT/GBHJZorPzhcxBUE=
gorm.io/driver/postgres v1.3.8 h1:8bEphSAB69t3odsCR4NDzt581iZEWQuRM27Cg6KgfPY=
gorm.io/driver/postgres v1.3.8/go.mod h1:qB98Aj6AhRO/oyu/jmZsi/YM9g6UzVCjMxO/6frFvcA=
gorm.io/driver/sqlite v1.3.5 h1:VmtQcbtN13YCUy8QNpKBBYklH0LMO7yQcmFGvRIJ/ws=
gorm.io/driver/sqlite v1.3.5/go.mod h1:Sg1/pvnKtbQ7jLXxfZa+jSHvoX8hoZA8cn4xllOMTgE=
gorm.io/gorm v1.23.4/go.mod h1:l2lP/RyAtc1ynaTjFksBde/O8v9oOGIApu2/xRitmZk=
gorm.io/gorm v1.23.6/go.mod h1:l2lP/RyAtc1ynaTjFksBde/O8v9oOGIApu2/xRitmZk=
gorm.io/gorm v1.23.7 h1:ww+9Mu5WwHKDSOQZFC4ipu/sgpKMr9EtrJ0uwBqNtB0=
gorm.io/gorm v1.23.7/go.mod h1:l2lP/RyAtc1ynaTjFksBde/O8v9oOGIApu2/xRitmZk=
honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg=

File diff suppressed because it is too large.

View File

@ -113,6 +113,7 @@ const (
LanguageTranslationTraditionalChinese LanguageTranslation = "TraditionalChinese"
LanguageTranslationSimplifiedChinese LanguageTranslation = "SimplifiedChinese"
LanguageTranslationPortuguese LanguageTranslation = "Portuguese"
LanguageTranslationBasque LanguageTranslation = "Basque"
)
var AllLanguageTranslation = []LanguageTranslation{
@ -128,11 +129,12 @@ var AllLanguageTranslation = []LanguageTranslation{
LanguageTranslationTraditionalChinese,
LanguageTranslationSimplifiedChinese,
LanguageTranslationPortuguese,
LanguageTranslationBasque,
}
func (e LanguageTranslation) IsValid() bool {
switch e {
case LanguageTranslationEnglish, LanguageTranslationFrench, LanguageTranslationItalian, LanguageTranslationSwedish, LanguageTranslationDanish, LanguageTranslationSpanish, LanguageTranslationPolish, LanguageTranslationGerman, LanguageTranslationRussian, LanguageTranslationTraditionalChinese, LanguageTranslationSimplifiedChinese, LanguageTranslationPortuguese:
case LanguageTranslationEnglish, LanguageTranslationFrench, LanguageTranslationItalian, LanguageTranslationSwedish, LanguageTranslationDanish, LanguageTranslationSpanish, LanguageTranslationPolish, LanguageTranslationGerman, LanguageTranslationRussian, LanguageTranslationTraditionalChinese, LanguageTranslationSimplifiedChinese, LanguageTranslationPortuguese, LanguageTranslationBasque:
return true
}
return false
@ -249,3 +251,53 @@ func (e *OrderDirection) UnmarshalGQL(v interface{}) error {
func (e OrderDirection) MarshalGQL(w io.Writer) {
fmt.Fprint(w, strconv.Quote(e.String()))
}
// Supported downsampling filters for thumbnail generation
type ThumbnailFilter string
const (
ThumbnailFilterNearestNeighbor ThumbnailFilter = "NearestNeighbor"
ThumbnailFilterBox ThumbnailFilter = "Box"
ThumbnailFilterLinear ThumbnailFilter = "Linear"
ThumbnailFilterMitchellNetravali ThumbnailFilter = "MitchellNetravali"
ThumbnailFilterCatmullRom ThumbnailFilter = "CatmullRom"
ThumbnailFilterLanczos ThumbnailFilter = "Lanczos"
)
var AllThumbnailFilter = []ThumbnailFilter{
ThumbnailFilterNearestNeighbor,
ThumbnailFilterBox,
ThumbnailFilterLinear,
ThumbnailFilterMitchellNetravali,
ThumbnailFilterCatmullRom,
ThumbnailFilterLanczos,
}
func (e ThumbnailFilter) IsValid() bool {
switch e {
case ThumbnailFilterNearestNeighbor, ThumbnailFilterBox, ThumbnailFilterLinear, ThumbnailFilterMitchellNetravali, ThumbnailFilterCatmullRom, ThumbnailFilterLanczos:
return true
}
return false
}
func (e ThumbnailFilter) String() string {
return string(e)
}
func (e *ThumbnailFilter) UnmarshalGQL(v interface{}) error {
str, ok := v.(string)
if !ok {
return fmt.Errorf("enums must be strings")
}
*e = ThumbnailFilter(str)
if !e.IsValid() {
return fmt.Errorf("%s is not a valid ThumbnailFilter", str)
}
return nil
}
func (e ThumbnailFilter) MarshalGQL(w io.Writer) {
fmt.Fprint(w, strconv.Quote(e.String()))
}
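The generated ThumbnailFilter follows the same pattern as the other gqlgen enums above: a string alias, an IsValid allow-list, and GQL (un)marshalling. A small sketch of how the type behaves when gqlgen binds an argument (import path taken from the go.mod module shown earlier):

package main

import (
	"fmt"

	"github.com/photoview/photoview/api/graphql/models"
)

func main() {
	// gqlgen calls UnmarshalGQL with the raw scalar when binding an argument.
	var filter models.ThumbnailFilter
	if err := filter.UnmarshalGQL("Lanczos"); err != nil {
		panic(err)
	}
	fmt.Println(filter, filter.IsValid()) // Lanczos true

	// Unknown names are rejected via the IsValid allow-list.
	var bad models.ThumbnailFilter
	fmt.Println(bad.UnmarshalGQL("Bicubic")) // Bicubic is not a valid ThumbnailFilter
}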

View File

@ -30,9 +30,6 @@ type Media struct {
SideCarHash *string `gorm:"unique"`
Faces []*ImageFace `gorm:"constraint:OnDelete:CASCADE;"`
Blurhash *string `gorm:""`
// Only used internally
CounterpartPath *string `gorm:"-"`
}
func (Media) TableName() string {
@ -65,6 +62,25 @@ func (m *Media) GetThumbnail() (*MediaURL, error) {
return nil, nil
}
func (m *Media) GetHighRes() (*MediaURL, error) {
if len(m.MediaURL) == 0 {
return nil, errors.New("media.MediaURL is empty")
}
for _, url := range m.MediaURL {
if url.Purpose == PhotoHighRes {
url.Media = m
return &url, nil
}
}
return nil, nil
}
func (m *Media) CachePath() (string, error) {
return utils.CachePathForMedia(m.AlbumID, m.ID)
}
type MediaType string
const (

View File

@ -6,6 +6,7 @@ import (
type MediaEXIF struct {
Model
Description *string
Camera *string
Maker *string
Lens *string

View File

@ -10,6 +10,7 @@ type SiteInfo struct {
InitialSetup bool `gorm:"not null"`
PeriodicScanInterval int `gorm:"not null"`
ConcurrentWorkers int `gorm:"not null"`
ThumbnailMethod ThumbnailFilter `gorm:"not null"`
}
func (SiteInfo) TableName() string {
@ -26,6 +27,7 @@ func DefaultSiteInfo(db *gorm.DB) SiteInfo {
InitialSetup: true,
PeriodicScanInterval: 0,
ConcurrentWorkers: defaultConcurrentWorkers,
ThumbnailMethod: ThumbnailFilterNearestNeighbor,
}
}

View File

@ -22,6 +22,7 @@ func TestSiteInfo(t *testing.T) {
site_info.InitialSetup = false
site_info.PeriodicScanInterval = 360
site_info.ConcurrentWorkers = 10
site_info.ThumbnailMethod = models.ThumbnailFilterLanczos
if !assert.NoError(t, db.Session(&gorm.Session{AllowGlobalUpdate: true}).Save(&site_info).Error) {
return
@ -36,6 +37,7 @@ func TestSiteInfo(t *testing.T) {
InitialSetup: false,
PeriodicScanInterval: 360,
ConcurrentWorkers: 10,
ThumbnailMethod: models.ThumbnailFilterLanczos,
}, *site_info)
}

View File

@ -145,8 +145,9 @@ func (r *queryResolver) FaceGroup(ctx context.Context, id int) (*models.FaceGrou
faceGroupQuery := db.
Joins("LEFT JOIN image_faces ON image_faces.face_group_id = face_groups.id").
Joins("LEFT JOIN media ON image_faces.media_id = media.id").
Where("face_groups.id = ?", id).
Where("image_faces.media_id IN (?)", db.Select("media_id").Table("media").Where("media.album_id IN (?)", userAlbumIDs))
Where("media.album_id IN (?)", userAlbumIDs)
var faceGroup models.FaceGroup
if err := faceGroupQuery.Find(&faceGroup).Error; err != nil {

View File

@ -6,13 +6,14 @@ import (
"github.com/photoview/photoview/api/database/drivers"
"github.com/photoview/photoview/api/graphql/models"
"github.com/photoview/photoview/api/scanner"
"github.com/photoview/photoview/api/scanner/periodic_scanner"
"github.com/photoview/photoview/api/scanner/scanner_queue"
"github.com/pkg/errors"
"gorm.io/gorm"
)
func (r *mutationResolver) ScanAll(ctx context.Context) (*models.ScannerResult, error) {
err := scanner.AddAllToQueue()
err := scanner_queue.AddAllToQueue()
if err != nil {
return nil, err
}
@ -32,7 +33,7 @@ func (r *mutationResolver) ScanUser(ctx context.Context, userID int) (*models.Sc
return nil, errors.Wrap(err, "get user from database")
}
scanner.AddUserToQueue(&user)
scanner_queue.AddUserToQueue(&user)
startMessage := "Scanner started"
return &models.ScannerResult{
@ -57,7 +58,7 @@ func (r *mutationResolver) SetPeriodicScanInterval(ctx context.Context, interval
return 0, err
}
scanner.ChangePeriodicScanInterval(time.Duration(siteInfo.PeriodicScanInterval) * time.Second)
periodic_scanner.ChangePeriodicScanInterval(time.Duration(siteInfo.PeriodicScanInterval) * time.Second)
return siteInfo.PeriodicScanInterval, nil
}
@ -81,7 +82,7 @@ func (r *mutationResolver) SetScannerConcurrentWorkers(ctx context.Context, work
return 0, err
}
scanner.ChangeScannerConcurrentWorkers(siteInfo.ConcurrentWorkers)
scanner_queue.ChangeScannerConcurrentWorkers(siteInfo.ConcurrentWorkers)
return siteInfo.ConcurrentWorkers, nil
}

View File

@ -0,0 +1,36 @@
package resolvers
import (
"context"
"github.com/photoview/photoview/api/graphql/models"
// "github.com/pkg/errors"
"gorm.io/gorm"
)
func (r *mutationResolver) SetThumbnailDownsampleMethod(ctx context.Context, method models.ThumbnailFilter) (models.ThumbnailFilter, error) {
db := r.DB(ctx)
// if method > 5 {
// return 0, errors.New("The requested filter is unsupported, defaulting to nearest neighbor")
// }
if err := db.Session(&gorm.Session{AllowGlobalUpdate: true}).Model(&models.SiteInfo{}).Update("thumbnail_method", method).Error; err != nil {
return models.ThumbnailFilterNearestNeighbor, err
}
var siteInfo models.SiteInfo
if err := db.First(&siteInfo).Error; err != nil {
return models.ThumbnailFilterNearestNeighbor, err
}
return siteInfo.ThumbnailMethod, nil
// var langTrans *models.LanguageTranslation = nil
// if language != nil {
// lng := models.LanguageTranslation(*language)
// langTrans = &lng
// }
}

View File

@ -163,6 +163,9 @@ type Mutation {
"Set max number of concurrent scanner jobs running at once"
setScannerConcurrentWorkers(workers: Int!): Int! @isAdmin
"Set the filter to be used when generating thumbnails"
setThumbnailDownsampleMethod(method: ThumbnailFilter!): ThumbnailFilter! @isAdmin
"Change user preferences for the logged in user"
changeUserPreferences(language: String): UserPreferences! @isAuthorized
@ -247,6 +250,16 @@ type ShareToken {
media: Media
}
"Supported downsampling filters for thumbnail generation"
enum ThumbnailFilter {
NearestNeighbor,
Box,
Linear,
MitchellNetravali,
CatmullRom,
Lanczos,
}
"General information about the site"
type SiteInfo {
"Whether or not the initial setup wizard should be shown"
@ -257,6 +270,8 @@ type SiteInfo {
periodicScanInterval: Int! @isAdmin
"How many max concurrent scanner jobs that should run at once"
concurrentWorkers: Int! @isAdmin
"The filter to use when generating thumbnails"
thumbnailMethod: ThumbnailFilter! @isAdmin
}
type User {
@ -283,7 +298,8 @@ enum LanguageTranslation {
Russian,
TraditionalChinese,
SimplifiedChinese,
Portuguese
Portuguese,
Basque
}
"Preferences for regular users"
@ -382,6 +398,8 @@ type Media {
type MediaEXIF {
id: ID!
media: Media!
"The description of the image"
description: String
"The model name of the camera"
camera: String
"The maker of the camera"

View File

@ -46,7 +46,7 @@ func RegisterPhotoRoutes(db *gorm.DB, router *mux.Router) {
if _, err := os.Stat(cachedPath); os.IsNotExist((err)) {
err := db.Transaction(func(tx *gorm.DB) error {
if _, err = scanner.ProcessMedia(tx, media); err != nil {
if err = scanner.ProcessSingleMedia(tx, media); err != nil {
log.Printf("ERROR: processing image not found in cache (%s): %s\n", cachedPath, err)
return err
}

View File

@ -52,7 +52,7 @@ func RegisterVideoRoutes(db *gorm.DB, router *mux.Router) {
if _, err := os.Stat(cachedPath); err != nil {
if os.IsNotExist(err) {
err := db.Transaction(func(tx *gorm.DB) error {
if _, err := scanner.ProcessMedia(tx, media); err != nil {
if err := scanner.ProcessSingleMedia(tx, media); err != nil {
log.Printf("ERROR: processing video not found in cache: %s\n", err)
return err
}

View File

@ -81,6 +81,13 @@ func (p *externalExifParser) ParseExif(media_path string) (returnExif *models.Me
newExif := models.MediaEXIF{}
found_exif := false
// Get description
description, err := fileInfo.GetString("ImageDescription")
if err == nil {
found_exif = true
newExif.Description = &description
}
// Get camera model
model, err := fileInfo.GetString("Model")
if err == nil {

View File

@ -44,6 +44,11 @@ func (p internalExifParser) ParseExif(media_path string) (returnExif *models.Med
newExif := models.MediaEXIF{}
description, err := p.readStringTag(exifTags, exif.ImageDescription, media_path)
if err == nil {
newExif.Description = description
}
model, err := p.readStringTag(exifTags, exif.Model, media_path)
if err == nil {
newExif.Camera = model

View File

@ -48,6 +48,7 @@ func TestExifParsers(t *testing.T) {
{
path: "./test_data/bird.jpg",
assert: func(t *testing.T, exif *models.MediaEXIF) {
assert.EqualValues(t, *exif.Description, "Photo of a Bird")
assert.WithinDuration(t, *exif.DateShot, time.Unix(1336318784, 0).UTC(), time.Minute)
assert.EqualValues(t, *exif.Camera, "Canon EOS 600D")
assert.EqualValues(t, *exif.Maker, "Canon")

Binary file not shown (37 KiB before, 37 KiB after).

View File

@ -17,9 +17,25 @@ import (
"gopkg.in/vansante/go-ffprobe.v2"
_ "github.com/strukturag/libheif/go/heif"
"gorm.io/gorm"
)
func EncodeThumbnail(inputPath string, outputPath string) (*media_utils.PhotoDimensions, error) {
var thumbFilter = map[models.ThumbnailFilter]imaging.ResampleFilter{
models.ThumbnailFilterNearestNeighbor: imaging.NearestNeighbor,
models.ThumbnailFilterBox: imaging.Box,
models.ThumbnailFilterLinear: imaging.Linear,
models.ThumbnailFilterMitchellNetravali: imaging.MitchellNetravali,
models.ThumbnailFilterCatmullRom: imaging.CatmullRom,
models.ThumbnailFilterLanczos: imaging.Lanczos,
}
func EncodeThumbnail(db *gorm.DB, inputPath string, outputPath string) (*media_utils.PhotoDimensions, error) {
var siteInfo models.SiteInfo
if err := db.First(&siteInfo).Error; err != nil {
return nil, err
}
inputImage, err := imaging.Open(inputPath, imaging.AutoOrientation(true))
if err != nil {
@ -29,7 +45,7 @@ func EncodeThumbnail(inputPath string, outputPath string) (*media_utils.PhotoDim
dimensions := media_utils.PhotoDimensionsFromRect(inputImage.Bounds())
dimensions = dimensions.ThumbnailScale()
thumbImage := imaging.Resize(inputImage, dimensions.Width, dimensions.Height, imaging.NearestNeighbor)
thumbImage := imaging.Resize(inputImage, dimensions.Width, dimensions.Height, thumbFilter[siteInfo.ThumbnailMethod])
if err = encodeImageJPEG(thumbImage, outputPath, 60); err != nil {
return nil, err
}
@ -55,11 +71,18 @@ func encodeImageJPEG(image image.Image, outputPath string, jpegQuality int) erro
// EncodeMediaData is used to easily decode media data, with a cache so expensive operations are not repeated
type EncodeMediaData struct {
Media *models.Media
CounterpartPath *string
_photoImage image.Image
_contentType *media_type.MediaType
_videoMetadata *ffprobe.ProbeData
}
func NewEncodeMediaData(media *models.Media) EncodeMediaData {
return EncodeMediaData{
Media: media,
}
}
// ContentType reads the image to determine its content type
func (img *EncodeMediaData) ContentType() (*media_type.MediaType, error) {
if img._contentType != nil {
@ -86,7 +109,7 @@ func (img *EncodeMediaData) EncodeHighRes(outputPath string) error {
}
// Use darktable if there is no counterpart JPEG file to use instead
if contentType.IsRaw() && img.Media.CounterpartPath == nil {
if contentType.IsRaw() && img.CounterpartPath == nil {
if executable_worker.DarktableCli.IsInstalled() {
err := executable_worker.DarktableCli.EncodeJpeg(img.Media.Path, outputPath, 70)
if err != nil {
@ -114,8 +137,8 @@ func (img *EncodeMediaData) photoImage() (image.Image, error) {
}
var photoPath string
if img.Media.CounterpartPath != nil {
photoPath = *img.Media.CounterpartPath
if img.CounterpartPath != nil {
photoPath = *img.CounterpartPath
} else {
photoPath = img.Media.Path
}
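With this change EncodeThumbnail takes a *gorm.DB so it can read SiteInfo.ThumbnailMethod and map it to an imaging resample filter via thumbFilter, and the raw-counterpart path now lives on EncodeMediaData instead of the Media model. A minimal caller sketch (hypothetical file paths; assumes the database already holds a SiteInfo row):

package main

import (
	"log"

	"github.com/photoview/photoview/api/scanner/media_encoding"
	"gorm.io/driver/sqlite"
	"gorm.io/gorm"
)

func main() {
	// photoview.db is the SQLite database name from .gitignore; the SiteInfo
	// row inside it decides which resample filter the thumbnail uses.
	db, err := gorm.Open(sqlite.Open("photoview.db"), &gorm.Config{})
	if err != nil {
		log.Fatal(err)
	}

	dims, err := media_encoding.EncodeThumbnail(db, "/photos/bird.jpg", "/cache/bird_thumb.jpg")
	if err != nil {
		log.Fatal(err)
	}
	log.Printf("thumbnail encoded at %dx%d", dims.Width, dims.Height)
}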

View File

@ -29,6 +29,7 @@ const (
TypeSR2 MediaType = "image/x-sony-sr2"
TypeSRF MediaType = "image/x-sony-srf"
TypeCR2 MediaType = "image/x-canon-cr2"
TypeCR3 MediaType = "image/x-canon-cr3"
TypeCRW MediaType = "image/x-canon-crw"
TypeERF MediaType = "image/x-epson-erf"
TypeDCS MediaType = "image/x-kodak-dcs"
@ -92,6 +93,7 @@ var RawMimeTypes = [...]MediaType{
TypeSR2,
TypeSRF,
TypeCR2,
TypeCR3,
TypeCRW,
TypeERF,
TypeDCS,
@ -158,6 +160,7 @@ var fileExtensions = map[string]MediaType{
".srf": TypeSRF,
".srw": TypeSRW,
".cr2": TypeCR2,
".cr3": TypeCR3,
".crw": TypeCRW,
".erf": TypeERF,
".dcr": TypeDCR,
@ -270,7 +273,6 @@ func GetExtensionMediaType(ext string) (MediaType, bool) {
}
func GetMediaType(path string) (*MediaType, error) {
ext := filepath.Ext(path)
fileExtType, found := GetExtensionMediaType(ext)

View File

@ -1,4 +1,4 @@
package scanner
package periodic_scanner
import (
"log"
@ -6,6 +6,7 @@ import (
"time"
"github.com/photoview/photoview/api/graphql/models"
"github.com/photoview/photoview/api/scanner/scanner_queue"
"gorm.io/gorm"
)
@ -81,7 +82,7 @@ func scanIntervalRunner() {
log.Print("Scan interval runner: New ticker detected")
case <-mainPeriodicScanner.ticker.C:
log.Print("Scan interval runner: Starting periodic scan")
AddAllToQueue()
scanner_queue.AddAllToQueue()
}
} else {
<-mainPeriodicScanner.ticker_changed

View File

@ -1,411 +0,0 @@
package scanner
import (
"fmt"
"log"
"os"
"path"
"strconv"
"github.com/photoview/photoview/api/graphql/models"
"github.com/photoview/photoview/api/scanner/media_encoding"
"github.com/photoview/photoview/api/scanner/media_encoding/media_utils"
"github.com/photoview/photoview/api/scanner/media_type"
"github.com/photoview/photoview/api/utils"
"github.com/pkg/errors"
"gorm.io/gorm"
// Image decoders
_ "image/gif"
_ "image/png"
_ "golang.org/x/image/bmp"
_ "golang.org/x/image/tiff"
_ "golang.org/x/image/webp"
)
// Higher order function used to check if MediaURL for a given MediaPurpose exists
func makePhotoURLChecker(tx *gorm.DB, mediaID int) func(purpose models.MediaPurpose) (*models.MediaURL, error) {
return func(purpose models.MediaPurpose) (*models.MediaURL, error) {
var mediaURL []*models.MediaURL
result := tx.Where("purpose = ?", purpose).Where("media_id = ?", mediaID).Find(&mediaURL)
if result.Error != nil {
return nil, result.Error
}
if result.RowsAffected > 0 {
return mediaURL[0], nil
}
return nil, nil
}
}
func generateUniqueMediaNamePrefixed(prefix string, mediaPath string, extension string) string {
mediaName := fmt.Sprintf("%s_%s_%s", prefix, path.Base(mediaPath), utils.GenerateToken())
mediaName = models.SanitizeMediaName(mediaName)
mediaName = mediaName + extension
return mediaName
}
func generateUniqueMediaName(mediaPath string) string {
filename := path.Base(mediaPath)
baseName := filename[0 : len(filename)-len(path.Ext(filename))]
baseExt := path.Ext(filename)
mediaName := fmt.Sprintf("%s_%s", baseName, utils.GenerateToken())
mediaName = models.SanitizeMediaName(mediaName) + baseExt
return mediaName
}
func ProcessMedia(tx *gorm.DB, media *models.Media) (bool, error) {
imageData := media_encoding.EncodeMediaData{
Media: media,
}
contentType, err := imageData.ContentType()
if err != nil {
return false, errors.Wrapf(err, "get content-type of media (%s)", media.Path)
}
// Make sure media cache directory exists
mediaCachePath, err := makeMediaCacheDir(media)
if err != nil {
return false, errors.Wrap(err, "cache directory error")
}
if contentType.IsVideo() {
return processVideo(tx, &imageData, mediaCachePath)
} else {
return processPhoto(tx, &imageData, mediaCachePath)
}
}
func processPhoto(tx *gorm.DB, imageData *media_encoding.EncodeMediaData, photoCachePath *string) (bool, error) {
photo := imageData.Media
log.Printf("Processing photo: %s\n", photo.Path)
didProcess := false
photoURLFromDB := makePhotoURLChecker(tx, photo.ID)
// original photo url
origURL, err := photoURLFromDB(models.MediaOriginal)
if err != nil {
return false, err
}
// Thumbnail
thumbURL, err := photoURLFromDB(models.PhotoThumbnail)
if err != nil {
return false, errors.Wrap(err, "error processing photo thumbnail")
}
// Highres
highResURL, err := photoURLFromDB(models.PhotoHighRes)
if err != nil {
return false, errors.Wrap(err, "error processing photo highres")
}
var photoDimensions *media_utils.PhotoDimensions
var baseImagePath string = photo.Path
mediaType, err := media_type.GetMediaType(photo.Path)
if err != nil {
return false, errors.Wrap(err, "could determine if media was photo or video")
}
if mediaType.IsRaw() {
err = processRawSideCar(tx, imageData, highResURL, thumbURL, photoCachePath)
if err != nil {
return false, err
}
counterpartFile := scanForCompressedCounterpartFile(photo.Path)
if counterpartFile != nil {
imageData.Media.CounterpartPath = counterpartFile
}
}
// Generate high res jpeg
if highResURL == nil {
contentType, err := imageData.ContentType()
if err != nil {
return false, err
}
if !contentType.IsWebCompatible() {
didProcess = true
highresName := generateUniqueMediaNamePrefixed("highres", photo.Path, ".jpg")
baseImagePath = path.Join(*photoCachePath, highresName)
_, err := generateSaveHighResJPEG(tx, photo, imageData, highresName, baseImagePath, nil)
if err != nil {
return false, err
}
}
} else {
// Verify that highres photo still exists in cache
baseImagePath = path.Join(*photoCachePath, highResURL.MediaName)
if _, err := os.Stat(baseImagePath); os.IsNotExist(err) {
fmt.Printf("High-res photo found in database but not in cache, re-encoding photo to cache: %s\n", highResURL.MediaName)
didProcess = true
err = imageData.EncodeHighRes(baseImagePath)
if err != nil {
return false, errors.Wrap(err, "creating high-res cached image")
}
}
}
// Save original photo to database
if origURL == nil {
didProcess = true
// Make sure photo dimensions is set
if photoDimensions == nil {
photoDimensions, err = media_utils.GetPhotoDimensions(baseImagePath)
if err != nil {
return false, err
}
}
if err = saveOriginalPhotoToDB(tx, photo, imageData, photoDimensions); err != nil {
return false, errors.Wrap(err, "saving original photo to database")
}
}
// Save thumbnail to cache
if thumbURL == nil {
didProcess = true
thumbnailName := generateUniqueMediaNamePrefixed("thumbnail", photo.Path, ".jpg")
_, err := generateSaveThumbnailJPEG(tx, photo, thumbnailName, photoCachePath, baseImagePath, nil)
if err != nil {
return false, err
}
} else {
// Verify that thumbnail photo still exists in cache
thumbPath := path.Join(*photoCachePath, thumbURL.MediaName)
if _, err := os.Stat(thumbPath); os.IsNotExist(err) {
didProcess = true
fmt.Printf("Thumbnail photo found in database but not in cache, re-encoding photo to cache: %s\n", thumbURL.MediaName)
_, err := media_encoding.EncodeThumbnail(baseImagePath, thumbPath)
if err != nil {
return false, errors.Wrap(err, "could not create thumbnail cached image")
}
}
}
return didProcess, nil
}
func makeMediaCacheDir(media *models.Media) (*string, error) {
// Make root cache dir if not exists
if _, err := os.Stat(utils.MediaCachePath()); os.IsNotExist(err) {
if err := os.Mkdir(utils.MediaCachePath(), os.ModePerm); err != nil {
return nil, errors.Wrap(err, "could not make root image cache directory")
}
}
// Make album cache dir if not exists
albumCachePath := path.Join(utils.MediaCachePath(), strconv.Itoa(int(media.AlbumID)))
if _, err := os.Stat(albumCachePath); os.IsNotExist(err) {
if err := os.Mkdir(albumCachePath, os.ModePerm); err != nil {
return nil, errors.Wrap(err, "could not make album image cache directory")
}
}
// Make photo cache dir if not exists
photoCachePath := path.Join(albumCachePath, strconv.Itoa(int(media.ID)))
if _, err := os.Stat(photoCachePath); os.IsNotExist(err) {
if err := os.Mkdir(photoCachePath, os.ModePerm); err != nil {
return nil, errors.Wrap(err, "could not make photo image cache directory")
}
}
return &photoCachePath, nil
}
func saveOriginalPhotoToDB(tx *gorm.DB, photo *models.Media, imageData *media_encoding.EncodeMediaData, photoDimensions *media_utils.PhotoDimensions) error {
originalImageName := generateUniqueMediaName(photo.Path)
contentType, err := imageData.ContentType()
if err != nil {
return err
}
fileStats, err := os.Stat(photo.Path)
if err != nil {
return errors.Wrap(err, "reading file stats of original photo")
}
mediaURL := models.MediaURL{
Media: photo,
MediaName: originalImageName,
Width: photoDimensions.Width,
Height: photoDimensions.Height,
Purpose: models.MediaOriginal,
ContentType: string(*contentType),
FileSize: fileStats.Size(),
}
if err := tx.Create(&mediaURL).Error; err != nil {
return errors.Wrapf(err, "inserting original photo url: %d, %s", photo.ID, photo.Title)
}
return nil
}
func generateSaveHighResJPEG(tx *gorm.DB, media *models.Media, imageData *media_encoding.EncodeMediaData, highres_name string, imagePath string, mediaURL *models.MediaURL) (*models.MediaURL, error) {
err := imageData.EncodeHighRes(imagePath)
if err != nil {
return nil, errors.Wrap(err, "creating high-res cached image")
}
photoDimensions, err := media_utils.GetPhotoDimensions(imagePath)
if err != nil {
return nil, err
}
fileStats, err := os.Stat(imagePath)
if err != nil {
return nil, errors.Wrap(err, "reading file stats of highres photo")
}
if mediaURL == nil {
mediaURL = &models.MediaURL{
MediaID: media.ID,
MediaName: highres_name,
Width: photoDimensions.Width,
Height: photoDimensions.Height,
Purpose: models.PhotoHighRes,
ContentType: "image/jpeg",
FileSize: fileStats.Size(),
}
if err := tx.Create(&mediaURL).Error; err != nil {
return nil, errors.Wrapf(err, "could not insert highres media url (%d, %s)", media.ID, highres_name)
}
} else {
mediaURL.Width = photoDimensions.Width
mediaURL.Height = photoDimensions.Height
mediaURL.FileSize = fileStats.Size()
if err := tx.Save(&mediaURL).Error; err != nil {
return nil, errors.Wrapf(err, "could not update media url after side car changes (%d, %s)", media.ID, highres_name)
}
}
return mediaURL, nil
}
func generateSaveThumbnailJPEG(tx *gorm.DB, media *models.Media, thumbnail_name string, photoCachePath *string, baseImagePath string, mediaURL *models.MediaURL) (*models.MediaURL, error) {
thumbOutputPath := path.Join(*photoCachePath, thumbnail_name)
thumbSize, err := media_encoding.EncodeThumbnail(baseImagePath, thumbOutputPath)
if err != nil {
return nil, errors.Wrap(err, "could not create thumbnail cached image")
}
fileStats, err := os.Stat(thumbOutputPath)
if err != nil {
return nil, errors.Wrap(err, "reading file stats of thumbnail photo")
}
if mediaURL == nil {
mediaURL = &models.MediaURL{
MediaID: media.ID,
MediaName: thumbnail_name,
Width: thumbSize.Width,
Height: thumbSize.Height,
Purpose: models.PhotoThumbnail,
ContentType: "image/jpeg",
FileSize: fileStats.Size(),
}
if err := tx.Create(&mediaURL).Error; err != nil {
return nil, errors.Wrapf(err, "could not insert thumbnail media url (%d, %s)", media.ID, thumbnail_name)
}
} else {
mediaURL.Width = thumbSize.Width
mediaURL.Height = thumbSize.Height
mediaURL.FileSize = fileStats.Size()
if err := tx.Save(&mediaURL).Error; err != nil {
return nil, errors.Wrapf(err, "could not update media url after side car changes (%d, %s)", media.ID, thumbnail_name)
}
}
return mediaURL, nil
}
func processRawSideCar(tx *gorm.DB, imageData *media_encoding.EncodeMediaData, highResURL *models.MediaURL, thumbURL *models.MediaURL, photoCachePath *string) error {
photo := imageData.Media
sideCarFileHasChanged := false
var currentFileHash *string
currentSideCarPath := scanForSideCarFile(photo.Path)
if currentSideCarPath != nil {
currentFileHash = hashSideCarFile(currentSideCarPath)
if photo.SideCarHash == nil || *photo.SideCarHash != *currentFileHash {
sideCarFileHasChanged = true
}
} else if photo.SideCarPath != nil { // sidecar has been deleted since last scan
sideCarFileHasChanged = true
}
if sideCarFileHasChanged {
fmt.Printf("Detected changed sidecar file for %s recreating JPG's to reflect changes\n", photo.Path)
// Update the high-res image; it may be cropped, so dimensions and file size can change
baseImagePath := path.Join(*photoCachePath, highResURL.MediaName) // update base image path for thumbnail
tempHighResPath := baseImagePath + ".hold"
os.Rename(baseImagePath, tempHighResPath)
_, err := generateSaveHighResJPEG(tx, photo, imageData, highResURL.MediaName, baseImagePath, highResURL)
if err != nil {
os.Rename(tempHighResPath, baseImagePath)
return errors.Wrap(err, "recreating high-res cached image")
}
os.Remove(tempHighResPath)
// Update the thumbnail image; it may be cropped, so dimensions and file size can change
thumbPath := path.Join(*photoCachePath, thumbURL.MediaName)
tempThumbPath := thumbPath + ".hold" // hold onto the original image incase for some reason we fail to recreate one with the new settings
os.Rename(thumbPath, tempThumbPath)
_, err = generateSaveThumbnailJPEG(tx, photo, thumbURL.MediaName, photoCachePath, baseImagePath, thumbURL)
if err != nil {
os.Rename(tempThumbPath, thumbPath)
return errors.Wrap(err, "recreating thumbnail cached image")
}
os.Remove(tempThumbPath)
photo.SideCarHash = currentFileHash
photo.SideCarPath = currentSideCarPath
// save new side car hash
if err := tx.Save(&photo).Error; err != nil {
return errors.Wrapf(err, "could not update side car hash for media: %s", photo.Path)
}
}
return nil
}

View File

@ -6,16 +6,14 @@ import (
"log"
"os"
"path"
"time"
"github.com/photoview/photoview/api/graphql/models"
"github.com/photoview/photoview/api/graphql/notification"
"github.com/photoview/photoview/api/scanner/face_detection"
"github.com/photoview/photoview/api/scanner/scanner_cache"
"github.com/photoview/photoview/api/scanner/media_encoding"
"github.com/photoview/photoview/api/scanner/scanner_task"
"github.com/photoview/photoview/api/scanner/scanner_tasks"
"github.com/photoview/photoview/api/scanner/scanner_utils"
"github.com/photoview/photoview/api/utils"
"github.com/pkg/errors"
ignore "github.com/sabhiram/go-gitignore"
"gorm.io/gorm"
)
@ -56,7 +54,7 @@ func NewRootAlbum(db *gorm.DB, rootPath string, owner *models.User) (*models.Alb
}
if err := db.Model(&owner).Association("Albums").Append(&album); err != nil {
return nil, errors.Wrap(err, "failed to add owner to already existing album")
return nil, errors.Wrap(err, "add owner to already existing album")
}
return &album, nil
@ -87,142 +85,121 @@ func ValidRootPath(rootPath string) bool {
return true
}
func scanAlbum(album *models.Album, cache *scanner_cache.AlbumScannerCache, db *gorm.DB) {
func ScanAlbum(ctx scanner_task.TaskContext) error {
album_notify_key := utils.GenerateToken()
notifyThrottle := utils.NewThrottle(500 * time.Millisecond)
notifyThrottle.Trigger(nil)
newCtx, err := scanner_tasks.Tasks.BeforeScanAlbum(ctx)
if err != nil {
return errors.Wrapf(err, "before scan album (%s)", ctx.GetAlbum().Path)
}
ctx = newCtx
// Scan for photos
albumMedia, err := findMediaForAlbum(album, cache, db, func(photo *models.Media, newPhoto bool) {
if newPhoto {
notifyThrottle.Trigger(func() {
notification.BroadcastNotification(&models.Notification{
Key: album_notify_key,
Type: models.NotificationTypeMessage,
Header: fmt.Sprintf("Found new media in album '%s'", album.Title),
Content: fmt.Sprintf("Found %s", photo.Path),
})
})
}
})
albumMedia, err := findMediaForAlbum(ctx)
if err != nil {
scanner_utils.ScannerError("Failed to find media for album (%s): %s", album.Path, err)
return errors.Wrapf(err, "find media for album (%s): %s", ctx.GetAlbum().Path, err)
}
album_has_changes := false
for count, media := range albumMedia {
processing_was_needed := false
changedMedia := make([]*models.Media, 0)
for i, media := range albumMedia {
updatedURLs := []*models.MediaURL{}
transactionError := db.Transaction(func(tx *gorm.DB) error {
processing_was_needed, err = ProcessMedia(tx, media)
mediaData := media_encoding.NewEncodeMediaData(media)
// define new ctx for scope of for-loop
ctx, err := scanner_tasks.Tasks.BeforeProcessMedia(ctx, &mediaData)
if err != nil {
return errors.Wrapf(err, "failed to process photo (%s)", media.Path)
return err
}
if processing_was_needed {
album_has_changes = true
progress := float64(count) / float64(len(albumMedia)) * 100.0
notification.BroadcastNotification(&models.Notification{
Key: album_notify_key,
Type: models.NotificationTypeProgress,
Header: fmt.Sprintf("Processing media for album '%s'", album.Title),
Content: fmt.Sprintf("Processed media at %s", media.Path),
Progress: &progress,
})
transactionError := ctx.DatabaseTransaction(func(ctx scanner_task.TaskContext) error {
updatedURLs, err = processMedia(ctx, &mediaData)
if err != nil {
return errors.Wrapf(err, "process media (%s)", media.Path)
}
if len(updatedURLs) > 0 {
changedMedia = append(changedMedia, media)
}
return nil
})
if transactionError != nil {
scanner_utils.ScannerError("Failed to begin database transaction: %s", transactionError)
return errors.Wrap(err, "process media database transaction")
}
if processing_was_needed && media.Type == models.MediaTypePhoto {
go func(media *models.Media) {
if face_detection.GlobalFaceDetector == nil {
return
}
if err := face_detection.GlobalFaceDetector.DetectFaces(db, media); err != nil {
scanner_utils.ScannerError("Error detecting faces in image (%s): %s", media.Path, err)
}
}(media)
if err = scanner_tasks.Tasks.AfterProcessMedia(ctx, &mediaData, updatedURLs, i, len(albumMedia)); err != nil {
return errors.Wrap(err, "after process media")
}
}
cleanup_errors := CleanupMedia(db, album.ID, albumMedia)
for _, err := range cleanup_errors {
scanner_utils.ScannerError("Failed to delete old media: %s", err)
if err := scanner_tasks.Tasks.AfterScanAlbum(ctx, changedMedia, albumMedia); err != nil {
return errors.Wrap(err, "after scan album")
}
if album_has_changes {
timeoutDelay := 2000
notification.BroadcastNotification(&models.Notification{
Key: album_notify_key,
Type: models.NotificationTypeMessage,
Positive: true,
Header: fmt.Sprintf("Done processing media for album '%s'", album.Title),
Content: fmt.Sprintf("All media have been processed"),
Timeout: &timeoutDelay,
})
}
return nil
}
func findMediaForAlbum(album *models.Album, cache *scanner_cache.AlbumScannerCache, db *gorm.DB, onScanPhoto func(photo *models.Media, newPhoto bool)) ([]*models.Media, error) {
func findMediaForAlbum(ctx scanner_task.TaskContext) ([]*models.Media, error) {
albumPhotos := make([]*models.Media, 0)
albumMedia := make([]*models.Media, 0)
dirContent, err := ioutil.ReadDir(album.Path)
dirContent, err := ioutil.ReadDir(ctx.GetAlbum().Path)
if err != nil {
return nil, err
}
// Get ignore data
albumIgnore := ignore.CompileIgnoreLines(*cache.GetAlbumIgnore(album.Path)...)
for _, item := range dirContent {
photoPath := path.Join(album.Path, item.Name())
mediaPath := path.Join(ctx.GetAlbum().Path, item.Name())
isDirSymlink, err := utils.IsDirSymlink(photoPath)
isDirSymlink, err := utils.IsDirSymlink(mediaPath)
if err != nil {
log.Printf("Cannot detect whether %s is symlink to a directory. Pretending it is not", photoPath)
log.Printf("Cannot detect whether %s is symlink to a directory. Pretending it is not", mediaPath)
isDirSymlink = false
}
if !item.IsDir() && !isDirSymlink && cache.IsPathMedia(photoPath) {
// Match file against ignore data
if albumIgnore.MatchesPath(item.Name()) {
log.Printf("File %s ignored\n", item.Name())
continue
}
// Skip the JPEGs that are compressed versions of raw files
counterpartFile := scanForRawCounterpartFile(photoPath)
if counterpartFile != nil {
continue
}
err := db.Transaction(func(tx *gorm.DB) error {
media, isNewMedia, err := ScanMedia(tx, photoPath, album.ID, cache)
if !item.IsDir() && !isDirSymlink && ctx.GetCache().IsPathMedia(mediaPath) {
skip, err := scanner_tasks.Tasks.MediaFound(ctx, item, mediaPath)
if err != nil {
return errors.Wrapf(err, "Scanning media error (%s)", photoPath)
return nil, err
}
if skip {
continue
}
onScanPhoto(media, isNewMedia)
err = ctx.DatabaseTransaction(func(ctx scanner_task.TaskContext) error {
media, isNewMedia, err := ScanMedia(ctx.GetDB(), mediaPath, ctx.GetAlbum().ID, ctx.GetCache())
if err != nil {
return errors.Wrapf(err, "scanning media error (%s)", mediaPath)
}
albumPhotos = append(albumPhotos, media)
if err = scanner_tasks.Tasks.AfterMediaFound(ctx, media, isNewMedia); err != nil {
return err
}
albumMedia = append(albumMedia, media)
return nil
})
if err != nil {
scanner_utils.ScannerError("Error scanning media for album (%d): %s\n", album.ID, err)
scanner_utils.ScannerError("Error scanning media for album (%d): %s\n", ctx.GetAlbum().ID, err)
continue
}
}
}
return albumPhotos, nil
}
return albumMedia, nil
}
func processMedia(ctx scanner_task.TaskContext, mediaData *media_encoding.EncodeMediaData) ([]*models.MediaURL, error) {
// Make sure media cache directory exists
mediaCachePath, err := mediaData.Media.CachePath()
if err != nil {
return []*models.MediaURL{}, errors.Wrap(err, "cache directory error")
}
return scanner_tasks.Tasks.ProcessMedia(ctx, mediaData, mediaCachePath)
}

View File

@ -1,92 +1,20 @@
package scanner
import (
"crypto/md5"
"encoding/hex"
"io"
"context"
"log"
"os"
"path"
"path/filepath"
"strings"
"github.com/photoview/photoview/api/graphql/models"
"github.com/photoview/photoview/api/scanner/exif"
"github.com/photoview/photoview/api/scanner/media_type"
"github.com/photoview/photoview/api/scanner/media_encoding"
"github.com/photoview/photoview/api/scanner/scanner_cache"
"github.com/photoview/photoview/api/scanner/scanner_utils"
"github.com/photoview/photoview/api/scanner/scanner_task"
"github.com/photoview/photoview/api/scanner/scanner_tasks"
"github.com/pkg/errors"
"gorm.io/gorm"
)
func scanForSideCarFile(path string) *string {
testPath := path + ".xmp"
if scanner_utils.FileExists(testPath) {
return &testPath
}
return nil
}
func scanForRawCounterpartFile(imagePath string) *string {
ext := filepath.Ext(imagePath)
fileExtType, found := media_type.GetExtensionMediaType(ext)
if found {
if !fileExtType.IsBasicTypeSupported() {
return nil
}
}
rawPath := media_type.RawCounterpart(imagePath)
if rawPath != nil {
return rawPath
}
return nil
}
func scanForCompressedCounterpartFile(imagePath string) *string {
ext := filepath.Ext(imagePath)
fileExtType, found := media_type.GetExtensionMediaType(ext)
if found {
if fileExtType.IsBasicTypeSupported() {
return nil
}
}
pathWithoutExt := strings.TrimSuffix(imagePath, path.Ext(imagePath))
for _, ext := range media_type.TypeJpeg.FileExtensions() {
testPath := pathWithoutExt + ext
if scanner_utils.FileExists(testPath) {
return &testPath
}
}
return nil
}
func hashSideCarFile(path *string) *string {
if path == nil {
return nil
}
f, err := os.Open(*path)
if err != nil {
log.Printf("ERROR: %s", err)
}
defer f.Close()
h := md5.New()
if _, err := io.Copy(h, f); err != nil {
log.Printf("ERROR: %s", err)
}
hash := hex.EncodeToString(h.Sum(nil))
return &hash
}
func ScanMedia(tx *gorm.DB, mediaPath string, albumId int, cache *scanner_cache.AlbumScannerCache) (*models.Media, bool, error) {
mediaName := path.Base(mediaPath)
@ -115,20 +43,10 @@ func ScanMedia(tx *gorm.DB, mediaPath string, albumId int, cache *scanner_cache.
var mediaTypeText models.MediaType
var sideCarPath *string = nil
var sideCarHash *string = nil
if mediaType.IsVideo() {
mediaTypeText = models.MediaTypeVideo
} else {
mediaTypeText = models.MediaTypePhoto
// search for sidecar files
if mediaType.IsRaw() {
sideCarPath = scanForSideCarFile(mediaPath)
if sideCarPath != nil {
sideCarHash = hashSideCarFile(sideCarPath)
}
}
}
stat, err := os.Stat(mediaPath)
@ -139,8 +57,6 @@ func ScanMedia(tx *gorm.DB, mediaPath string, albumId int, cache *scanner_cache.
media := models.Media{
Title: mediaName,
Path: mediaPath,
SideCarPath: sideCarPath,
SideCarHash: sideCarHash,
AlbumID: albumId,
Type: mediaTypeText,
DateShot: stat.ModTime(),
@ -150,16 +66,41 @@ func ScanMedia(tx *gorm.DB, mediaPath string, albumId int, cache *scanner_cache.
return nil, false, errors.Wrap(err, "could not insert media into database")
}
_, err = exif.SaveEXIF(tx, &media)
if err != nil {
log.Printf("WARN: SaveEXIF for %s failed: %s\n", mediaName, err)
}
if media.Type == models.MediaTypeVideo {
if err = ScanVideoMetadata(tx, &media); err != nil {
log.Printf("WARN: ScanVideoMetadata for %s failed: %s\n", mediaName, err)
}
}
return &media, true, nil
}
// ProcessSingleMedia processes a single media item; it can be used to reprocess media with a corrupted cache.
// It waits for processing to finish before returning.
func ProcessSingleMedia(db *gorm.DB, media *models.Media) error {
album_cache := scanner_cache.MakeAlbumCache()
var album models.Album
if err := db.Model(media).Association("Album").Find(&album); err != nil {
return err
}
media_data := media_encoding.NewEncodeMediaData(media)
task_context := scanner_task.NewTaskContext(context.Background(), db, &album, album_cache)
new_ctx, err := scanner_tasks.Tasks.BeforeProcessMedia(task_context, &media_data)
if err != nil {
return err
}
mediaCachePath, err := media.CachePath()
if err != nil {
return err
}
updated_urls, err := scanner_tasks.Tasks.ProcessMedia(new_ctx, &media_data, mediaCachePath)
if err != nil {
return err
}
err = scanner_tasks.Tasks.AfterProcessMedia(new_ctx, &media_data, updated_urls, 0, 1)
if err != nil {
return err
}
return nil
}
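// Example (sketch): reprocessing one media item from a caller outside the
// scanner package, e.g. when its cached files are corrupted. db is an open
// *gorm.DB and mediaID is a hypothetical primary key of the media to reprocess:
//
//    var media models.Media
//    if err := db.First(&media, mediaID).Error; err != nil {
//        return err
//    }
//    if err := scanner.ProcessSingleMedia(db, &media); err != nil {
//        return err
//    }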

View File

@ -1,6 +1,7 @@
package scanner
package scanner_queue
import (
"context"
"fmt"
"log"
"sync"
@ -8,20 +9,33 @@ import (
"github.com/photoview/photoview/api/graphql/models"
"github.com/photoview/photoview/api/graphql/notification"
"github.com/photoview/photoview/api/scanner"
"github.com/photoview/photoview/api/scanner/scanner_cache"
"github.com/photoview/photoview/api/scanner/scanner_task"
"github.com/photoview/photoview/api/scanner/scanner_utils"
"github.com/photoview/photoview/api/utils"
"github.com/pkg/errors"
"gorm.io/gorm"
)
// ScannerJob describes a job on the queue to be run by the scanner over a single album
type ScannerJob struct {
album *models.Album
cache *scanner_cache.AlbumScannerCache
ctx scanner_task.TaskContext
}
func NewScannerJob(ctx scanner_task.TaskContext) ScannerJob {
return ScannerJob{
ctx,
}
}
func (job *ScannerJob) Run(db *gorm.DB) {
scanAlbum(job.album, job.cache, db)
err := scanner.ScanAlbum(job.ctx)
if err != nil {
scanner_utils.ScannerError("Failed to scan album: %v", err)
}
}
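// Example (sketch): a one-off scan of a single album could build and run a job
// directly; AddUserToQueue below does the equivalent when enqueuing jobs. The
// db and album values are assumed to be loaded already:
//
//    ctx := scanner_task.NewTaskContext(context.Background(), db, album, scanner_cache.MakeAlbumCache())
//    job := NewScannerJob(ctx)
//    job.Run(db)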
type ScannerQueueSettings struct {
@ -162,7 +176,7 @@ func (queue *ScannerQueue) processQueue(notifyThrottle *utils.Throttle) {
Positive: true,
})
if err := GenerateBlurhashes(queue.db); err != nil {
if err := scanner.GenerateBlurhashes(queue.db); err != nil {
scanner_utils.ScannerError("Failed to generate blurhashes: %v", err)
}
@ -212,9 +226,11 @@ func AddAllToQueue() error {
return nil
}
// AddUserToQueue finds all root albums owned by the given user and adds them to the scanner queue.
// Function does not block.
func AddUserToQueue(user *models.User) error {
album_cache := scanner_cache.MakeAlbumCache()
albums, album_errors := findAlbumsForUser(global_scanner_queue.db, user, album_cache)
albums, album_errors := scanner.FindAlbumsForUser(global_scanner_queue.db, user, album_cache)
for _, err := range album_errors {
return errors.Wrapf(err, "find albums for user (user_id: %d)", user.ID)
}
@ -222,8 +238,7 @@ func AddUserToQueue(user *models.User) error {
global_scanner_queue.mutex.Lock()
for _, album := range albums {
global_scanner_queue.addJob(&ScannerJob{
album: album,
cache: album_cache,
ctx: scanner_task.NewTaskContext(context.Background(), global_scanner_queue.db, album, album_cache),
})
}
global_scanner_queue.mutex.Unlock()
@ -248,7 +263,7 @@ func (queue *ScannerQueue) jobOnQueue(job *ScannerJob) (bool, error) {
scannerJobs := append(queue.in_progress, queue.up_next...)
for _, scannerJob := range scannerJobs {
if scannerJob.album.ID == job.album.ID {
if scannerJob.ctx.GetAlbum().ID == job.ctx.GetAlbum().ID {
return true, nil
}
}

View File

@ -1,12 +1,18 @@
package scanner
package scanner_queue
import (
"context"
"flag"
"testing"
"github.com/photoview/photoview/api/graphql/models"
"github.com/photoview/photoview/api/scanner/scanner_cache"
"github.com/photoview/photoview/api/scanner/scanner_task"
)
var _ = flag.Bool("database", false, "run database integration tests")
var _ = flag.Bool("filesystem", false, "run filesystem integration tests")
func makeAlbumWithID(id int) *models.Album {
var album models.Album
album.ID = id
@ -14,11 +20,15 @@ func makeAlbumWithID(id int) *models.Album {
return &album
}
func makeScannerJob(albumID int) ScannerJob {
return NewScannerJob(scanner_task.NewTaskContext(context.Background(), nil, makeAlbumWithID(albumID), scanner_cache.MakeAlbumCache()))
}
func TestScannerQueue_AddJob(t *testing.T) {
scannerJobs := []ScannerJob{
{album: makeAlbumWithID(100), cache: scanner_cache.MakeAlbumCache()},
{album: makeAlbumWithID(20), cache: scanner_cache.MakeAlbumCache()},
makeScannerJob(100),
makeScannerJob(20),
}
mockScannerQueue := ScannerQueue{
@ -29,7 +39,7 @@ func TestScannerQueue_AddJob(t *testing.T) {
}
t.Run("add new job to scanner queue", func(t *testing.T) {
newJob := ScannerJob{album: makeAlbumWithID(42), cache: scanner_cache.MakeAlbumCache()}
newJob := makeScannerJob(42)
startingJobs := len(mockScannerQueue.up_next)
@ -49,7 +59,8 @@ func TestScannerQueue_AddJob(t *testing.T) {
t.Run("add existing job to scanner queue", func(t *testing.T) {
startingJobs := len(mockScannerQueue.up_next)
err := mockScannerQueue.addJob(&ScannerJob{album: makeAlbumWithID(20), cache: scanner_cache.MakeAlbumCache()})
job := makeScannerJob(20)
err := mockScannerQueue.addJob(&job)
if err != nil {
t.Errorf(".AddJob() returned an unexpected error: %s", err)
}
@ -59,14 +70,13 @@ func TestScannerQueue_AddJob(t *testing.T) {
}
})
}
func TestScannerQueue_JobOnQueue(t *testing.T) {
scannerJobs := []ScannerJob{
{album: makeAlbumWithID(100), cache: scanner_cache.MakeAlbumCache()},
{album: makeAlbumWithID(20), cache: scanner_cache.MakeAlbumCache()},
makeScannerJob(100),
makeScannerJob(20),
}
mockScannerQueue := ScannerQueue{
@ -81,12 +91,8 @@ func TestScannerQueue_JobOnQueue(t *testing.T) {
bool
ScannerJob
}{
{"album which owner is already on the queue", true, ScannerJob{
album: makeAlbumWithID(100), cache: scanner_cache.MakeAlbumCache(),
}},
{"album that is not on the queue", false, ScannerJob{
album: makeAlbumWithID(321), cache: scanner_cache.MakeAlbumCache(),
}},
{"album which owner is already on the queue", true, makeScannerJob(100)},
{"album that is not on the queue", false, makeScannerJob(321)},
}
for _, test := range onQueueTests {

View File

@ -0,0 +1,102 @@
package scanner_task
import (
"context"
"database/sql"
"flag"
"io/fs"
"github.com/photoview/photoview/api/graphql/models"
"github.com/photoview/photoview/api/scanner/media_encoding"
"github.com/photoview/photoview/api/scanner/scanner_cache"
"gorm.io/gorm"
)
// ScannerTask is an interface for a task to be performed as a part of the scanner pipeline
type ScannerTask interface {
// BeforeScanAlbum will run at the beginning of the scan task.
// New values can be stored in the returned TaskContext that will live throughout the lifetime of the task.
BeforeScanAlbum(ctx TaskContext) (TaskContext, error)
// AfterScanAlbum will run at the end of the scan task.
AfterScanAlbum(ctx TaskContext, changedMedia []*models.Media, albumMedia []*models.Media) error
// MediaFound will run for each media file found on the filesystem.
// It will run even when the media is already present in the database.
// If the returned skip value is true, the media will be skipped and further steps will not be executed for the given file.
MediaFound(ctx TaskContext, fileInfo fs.FileInfo, mediaPath string) (skip bool, err error)
// AfterMediaFound will run for each media file after it has been saved to the database, but before it is processed.
// It will run even when the media is already present in the database; in that case `newMedia` will be false.
AfterMediaFound(ctx TaskContext, media *models.Media, newMedia bool) error
BeforeProcessMedia(ctx TaskContext, mediaData *media_encoding.EncodeMediaData) (TaskContext, error)
ProcessMedia(ctx TaskContext, mediaData *media_encoding.EncodeMediaData, mediaCachePath string) (updatedURLs []*models.MediaURL, err error)
AfterProcessMedia(ctx TaskContext, mediaData *media_encoding.EncodeMediaData, updatedURLs []*models.MediaURL, mediaIndex int, mediaTotal int) error
}
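// Example (sketch): a concrete task normally embeds ScannerTaskBase and
// overrides only the hooks it needs. The LoggingTask below is hypothetical and
// simply logs every media file the scanner finds:
//
//    type LoggingTask struct {
//        scanner_task.ScannerTaskBase
//    }
//
//    func (t LoggingTask) AfterMediaFound(ctx scanner_task.TaskContext, media *models.Media, newMedia bool) error {
//        log.Printf("scanner found %s (new: %t)", media.Path, newMedia)
//        return nil
//    }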
type TaskContext struct {
ctx context.Context
}
func NewTaskContext(parent context.Context, db *gorm.DB, album *models.Album, cache *scanner_cache.AlbumScannerCache) TaskContext {
ctx := TaskContext{ctx: parent}
ctx = ctx.WithValue(taskCtxKeyAlbum, album)
ctx = ctx.WithValue(taskCtxKeyAlbumCache, cache)
ctx = ctx.WithDB(db)
return ctx
}
type taskCtxKeyType string
const (
taskCtxKeyAlbum taskCtxKeyType = "task_album"
taskCtxKeyAlbumCache taskCtxKeyType = "task_album_cache"
taskCtxKeyDatabase taskCtxKeyType = "task_database"
)
func (c TaskContext) GetAlbum() *models.Album {
return c.ctx.Value(taskCtxKeyAlbum).(*models.Album)
}
func (c TaskContext) GetCache() *scanner_cache.AlbumScannerCache {
return c.ctx.Value(taskCtxKeyAlbumCache).(*scanner_cache.AlbumScannerCache)
}
func (c TaskContext) GetDB() *gorm.DB {
return c.ctx.Value(taskCtxKeyDatabase).(*gorm.DB)
}
func (c TaskContext) DatabaseTransaction(transFunc func(ctx TaskContext) error, opts ...*sql.TxOptions) error {
return c.GetDB().Transaction(func(tx *gorm.DB) error {
return transFunc(c.WithDB(tx))
}, opts...)
}
func (c TaskContext) WithValue(key, val interface{}) TaskContext {
return TaskContext{
ctx: context.WithValue(c.ctx, key, val),
}
}
func (c TaskContext) Value(key interface{}) interface{} {
return c.ctx.Value(key)
}
func (c TaskContext) WithDB(db *gorm.DB) TaskContext {
// Allow db to be nil in tests
if db == nil && flag.Lookup("test.v") != nil {
return c
}
return c.WithValue(taskCtxKeyDatabase, db.WithContext(c.ctx))
}
func (c TaskContext) Done() <-chan struct{} {
return c.ctx.Done()
}
func (c TaskContext) Err() error {
return c.ctx.Err()
}
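// Example (sketch): a hook that performs several related writes can group them
// with DatabaseTransaction; the callback receives a copy of the context whose
// GetDB() returns the transaction. The media and mediaURL values below are
// hypothetical:
//
//    err := ctx.DatabaseTransaction(func(ctx scanner_task.TaskContext) error {
//        if err := ctx.GetDB().Save(&media).Error; err != nil {
//            return err
//        }
//        return ctx.GetDB().Save(&mediaURL).Error
//    })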

View File

@ -0,0 +1,39 @@
package scanner_task
import (
"io/fs"
"github.com/photoview/photoview/api/graphql/models"
"github.com/photoview/photoview/api/scanner/media_encoding"
)
// ScannerTaskBase provides a default "empty" implementation of ScannerTask, meant to be embedded so concrete tasks only need to override the hooks they use.
type ScannerTaskBase struct{}
func (t ScannerTaskBase) BeforeScanAlbum(ctx TaskContext) (TaskContext, error) {
return ctx, nil
}
func (t ScannerTaskBase) AfterScanAlbum(ctx TaskContext, changedMedia []*models.Media, albumMedia []*models.Media) error {
return nil
}
func (t ScannerTaskBase) MediaFound(ctx TaskContext, fileInfo fs.FileInfo, mediaPath string) (skip bool, err error) {
return false, nil
}
func (t ScannerTaskBase) AfterMediaFound(ctx TaskContext, media *models.Media, newMedia bool) error {
return nil
}
func (t ScannerTaskBase) BeforeProcessMedia(ctx TaskContext, mediaData *media_encoding.EncodeMediaData) (TaskContext, error) {
return ctx, nil
}
func (t ScannerTaskBase) ProcessMedia(ctx TaskContext, mediaData *media_encoding.EncodeMediaData, mediaCachePath string) (updatedURLs []*models.MediaURL, err error) {
return []*models.MediaURL{}, nil
}
func (t ScannerTaskBase) AfterProcessMedia(ctx TaskContext, mediaData *media_encoding.EncodeMediaData, updatedURLs []*models.MediaURL, mediaIndex int, mediaTotal int) error {
return nil
}

View File

@ -1,4 +1,4 @@
package scanner
package cleanup_tasks
import (
"os"
@ -13,6 +13,7 @@ import (
"gorm.io/gorm"
)
// CleanupMedia removes media entries from the database that are no longer present on the filesystem
func CleanupMedia(db *gorm.DB, albumId int, albumMedia []*models.Media) []error {
albumMediaIds := make([]int, len(albumMedia))
for i, media := range albumMedia {
@ -63,8 +64,8 @@ func CleanupMedia(db *gorm.DB, albumId int, albumMedia []*models.Media) []error
return deleteErrors
}
// Find and delete old albums in the database and cache that does not exist on the filesystem anymore.
func deleteOldUserAlbums(db *gorm.DB, scannedAlbums []*models.Album, user *models.User) []error {
// DeleteOldUserAlbums finds and deletes albums in the database and cache that no longer exist on the filesystem.
func DeleteOldUserAlbums(db *gorm.DB, scannedAlbums []*models.Album, user *models.User) []error {
if len(scannedAlbums) == 0 {
return nil
}

View File

@ -1,4 +1,4 @@
package scanner_test
package cleanup_tasks_test
import (
"os"
@ -13,6 +13,10 @@ import (
"github.com/stretchr/testify/assert"
)
func TestMain(m *testing.M) {
os.Exit(test_utils.IntegrationTestRun(m))
}
func TestCleanupMedia(t *testing.T) {
test_utils.FilesystemTest(t)
db := test_utils.DatabaseTest(t)
@ -27,7 +31,7 @@ func TestCleanupMedia(t *testing.T) {
}
test_dir := t.TempDir()
copy.Copy("./test_data", test_dir)
assert.NoError(t, copy.Copy("../../test_data", test_dir))
countAllMedia := func() int {
var all_media []*models.Media

View File

@ -0,0 +1,21 @@
package cleanup_tasks
import (
"github.com/photoview/photoview/api/graphql/models"
"github.com/photoview/photoview/api/scanner/scanner_task"
"github.com/photoview/photoview/api/scanner/scanner_utils"
)
type MediaCleanupTask struct {
scanner_task.ScannerTaskBase
}
func (t MediaCleanupTask) AfterScanAlbum(ctx scanner_task.TaskContext, changedMedia []*models.Media, albumMedia []*models.Media) error {
cleanup_errors := CleanupMedia(ctx.GetDB(), ctx.GetAlbum().ID, albumMedia)
for _, err := range cleanup_errors {
scanner_utils.ScannerError("delete old media: %s", err)
}
return nil
}

View File

@ -0,0 +1,27 @@
package scanner_tasks
import (
"log"
"github.com/photoview/photoview/api/graphql/models"
"github.com/photoview/photoview/api/scanner/exif"
"github.com/photoview/photoview/api/scanner/scanner_task"
)
type ExifTask struct {
scanner_task.ScannerTaskBase
}
func (t ExifTask) AfterMediaFound(ctx scanner_task.TaskContext, media *models.Media, newMedia bool) error {
if !newMedia {
return nil
}
_, err := exif.SaveEXIF(ctx.GetDB(), media)
if err != nil {
log.Printf("WARN: SaveEXIF for %s failed: %s\n", media.Title, err)
}
return nil
}

View File

@ -0,0 +1,30 @@
package scanner_tasks
import (
"github.com/photoview/photoview/api/graphql/models"
"github.com/photoview/photoview/api/scanner/face_detection"
"github.com/photoview/photoview/api/scanner/media_encoding"
"github.com/photoview/photoview/api/scanner/scanner_task"
"github.com/photoview/photoview/api/scanner/scanner_utils"
)
type FaceDetectionTask struct {
scanner_task.ScannerTaskBase
}
func (t FaceDetectionTask) AfterProcessMedia(ctx scanner_task.TaskContext, mediaData *media_encoding.EncodeMediaData, updatedURLs []*models.MediaURL, mediaIndex int, mediaTotal int) error {
didProcess := len(updatedURLs) > 0
if didProcess && mediaData.Media.Type == models.MediaTypePhoto {
go func(media *models.Media) {
if face_detection.GlobalFaceDetector == nil {
return
}
if err := face_detection.GlobalFaceDetector.DetectFaces(ctx.GetDB(), media); err != nil {
scanner_utils.ScannerError("Error detecting faces in image (%s): %s", media.Path, err)
}
}(mediaData.Media)
}
return nil
}

View File

@ -0,0 +1,37 @@
package scanner_tasks
import (
"io/fs"
"log"
"github.com/photoview/photoview/api/scanner/scanner_task"
ignore "github.com/sabhiram/go-gitignore"
)
type IgnorefileTask struct {
scanner_task.ScannerTaskBase
}
type ignorefileTaskKey string
const albumIgnoreKey ignorefileTaskKey = "album_ignore_key"
func getAlbumIgnore(ctx scanner_task.TaskContext) *ignore.GitIgnore {
return ctx.Value(albumIgnoreKey).(*ignore.GitIgnore)
}
func (t IgnorefileTask) BeforeScanAlbum(ctx scanner_task.TaskContext) (scanner_task.TaskContext, error) {
albumIgnore := ignore.CompileIgnoreLines(*ctx.GetCache().GetAlbumIgnore(ctx.GetAlbum().Path)...)
return ctx.WithValue(albumIgnoreKey, albumIgnore), nil
}
func (t IgnorefileTask) MediaFound(ctx scanner_task.TaskContext, fileInfo fs.FileInfo, mediaPath string) (bool, error) {
// Match file against ignore data
if getAlbumIgnore(ctx).MatchesPath(fileInfo.Name()) {
log.Printf("File %s ignored\n", fileInfo.Name())
return true, nil
}
return false, nil
}
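// Example (sketch): the cached ignore data is compiled from gitignore-style
// lines, so patterns like these cause matching files to be skipped. The
// pattern list below is made up for illustration:
//
//    albumIgnore := ignore.CompileIgnoreLines("*.tmp", "drafts/", "IMG_0001.jpg")
//    albumIgnore.MatchesPath("IMG_0001.jpg") // true
//    albumIgnore.MatchesPath("IMG_0002.jpg") // false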

View File

@ -0,0 +1,74 @@
package scanner_tasks
import (
"fmt"
"time"
"github.com/photoview/photoview/api/graphql/models"
"github.com/photoview/photoview/api/graphql/notification"
"github.com/photoview/photoview/api/scanner/media_encoding"
"github.com/photoview/photoview/api/scanner/scanner_task"
"github.com/photoview/photoview/api/utils"
)
type NotificationTask struct {
scanner_task.ScannerTaskBase
throttle utils.Throttle
albumKey string
}
func NewNotificationTask() NotificationTask {
notifyThrottle := utils.NewThrottle(500 * time.Millisecond)
notifyThrottle.Trigger(nil)
return NotificationTask{
albumKey: utils.GenerateToken(),
throttle: notifyThrottle,
}
}
func (t NotificationTask) AfterMediaFound(ctx scanner_task.TaskContext, media *models.Media, newMedia bool) error {
if newMedia {
t.throttle.Trigger(func() {
notification.BroadcastNotification(&models.Notification{
Key: t.albumKey,
Type: models.NotificationTypeMessage,
Header: fmt.Sprintf("Found new media in album '%s'", ctx.GetAlbum().Title),
Content: fmt.Sprintf("Found %s", media.Path),
})
})
}
return nil
}
func (t NotificationTask) AfterProcessMedia(ctx scanner_task.TaskContext, mediaData *media_encoding.EncodeMediaData, updatedURLs []*models.MediaURL, mediaIndex int, mediaTotal int) error {
if len(updatedURLs) > 0 {
progress := float64(mediaIndex) / float64(mediaTotal) * 100.0
notification.BroadcastNotification(&models.Notification{
Key: t.albumKey,
Type: models.NotificationTypeProgress,
Header: fmt.Sprintf("Processing media for album '%s'", ctx.GetAlbum().Title),
Content: fmt.Sprintf("Processed media at %s", mediaData.Media.Path),
Progress: &progress,
})
}
return nil
}
func (t NotificationTask) AfterScanAlbum(ctx scanner_task.TaskContext, changedMedia []*models.Media, albumMedia []*models.Media) error {
if len(changedMedia) > 0 {
timeoutDelay := 2000
notification.BroadcastNotification(&models.Notification{
Key: t.albumKey,
Type: models.NotificationTypeMessage,
Positive: true,
Header: fmt.Sprintf("Done processing media for album '%s'", ctx.GetAlbum().Title),
Content: "All media have been processed",
Timeout: &timeoutDelay,
})
}
return nil
}

View File

@ -0,0 +1,87 @@
package processing_tasks
import (
"io/fs"
"path"
"path/filepath"
"strings"
"github.com/photoview/photoview/api/scanner/media_encoding"
"github.com/photoview/photoview/api/scanner/media_type"
"github.com/photoview/photoview/api/scanner/scanner_task"
"github.com/photoview/photoview/api/scanner/scanner_utils"
"github.com/pkg/errors"
)
type CounterpartFilesTask struct {
scanner_task.ScannerTaskBase
}
func (t CounterpartFilesTask) MediaFound(ctx scanner_task.TaskContext, fileInfo fs.FileInfo, mediaPath string) (skip bool, err error) {
// Skip the JPEGs that are compressed versions of raw files
counterpartFile := scanForRawCounterpartFile(mediaPath)
if counterpartFile != nil {
return true, nil
}
return false, nil
}
func (t CounterpartFilesTask) BeforeProcessMedia(ctx scanner_task.TaskContext, mediaData *media_encoding.EncodeMediaData) (scanner_task.TaskContext, error) {
mediaType, err := ctx.GetCache().GetMediaType(mediaData.Media.Path)
if err != nil {
return ctx, errors.Wrap(err, "scan for counterpart file")
}
if !mediaType.IsRaw() {
return ctx, nil
}
counterpartFile := scanForCompressedCounterpartFile(mediaData.Media.Path)
if counterpartFile != nil {
mediaData.CounterpartPath = counterpartFile
}
return ctx, nil
}
func scanForCompressedCounterpartFile(imagePath string) *string {
ext := filepath.Ext(imagePath)
fileExtType, found := media_type.GetExtensionMediaType(ext)
if found {
if fileExtType.IsBasicTypeSupported() {
return nil
}
}
pathWithoutExt := strings.TrimSuffix(imagePath, path.Ext(imagePath))
for _, ext := range media_type.TypeJpeg.FileExtensions() {
testPath := pathWithoutExt + ext
if scanner_utils.FileExists(testPath) {
return &testPath
}
}
return nil
}
func scanForRawCounterpartFile(imagePath string) *string {
ext := filepath.Ext(imagePath)
fileExtType, found := media_type.GetExtensionMediaType(ext)
if found {
if !fileExtType.IsBasicTypeSupported() {
return nil
}
}
rawPath := media_type.RawCounterpart(imagePath)
if rawPath != nil {
return rawPath
}
return nil
}

View File

@ -0,0 +1,139 @@
package processing_tasks
import (
"fmt"
"log"
"os"
"path"
"github.com/photoview/photoview/api/graphql/models"
"github.com/photoview/photoview/api/scanner/media_encoding"
"github.com/photoview/photoview/api/scanner/media_encoding/media_utils"
"github.com/photoview/photoview/api/scanner/scanner_task"
"github.com/pkg/errors"
// Image decoders
_ "image/gif"
_ "image/png"
_ "golang.org/x/image/bmp"
_ "golang.org/x/image/tiff"
_ "golang.org/x/image/webp"
)
type ProcessPhotoTask struct {
scanner_task.ScannerTaskBase
}
func (t ProcessPhotoTask) ProcessMedia(ctx scanner_task.TaskContext, mediaData *media_encoding.EncodeMediaData, mediaCachePath string) ([]*models.MediaURL, error) {
if mediaData.Media.Type != models.MediaTypePhoto {
return []*models.MediaURL{}, nil
}
updatedURLs := make([]*models.MediaURL, 0)
photo := mediaData.Media
log.Printf("Processing photo: %s\n", photo.Path)
photoURLFromDB := makePhotoURLChecker(ctx.GetDB(), photo.ID)
// original photo url
origURL, err := photoURLFromDB(models.MediaOriginal)
if err != nil {
return []*models.MediaURL{}, err
}
// Thumbnail
thumbURL, err := photoURLFromDB(models.PhotoThumbnail)
if err != nil {
return []*models.MediaURL{}, errors.Wrap(err, "error processing photo thumbnail")
}
// Highres
highResURL, err := photoURLFromDB(models.PhotoHighRes)
if err != nil {
return []*models.MediaURL{}, errors.Wrap(err, "error processing photo highres")
}
var photoDimensions *media_utils.PhotoDimensions
var baseImagePath string = photo.Path
// Generate high res jpeg
if highResURL == nil {
contentType, err := mediaData.ContentType()
if err != nil {
return []*models.MediaURL{}, err
}
if !contentType.IsWebCompatible() {
highresName := generateUniqueMediaNamePrefixed("highres", photo.Path, ".jpg")
baseImagePath = path.Join(mediaCachePath, highresName)
highRes, err := generateSaveHighResJPEG(ctx.GetDB(), photo, mediaData, highresName, baseImagePath, nil)
if err != nil {
return []*models.MediaURL{}, err
}
updatedURLs = append(updatedURLs, highRes)
}
} else {
// Verify that highres photo still exists in cache
baseImagePath = path.Join(mediaCachePath, highResURL.MediaName)
if _, err := os.Stat(baseImagePath); os.IsNotExist(err) {
fmt.Printf("High-res photo found in database but not in cache, re-encoding photo to cache: %s\n", highResURL.MediaName)
updatedURLs = append(updatedURLs, highResURL)
err = mediaData.EncodeHighRes(baseImagePath)
if err != nil {
return []*models.MediaURL{}, errors.Wrap(err, "creating high-res cached image")
}
}
}
// Save original photo to database
if origURL == nil {
// Make sure photoDimensions is set
if photoDimensions == nil {
photoDimensions, err = media_utils.GetPhotoDimensions(baseImagePath)
if err != nil {
return []*models.MediaURL{}, err
}
}
original, err := saveOriginalPhotoToDB(ctx.GetDB(), photo, mediaData, photoDimensions)
if err != nil {
return []*models.MediaURL{}, errors.Wrap(err, "saving original photo to database")
}
updatedURLs = append(updatedURLs, original)
}
// Save thumbnail to cache
if thumbURL == nil {
thumbnailName := generateUniqueMediaNamePrefixed("thumbnail", photo.Path, ".jpg")
thumbnail, err := generateSaveThumbnailJPEG(ctx.GetDB(), photo, thumbnailName, mediaCachePath, baseImagePath, nil)
if err != nil {
return []*models.MediaURL{}, err
}
updatedURLs = append(updatedURLs, thumbnail)
} else {
// Verify that thumbnail photo still exists in cache
thumbPath := path.Join(mediaCachePath, thumbURL.MediaName)
if _, err := os.Stat(thumbPath); os.IsNotExist(err) {
updatedURLs = append(updatedURLs, thumbURL)
fmt.Printf("Thumbnail photo found in database but not in cache, re-encoding photo to cache: %s\n", thumbURL.MediaName)
_, err := media_encoding.EncodeThumbnail(ctx.GetDB(), baseImagePath, thumbPath)
if err != nil {
return []*models.MediaURL{}, errors.Wrap(err, "could not create thumbnail cached image")
}
}
}
return updatedURLs, nil
}

View File

@ -1,4 +1,4 @@
package scanner
package processing_tasks
import (
"context"
@ -13,54 +13,60 @@ import (
"github.com/photoview/photoview/api/scanner/media_encoding"
"github.com/photoview/photoview/api/scanner/media_encoding/executable_worker"
"github.com/photoview/photoview/api/scanner/media_encoding/media_utils"
"github.com/photoview/photoview/api/scanner/scanner_task"
"github.com/photoview/photoview/api/utils"
"github.com/pkg/errors"
"gopkg.in/vansante/go-ffprobe.v2"
"gorm.io/gorm"
)
func processVideo(tx *gorm.DB, mediaData *media_encoding.EncodeMediaData, videoCachePath *string) (bool, error) {
type ProcessVideoTask struct {
scanner_task.ScannerTaskBase
}
func (t ProcessVideoTask) ProcessMedia(ctx scanner_task.TaskContext, mediaData *media_encoding.EncodeMediaData, mediaCachePath string) ([]*models.MediaURL, error) {
if mediaData.Media.Type != models.MediaTypeVideo {
return []*models.MediaURL{}, nil
}
updatedURLs := make([]*models.MediaURL, 0)
video := mediaData.Media
didProcess := false
log.Printf("Processing video: %s", video.Path)
mediaURLFromDB := makePhotoURLChecker(tx, video.ID)
mediaURLFromDB := makePhotoURLChecker(ctx.GetDB(), video.ID)
videoOriginalURL, err := mediaURLFromDB(models.MediaOriginal)
if err != nil {
return false, errors.Wrap(err, "error processing video original format")
return []*models.MediaURL{}, errors.Wrap(err, "error processing video original format")
}
videoWebURL, err := mediaURLFromDB(models.VideoWeb)
if err != nil {
return false, errors.Wrap(err, "error processing video web-format")
return []*models.MediaURL{}, errors.Wrap(err, "error processing video web-format")
}
videoThumbnailURL, err := mediaURLFromDB(models.VideoThumbnail)
if err != nil {
return false, errors.Wrap(err, "error processing video thumbnail")
return []*models.MediaURL{}, errors.Wrap(err, "error processing video thumbnail")
}
videoType, err := mediaData.ContentType()
if err != nil {
return false, errors.Wrap(err, "error getting video content type")
return []*models.MediaURL{}, errors.Wrap(err, "error getting video content type")
}
if videoOriginalURL == nil && videoType.IsWebCompatible() {
didProcess = true
origVideoPath := video.Path
videoMediaName := generateUniqueMediaName(video.Path)
webMetadata, err := readVideoStreamMetadata(origVideoPath)
webMetadata, err := ReadVideoStreamMetadata(origVideoPath)
if err != nil {
return false, errors.Wrapf(err, "failed to read metadata for original video (%s)", video.Title)
return []*models.MediaURL{}, errors.Wrapf(err, "failed to read metadata for original video (%s)", video.Title)
}
fileStats, err := os.Stat(origVideoPath)
if err != nil {
return false, errors.Wrap(err, "reading file stats of original video")
return []*models.MediaURL{}, errors.Wrap(err, "reading file stats of original video")
}
mediaURL := models.MediaURL{
@ -73,35 +79,34 @@ func processVideo(tx *gorm.DB, mediaData *media_encoding.EncodeMediaData, videoC
FileSize: fileStats.Size(),
}
if err := tx.Create(&mediaURL).Error; err != nil {
return false, errors.Wrapf(err, "failed to insert original video into database (%s)", video.Title)
if err := ctx.GetDB().Create(&mediaURL).Error; err != nil {
return []*models.MediaURL{}, errors.Wrapf(err, "insert original video into database (%s)", video.Title)
}
updatedURLs = append(updatedURLs, &mediaURL)
}
if videoWebURL == nil && !videoType.IsWebCompatible() {
didProcess = true
web_video_name := fmt.Sprintf("web_video_%s_%s", path.Base(video.Path), utils.GenerateToken())
web_video_name = strings.ReplaceAll(web_video_name, ".", "_")
web_video_name = strings.ReplaceAll(web_video_name, " ", "_")
web_video_name = web_video_name + ".mp4"
webVideoPath := path.Join(*videoCachePath, web_video_name)
webVideoPath := path.Join(mediaCachePath, web_video_name)
err = executable_worker.FfmpegCli.EncodeMp4(video.Path, webVideoPath)
if err != nil {
return false, errors.Wrapf(err, "could not encode mp4 video (%s)", video.Path)
return []*models.MediaURL{}, errors.Wrapf(err, "could not encode mp4 video (%s)", video.Path)
}
webMetadata, err := readVideoStreamMetadata(webVideoPath)
webMetadata, err := ReadVideoStreamMetadata(webVideoPath)
if err != nil {
return false, errors.Wrapf(err, "failed to read metadata for encoded web-video (%s)", video.Title)
return []*models.MediaURL{}, errors.Wrapf(err, "failed to read metadata for encoded web-video (%s)", video.Title)
}
fileStats, err := os.Stat(webVideoPath)
if err != nil {
return false, errors.Wrap(err, "reading file stats of web-optimized video")
return []*models.MediaURL{}, errors.Wrap(err, "reading file stats of web-optimized video")
}
mediaURL := models.MediaURL{
@ -114,39 +119,39 @@ func processVideo(tx *gorm.DB, mediaData *media_encoding.EncodeMediaData, videoC
FileSize: fileStats.Size(),
}
if err := tx.Create(&mediaURL).Error; err != nil {
return false, errors.Wrapf(err, "failed to insert encoded web-video into database (%s)", video.Title)
if err := ctx.GetDB().Create(&mediaURL).Error; err != nil {
return []*models.MediaURL{}, errors.Wrapf(err, "failed to insert encoded web-video into database (%s)", video.Title)
}
updatedURLs = append(updatedURLs, &mediaURL)
}
probeData, err := mediaData.VideoMetadata()
if err != nil {
return false, err
return []*models.MediaURL{}, err
}
if videoThumbnailURL == nil {
didProcess = true
video_thumb_name := fmt.Sprintf("video_thumb_%s_%s", path.Base(video.Path), utils.GenerateToken())
video_thumb_name = strings.ReplaceAll(video_thumb_name, ".", "_")
video_thumb_name = strings.ReplaceAll(video_thumb_name, " ", "_")
video_thumb_name = video_thumb_name + ".jpg"
thumbImagePath := path.Join(*videoCachePath, video_thumb_name)
thumbImagePath := path.Join(mediaCachePath, video_thumb_name)
err = executable_worker.FfmpegCli.EncodeVideoThumbnail(video.Path, thumbImagePath, probeData)
if err != nil {
return false, errors.Wrapf(err, "failed to generate thumbnail for video (%s)", video.Title)
return []*models.MediaURL{}, errors.Wrapf(err, "failed to generate thumbnail for video (%s)", video.Title)
}
thumbDimensions, err := media_utils.GetPhotoDimensions(thumbImagePath)
if err != nil {
return false, errors.Wrap(err, "get dimensions of video thumbnail image")
return []*models.MediaURL{}, errors.Wrap(err, "get dimensions of video thumbnail image")
}
fileStats, err := os.Stat(thumbImagePath)
if err != nil {
return false, errors.Wrap(err, "reading file stats of video thumbnail")
return []*models.MediaURL{}, errors.Wrap(err, "reading file stats of video thumbnail")
}
thumbMediaURL := models.MediaURL{
@ -159,46 +164,48 @@ func processVideo(tx *gorm.DB, mediaData *media_encoding.EncodeMediaData, videoC
FileSize: fileStats.Size(),
}
if err := tx.Create(&thumbMediaURL).Error; err != nil {
return false, errors.Wrapf(err, "failed to insert video thumbnail image into database (%s)", video.Title)
if err := ctx.GetDB().Create(&thumbMediaURL).Error; err != nil {
return []*models.MediaURL{}, errors.Wrapf(err, "failed to insert video thumbnail image into database (%s)", video.Title)
}
updatedURLs = append(updatedURLs, &thumbMediaURL)
} else {
// Verify that video thumbnail still exists in cache
thumbImagePath := path.Join(*videoCachePath, videoThumbnailURL.MediaName)
thumbImagePath := path.Join(mediaCachePath, videoThumbnailURL.MediaName)
if _, err := os.Stat(thumbImagePath); os.IsNotExist(err) {
fmt.Printf("Video thumbnail found in database but not in cache, re-encoding photo to cache: %s\n", videoThumbnailURL.MediaName)
didProcess = true
updatedURLs = append(updatedURLs, videoThumbnailURL)
err = executable_worker.FfmpegCli.EncodeVideoThumbnail(video.Path, thumbImagePath, probeData)
if err != nil {
return false, errors.Wrapf(err, "failed to generate thumbnail for video (%s)", video.Title)
return []*models.MediaURL{}, errors.Wrapf(err, "failed to generate thumbnail for video (%s)", video.Title)
}
thumbDimensions, err := media_utils.GetPhotoDimensions(thumbImagePath)
if err != nil {
return false, errors.Wrap(err, "get dimensions of video thumbnail image")
return []*models.MediaURL{}, errors.Wrap(err, "get dimensions of video thumbnail image")
}
fileStats, err := os.Stat(thumbImagePath)
if err != nil {
return false, errors.Wrap(err, "reading file stats of video thumbnail")
return []*models.MediaURL{}, errors.Wrap(err, "reading file stats of video thumbnail")
}
videoThumbnailURL.Width = thumbDimensions.Width
videoThumbnailURL.Height = thumbDimensions.Height
videoThumbnailURL.FileSize = fileStats.Size()
if err := tx.Save(videoThumbnailURL).Error; err != nil {
return false, errors.Wrap(err, "updating video thumbnail url in database after re-encoding")
if err := ctx.GetDB().Save(videoThumbnailURL).Error; err != nil {
return []*models.MediaURL{}, errors.Wrap(err, "updating video thumbnail url in database after re-encoding")
}
}
}
return didProcess, nil
return updatedURLs, nil
}
func readVideoMetadata(videoPath string) (*ffprobe.ProbeData, error) {
func ReadVideoMetadata(videoPath string) (*ffprobe.ProbeData, error) {
ctx, cancelFn := context.WithTimeout(context.Background(), 5*time.Second)
defer cancelFn()
@ -210,8 +217,8 @@ func readVideoMetadata(videoPath string) (*ffprobe.ProbeData, error) {
return data, nil
}
func readVideoStreamMetadata(videoPath string) (*ffprobe.Stream, error) {
data, err := readVideoMetadata(videoPath)
func ReadVideoStreamMetadata(videoPath string) (*ffprobe.Stream, error) {
data, err := ReadVideoMetadata(videoPath)
if err != nil {
return nil, errors.Wrap(err, "read video stream metadata")
}

View File

@ -0,0 +1,98 @@
package processing_tasks
import (
"os"
"path"
"github.com/photoview/photoview/api/graphql/models"
"github.com/photoview/photoview/api/scanner/media_encoding"
"github.com/photoview/photoview/api/scanner/media_encoding/media_utils"
"github.com/pkg/errors"
"gorm.io/gorm"
)
func generateSaveHighResJPEG(tx *gorm.DB, media *models.Media, imageData *media_encoding.EncodeMediaData, highres_name string, imagePath string, mediaURL *models.MediaURL) (*models.MediaURL, error) {
err := imageData.EncodeHighRes(imagePath)
if err != nil {
return nil, errors.Wrap(err, "creating high-res cached image")
}
photoDimensions, err := media_utils.GetPhotoDimensions(imagePath)
if err != nil {
return nil, err
}
fileStats, err := os.Stat(imagePath)
if err != nil {
return nil, errors.Wrap(err, "reading file stats of highres photo")
}
if mediaURL == nil {
mediaURL = &models.MediaURL{
MediaID: media.ID,
MediaName: highres_name,
Width: photoDimensions.Width,
Height: photoDimensions.Height,
Purpose: models.PhotoHighRes,
ContentType: "image/jpeg",
FileSize: fileStats.Size(),
}
if err := tx.Create(&mediaURL).Error; err != nil {
return nil, errors.Wrapf(err, "could not insert highres media url (%d, %s)", media.ID, highres_name)
}
} else {
mediaURL.Width = photoDimensions.Width
mediaURL.Height = photoDimensions.Height
mediaURL.FileSize = fileStats.Size()
if err := tx.Save(&mediaURL).Error; err != nil {
return nil, errors.Wrapf(err, "could not update media url after side car changes (%d, %s)", media.ID, highres_name)
}
}
return mediaURL, nil
}
func generateSaveThumbnailJPEG(tx *gorm.DB, media *models.Media, thumbnail_name string, photoCachePath string, baseImagePath string, mediaURL *models.MediaURL) (*models.MediaURL, error) {
thumbOutputPath := path.Join(photoCachePath, thumbnail_name)
thumbSize, err := media_encoding.EncodeThumbnail(tx, baseImagePath, thumbOutputPath)
if err != nil {
return nil, errors.Wrap(err, "could not create thumbnail cached image")
}
fileStats, err := os.Stat(thumbOutputPath)
if err != nil {
return nil, errors.Wrap(err, "reading file stats of thumbnail photo")
}
if mediaURL == nil {
mediaURL = &models.MediaURL{
MediaID: media.ID,
MediaName: thumbnail_name,
Width: thumbSize.Width,
Height: thumbSize.Height,
Purpose: models.PhotoThumbnail,
ContentType: "image/jpeg",
FileSize: fileStats.Size(),
}
if err := tx.Create(&mediaURL).Error; err != nil {
return nil, errors.Wrapf(err, "could not insert thumbnail media url (%d, %s)", media.ID, thumbnail_name)
}
} else {
mediaURL.Width = thumbSize.Width
mediaURL.Height = thumbSize.Height
mediaURL.FileSize = fileStats.Size()
if err := tx.Save(&mediaURL).Error; err != nil {
return nil, errors.Wrapf(err, "could not update media url after side car changes (%d, %s)", media.ID, thumbnail_name)
}
}
return mediaURL, nil
}

View File

@ -0,0 +1,82 @@
package processing_tasks
import (
"fmt"
"os"
"path"
"github.com/photoview/photoview/api/graphql/models"
"github.com/photoview/photoview/api/scanner/media_encoding"
"github.com/photoview/photoview/api/scanner/media_encoding/media_utils"
"github.com/photoview/photoview/api/utils"
"github.com/pkg/errors"
"gorm.io/gorm"
)
// makePhotoURLChecker returns a function that checks whether a MediaURL with the given MediaPurpose already exists for the media
func makePhotoURLChecker(tx *gorm.DB, mediaID int) func(purpose models.MediaPurpose) (*models.MediaURL, error) {
return func(purpose models.MediaPurpose) (*models.MediaURL, error) {
var mediaURL []*models.MediaURL
result := tx.Where("purpose = ?", purpose).Where("media_id = ?", mediaID).Find(&mediaURL)
if result.Error != nil {
return nil, result.Error
}
if result.RowsAffected > 0 {
return mediaURL[0], nil
}
return nil, nil
}
}
func generateUniqueMediaNamePrefixed(prefix string, mediaPath string, extension string) string {
mediaName := fmt.Sprintf("%s_%s_%s", prefix, path.Base(mediaPath), utils.GenerateToken())
mediaName = models.SanitizeMediaName(mediaName)
mediaName = mediaName + extension
return mediaName
}
func generateUniqueMediaName(mediaPath string) string {
filename := path.Base(mediaPath)
baseName := filename[0 : len(filename)-len(path.Ext(filename))]
baseExt := path.Ext(filename)
mediaName := fmt.Sprintf("%s_%s", baseName, utils.GenerateToken())
mediaName = models.SanitizeMediaName(mediaName) + baseExt
return mediaName
}
func saveOriginalPhotoToDB(tx *gorm.DB, photo *models.Media, imageData *media_encoding.EncodeMediaData, photoDimensions *media_utils.PhotoDimensions) (*models.MediaURL, error) {
originalImageName := generateUniqueMediaName(photo.Path)
contentType, err := imageData.ContentType()
if err != nil {
return nil, err
}
fileStats, err := os.Stat(photo.Path)
if err != nil {
return nil, errors.Wrap(err, "reading file stats of original photo")
}
mediaURL := models.MediaURL{
Media: photo,
MediaName: originalImageName,
Width: photoDimensions.Width,
Height: photoDimensions.Height,
Purpose: models.MediaOriginal,
ContentType: string(*contentType),
FileSize: fileStats.Size(),
}
if err := tx.Create(&mediaURL).Error; err != nil {
return nil, errors.Wrapf(err, "inserting original photo url: %d, %s", photo.ID, photo.Title)
}
return &mediaURL, nil
}

View File

@ -0,0 +1,159 @@
package processing_tasks
import (
"crypto/md5"
"encoding/hex"
"fmt"
"io"
"log"
"os"
"path"
"github.com/photoview/photoview/api/graphql/models"
"github.com/photoview/photoview/api/scanner/media_encoding"
"github.com/photoview/photoview/api/scanner/scanner_task"
"github.com/photoview/photoview/api/scanner/scanner_utils"
"github.com/pkg/errors"
)
type SidecarTask struct {
scanner_task.ScannerTaskBase
}
func (t SidecarTask) AfterMediaFound(ctx scanner_task.TaskContext, media *models.Media, newMedia bool) error {
if media.Type != models.MediaTypePhoto || !newMedia {
return nil
}
mediaType, err := ctx.GetCache().GetMediaType(media.Path)
if err != nil {
return errors.Wrap(err, "scan for sidecar file")
}
if !mediaType.IsRaw() {
return nil
}
var sideCarPath *string = nil
var sideCarHash *string = nil
sideCarPath = scanForSideCarFile(media.Path)
if sideCarPath != nil {
sideCarHash = hashSideCarFile(sideCarPath)
}
// Add sidecar data to media
media.SideCarPath = sideCarPath
media.SideCarHash = sideCarHash
if err := ctx.GetDB().Save(media).Error; err != nil {
return errors.Wrapf(err, "update media sidecar info (%s)", *sideCarPath)
}
return nil
}
func (t SidecarTask) ProcessMedia(ctx scanner_task.TaskContext, mediaData *media_encoding.EncodeMediaData, mediaCachePath string) (updatedURLs []*models.MediaURL, err error) {
mediaType, err := mediaData.ContentType()
if err != nil {
return []*models.MediaURL{}, errors.Wrap(err, "sidecar task, process media")
}
if !mediaType.IsRaw() {
return []*models.MediaURL{}, nil
}
photo := mediaData.Media
sideCarFileHasChanged := false
var currentFileHash *string
currentSideCarPath := scanForSideCarFile(photo.Path)
if currentSideCarPath != nil {
currentFileHash = hashSideCarFile(currentSideCarPath)
if photo.SideCarHash == nil || *photo.SideCarHash != *currentFileHash {
sideCarFileHasChanged = true
}
} else if photo.SideCarPath != nil { // sidecar has been deleted since last scan
sideCarFileHasChanged = true
}
if !sideCarFileHasChanged {
return []*models.MediaURL{}, nil
}
fmt.Printf("Detected changed sidecar file for %s recreating JPG's to reflect changes\n", photo.Path)
highResURL, err := photo.GetHighRes()
if err != nil {
return []*models.MediaURL{}, errors.Wrap(err, "sidecar task, get high-res media_url")
}
thumbURL, err := photo.GetThumbnail()
if err != nil {
return []*models.MediaURL{}, errors.Wrap(err, "sidecar task, get high-res media_url")
}
// regenerate the high-res image: the sidecar edits may crop it, so dimensions and file size can change
baseImagePath := path.Join(mediaCachePath, highResURL.MediaName) // update base image path for thumbnail
tempHighResPath := baseImagePath + ".hold"
os.Rename(baseImagePath, tempHighResPath)
updatedHighRes, err := generateSaveHighResJPEG(ctx.GetDB(), photo, mediaData, highResURL.MediaName, baseImagePath, highResURL)
if err != nil {
os.Rename(tempHighResPath, baseImagePath)
return []*models.MediaURL{}, errors.Wrap(err, "sidecar task, recreating high-res cached image")
}
os.Remove(tempHighResPath)
// regenerate the thumbnail: the sidecar edits may crop it, so dimensions and file size can change
thumbPath := path.Join(mediaCachePath, thumbURL.MediaName)
tempThumbPath := thumbPath + ".hold" // hold onto the original image in case we fail to recreate one with the new settings
os.Rename(thumbPath, tempThumbPath)
updatedThumbnail, err := generateSaveThumbnailJPEG(ctx.GetDB(), photo, thumbURL.MediaName, mediaCachePath, baseImagePath, thumbURL)
if err != nil {
os.Rename(tempThumbPath, thumbPath)
return []*models.MediaURL{}, errors.Wrap(err, "recreating thumbnail cached image")
}
os.Remove(tempThumbPath)
photo.SideCarHash = currentFileHash
photo.SideCarPath = currentSideCarPath
// persist the new sidecar path and hash
if err := ctx.GetDB().Save(&photo).Error; err != nil {
return []*models.MediaURL{}, errors.Wrapf(err, "could not update side car hash for media: %s", photo.Path)
}
return []*models.MediaURL{
updatedThumbnail,
updatedHighRes,
}, nil
}
func scanForSideCarFile(path string) *string {
testPath := path + ".xmp"
if scanner_utils.FileExists(testPath) {
return &testPath
}
return nil
}
func hashSideCarFile(path *string) *string {
if path == nil {
return nil
}
f, err := os.Open(*path)
if err != nil {
log.Printf("ERROR: %s", err)
}
defer f.Close()
h := md5.New()
if _, err := io.Copy(h, f); err != nil {
log.Printf("ERROR: %s", err)
}
hash := hex.EncodeToString(h.Sum(nil))
return &hash
}
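A small illustration of the sidecar convention implemented above (hypothetical paths, not from the original diff):
	sidecar := scanForSideCarFile("/photos/DSC_0042.NEF") // only "/photos/DSC_0042.NEF.xmp" is checked
	// A "/photos/DSC_0042.xmp" file (extension replaced instead of appended) would not be picked up.
	if sidecar != nil {
		hash := hashSideCarFile(sidecar) // hex-encoded MD5 of the sidecar contents
		_ = hash
	}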

View File

@ -0,0 +1,143 @@
package scanner_tasks
import (
"io/fs"
"github.com/photoview/photoview/api/graphql/models"
"github.com/photoview/photoview/api/scanner/media_encoding"
"github.com/photoview/photoview/api/scanner/scanner_task"
"github.com/photoview/photoview/api/scanner/scanner_tasks/cleanup_tasks"
"github.com/photoview/photoview/api/scanner/scanner_tasks/processing_tasks"
)
var allTasks []scanner_task.ScannerTask = []scanner_task.ScannerTask{
NotificationTask{},
IgnorefileTask{},
processing_tasks.CounterpartFilesTask{},
processing_tasks.SidecarTask{},
processing_tasks.ProcessPhotoTask{},
processing_tasks.ProcessVideoTask{},
FaceDetectionTask{},
ExifTask{},
VideoMetadataTask{},
cleanup_tasks.MediaCleanupTask{},
}
type scannerTasks struct {
scanner_task.ScannerTaskBase
}
var Tasks scannerTasks = scannerTasks{}
func simpleCombinedTasks(ctx scanner_task.TaskContext, doTask func(ctx scanner_task.TaskContext, task scanner_task.ScannerTask) error) error {
for _, task := range allTasks {
select {
case <-ctx.Done():
return ctx.Err()
default:
}
err := doTask(ctx, task)
if err != nil {
return err
}
}
return nil
}
func (t scannerTasks) BeforeScanAlbum(ctx scanner_task.TaskContext) (scanner_task.TaskContext, error) {
for _, task := range allTasks {
var err error
ctx, err = task.BeforeScanAlbum(ctx)
if err != nil {
return ctx, err
}
select {
case <-ctx.Done():
return ctx, ctx.Err()
default:
}
}
return ctx, nil
}
func (t scannerTasks) MediaFound(ctx scanner_task.TaskContext, fileInfo fs.FileInfo, mediaPath string) (bool, error) {
for _, task := range allTasks {
select {
case <-ctx.Done():
return false, ctx.Err()
default:
}
skip, err := task.MediaFound(ctx, fileInfo, mediaPath)
if err != nil {
return false, err
}
if skip {
return true, nil
}
}
return false, nil
}
func (t scannerTasks) AfterScanAlbum(ctx scanner_task.TaskContext, changedMedia []*models.Media, albumMedia []*models.Media) error {
return simpleCombinedTasks(ctx, func(ctx scanner_task.TaskContext, task scanner_task.ScannerTask) error {
return task.AfterScanAlbum(ctx, changedMedia, albumMedia)
})
}
func (t scannerTasks) AfterMediaFound(ctx scanner_task.TaskContext, media *models.Media, newMedia bool) error {
return simpleCombinedTasks(ctx, func(ctx scanner_task.TaskContext, task scanner_task.ScannerTask) error {
return task.AfterMediaFound(ctx, media, newMedia)
})
}
func (t scannerTasks) BeforeProcessMedia(ctx scanner_task.TaskContext, mediaData *media_encoding.EncodeMediaData) (scanner_task.TaskContext, error) {
for _, task := range allTasks {
select {
case <-ctx.Done():
return ctx, ctx.Err()
default:
}
var err error
ctx, err = task.BeforeProcessMedia(ctx, mediaData)
if err != nil {
return ctx, err
}
}
return ctx, nil
}
func (t scannerTasks) ProcessMedia(ctx scanner_task.TaskContext, mediaData *media_encoding.EncodeMediaData, mediaCachePath string) ([]*models.MediaURL, error) {
allNewMedia := make([]*models.MediaURL, 0)
for _, task := range allTasks {
select {
case <-ctx.Done():
return nil, ctx.Err()
default:
}
newMedia, err := task.ProcessMedia(ctx, mediaData, mediaCachePath)
if err != nil {
return []*models.MediaURL{}, err
}
allNewMedia = append(allNewMedia, newMedia...)
}
return allNewMedia, nil
}
func (t scannerTasks) AfterProcessMedia(ctx scanner_task.TaskContext, mediaData *media_encoding.EncodeMediaData, updatedURLs []*models.MediaURL, mediaIndex int, mediaTotal int) error {
return simpleCombinedTasks(ctx, func(ctx scanner_task.TaskContext, task scanner_task.ScannerTask) error {
return task.AfterProcessMedia(ctx, mediaData, updatedURLs, mediaIndex, mediaTotal)
})
}
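A minimal sketch of how an additional task could plug into this pipeline; the MyTask name is hypothetical, it would also need to be appended to allTasks above, and it relies on the embedded scanner_task.ScannerTaskBase to supply no-op defaults for the hooks it does not override (the log import is assumed):
	type MyTask struct {
		scanner_task.ScannerTaskBase
	}

	func (t MyTask) AfterMediaFound(ctx scanner_task.TaskContext, media *models.Media, newMedia bool) error {
		if !newMedia {
			return nil
		}
		// ctx carries the database handle and cancellation, exactly as the tasks above use it
		log.Printf("new media found: %s", media.Path)
		return nil
	}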

View File

@ -1,18 +1,39 @@
package scanner
package scanner_tasks
import (
"fmt"
"log"
"strconv"
"strings"
"github.com/photoview/photoview/api/graphql/models"
"github.com/photoview/photoview/api/scanner/scanner_task"
"github.com/photoview/photoview/api/scanner/scanner_tasks/processing_tasks"
"github.com/pkg/errors"
"gorm.io/gorm"
)
type VideoMetadataTask struct {
scanner_task.ScannerTaskBase
}
func (t VideoMetadataTask) AfterMediaFound(ctx scanner_task.TaskContext, media *models.Media, newMedia bool) error {
if !newMedia || media.Type != models.MediaTypeVideo {
return nil
}
err := ScanVideoMetadata(ctx.GetDB(), media)
if err != nil {
log.Printf("WARN: ScanVideoMetadata for %s failed: %s\n", media.Title, err)
}
return nil
}
func ScanVideoMetadata(tx *gorm.DB, video *models.Media) error {
data, err := readVideoMetadata(video.Path)
data, err := processing_tasks.ReadVideoMetadata(video.Path)
if err != nil {
return errors.Wrapf(err, "scan video metadata failed (%s)", video.Title)
}

View File

@ -10,6 +10,7 @@ import (
"github.com/photoview/photoview/api/graphql/models"
"github.com/photoview/photoview/api/scanner/scanner_cache"
"github.com/photoview/photoview/api/scanner/scanner_tasks/cleanup_tasks"
"github.com/photoview/photoview/api/scanner/scanner_utils"
"github.com/photoview/photoview/api/utils"
"github.com/pkg/errors"
@ -42,7 +43,7 @@ func getPhotoviewIgnore(ignorePath string) ([]string, error) {
return photoviewIgnore, scanner.Err()
}
func findAlbumsForUser(db *gorm.DB, user *models.User, album_cache *scanner_cache.AlbumScannerCache) ([]*models.Album, []error) {
func FindAlbumsForUser(db *gorm.DB, user *models.User, album_cache *scanner_cache.AlbumScannerCache) ([]*models.Album, []error) {
if err := user.FillAlbums(db); err != nil {
return nil, []error{err}
@ -215,7 +216,7 @@ func findAlbumsForUser(db *gorm.DB, user *models.User, album_cache *scanner_cach
}
}
deleteErrors := deleteOldUserAlbums(db, userAlbums, user)
deleteErrors := cleanup_tasks.DeleteOldUserAlbums(db, userAlbums, user)
scanErrors = append(scanErrors, deleteErrors...)
return userAlbums, scanErrors

View File

@ -15,10 +15,11 @@ import (
"github.com/photoview/photoview/api/graphql/auth"
graphql_endpoint "github.com/photoview/photoview/api/graphql/endpoint"
"github.com/photoview/photoview/api/routes"
"github.com/photoview/photoview/api/scanner"
"github.com/photoview/photoview/api/scanner/exif"
"github.com/photoview/photoview/api/scanner/face_detection"
"github.com/photoview/photoview/api/scanner/media_encoding/executable_worker"
"github.com/photoview/photoview/api/scanner/periodic_scanner"
"github.com/photoview/photoview/api/scanner/scanner_queue"
"github.com/photoview/photoview/api/server"
"github.com/photoview/photoview/api/utils"
@ -45,11 +46,11 @@ func main() {
log.Panicf("Could not migrate database: %s\n", err)
}
if err := scanner.InitializeScannerQueue(db); err != nil {
if err := scanner_queue.InitializeScannerQueue(db); err != nil {
log.Panicf("Could not initialize scanner queue: %s\n", err)
}
if err := scanner.InitializePeriodicScanner(db); err != nil {
if err := periodic_scanner.InitializePeriodicScanner(db); err != nil {
log.Panicf("Could not initialize periodic scanner: %s", err)
}

View File

@ -4,33 +4,33 @@ import (
"testing"
"github.com/photoview/photoview/api/graphql/models"
"github.com/photoview/photoview/api/scanner"
"github.com/photoview/photoview/api/scanner/scanner_queue"
"github.com/stretchr/testify/assert"
"gorm.io/gorm"
)
func RunScannerOnUser(t *testing.T, db *gorm.DB, user *models.User) {
if !assert.NoError(t, scanner.InitializeScannerQueue(db)) {
if !assert.NoError(t, scanner_queue.InitializeScannerQueue(db)) {
return
}
if !assert.NoError(t, scanner.AddUserToQueue(user)) {
if !assert.NoError(t, scanner_queue.AddUserToQueue(user)) {
return
}
// wait for all jobs to finish
scanner.CloseScannerQueue()
scanner_queue.CloseScannerQueue()
}
func RunScannerAll(t *testing.T, db *gorm.DB) {
if !assert.NoError(t, scanner.InitializeScannerQueue(db)) {
if !assert.NoError(t, scanner_queue.InitializeScannerQueue(db)) {
return
}
if !assert.NoError(t, scanner.AddAllToQueue()) {
if !assert.NoError(t, scanner_queue.AddAllToQueue()) {
return
}
// wait for all jobs to finish
scanner.CloseScannerQueue()
scanner_queue.CloseScannerQueue()
}

58
api/utils/media_cache.go Normal file
View File

@ -0,0 +1,58 @@
package utils
import (
"os"
"path"
"strconv"
"github.com/pkg/errors"
)
// CachePathForMedia is a low-level implementation for Media.CachePath()
func CachePathForMedia(albumID int, mediaID int) (string, error) {
// Make root cache dir if not exists
if _, err := os.Stat(MediaCachePath()); os.IsNotExist(err) {
if err := os.Mkdir(MediaCachePath(), os.ModePerm); err != nil {
return "", errors.Wrap(err, "could not make root image cache directory")
}
}
// Make album cache dir if not exists
albumCachePath := path.Join(MediaCachePath(), strconv.Itoa(int(albumID)))
if _, err := os.Stat(albumCachePath); os.IsNotExist(err) {
if err := os.Mkdir(albumCachePath, os.ModePerm); err != nil {
return "", errors.Wrap(err, "could not make album image cache directory")
}
}
// Make photo cache dir if not exists
photoCachePath := path.Join(albumCachePath, strconv.Itoa(int(mediaID)))
if _, err := os.Stat(photoCachePath); os.IsNotExist(err) {
if err := os.Mkdir(photoCachePath, os.ModePerm); err != nil {
return "", errors.Wrap(err, "could not make photo image cache directory")
}
}
return photoCachePath, nil
}
var test_cache_path string = ""
func ConfigureTestCache(tmp_dir string) {
test_cache_path = tmp_dir
}
// MediaCachePath returns the path for where the media cache is located on the file system
func MediaCachePath() string {
if test_cache_path != "" {
return test_cache_path
}
photoCache := EnvMediaCachePath.GetValue()
if photoCache == "" {
photoCache = "./media_cache"
}
return photoCache
}
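Illustrative call site (hypothetical album, media and thumbURL variables, inside a function that returns an error, with errors and path imported as elsewhere in the API). The per-media directory is created on demand underneath the root returned by MediaCachePath, so callers can join cache file names directly onto the result:
	cachePath, err := utils.CachePathForMedia(album.ID, media.ID)
	if err != nil {
		return errors.Wrap(err, "get media cache directory")
	}
	// with the default root this resolves to "./media_cache/<albumID>/<mediaID>"
	thumbnailPath := path.Join(cachePath, thumbURL.MediaName)
	_ = thumbnailPath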

View File

@ -47,26 +47,6 @@ func HandleError(message string, err error) PhotoviewError {
}
}
var test_cache_path string = ""
func ConfigureTestCache(tmp_dir string) {
test_cache_path = tmp_dir
}
// MediaCachePath returns the path for where the media cache is located on the file system
func MediaCachePath() string {
if test_cache_path != "" {
return test_cache_path
}
photoCache := EnvMediaCachePath.GetValue()
if photoCache == "" {
photoCache = "./media_cache"
}
return photoCache
}
var test_face_recognition_models_path string = ""
func ConfigureTestFaceRecognitionModelsPath(path string) {

3
ui/.eslintignore Normal file
View File

@ -0,0 +1,3 @@
node_modules
dist
coverage

View File

@ -1,3 +1,5 @@
/* global __dirname */
module.exports = {
root: true,
parser: '@typescript-eslint/parser',
@ -11,6 +13,7 @@ module.exports = {
'plugin:react/recommended',
'plugin:@typescript-eslint/eslint-recommended',
'plugin:@typescript-eslint/recommended',
'plugin:@typescript-eslint/recommended-requiring-type-checking',
'prettier',
],
globals: {
@ -21,6 +24,8 @@ module.exports = {
require: 'readonly',
},
parserOptions: {
tsconfigRootDir: __dirname,
project: ['./tsconfig.json'],
ecmaFeatures: {
jsx: true,
},
@ -41,30 +46,13 @@ module.exports = {
version: 'detect',
},
},
// parser: 'babel-eslint',
overrides: [
Object.assign(require('eslint-plugin-jest').configs.recommended, {
files: ['**/*.test.js', '**/*.test.ts', '**/*.test.tsx'],
env: { jest: true },
plugins: ['jest', 'jest-dom'],
rules: Object.assign(
require('eslint-plugin-jest').configs.recommended.rules,
{
'no-import-assign': 'off',
'react/prop-types': 'off',
'jest/valid-title': 'off',
}
),
settings: {
jest: {
version: 26,
},
},
}),
{
files: ['**/*.js'],
files: ['**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx'],
rules: {
'@typescript-eslint/explicit-module-boundary-types': 'off',
'@typescript-eslint/no-floating-promises': 'off',
'@typescript-eslint/no-misused-promises': 'off',
},
},
],

View File

@ -1,7 +0,0 @@
module.exports = {
style: {
postcss: {
plugins: [require('tailwindcss'), require('autoprefixer')],
},
},
}

22
ui/index.html Normal file
View File

@ -0,0 +1,22 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8" />
<link rel="icon" href="/photoview-logo.svg" type="image/svg+xml" />
<link rel="apple-touch-icon" href="/logo192.png" />
<meta name="viewport" content="width=device-width, initial-scale=1" />
<meta name="theme-color" content="#000000" />
<link rel="manifest" href="/manifest.json" />
<meta name="apple-mobile-web-app-title" content="Photoview" />
<meta name="apple-mobile-web-app-capable" content="yes" />
<meta name="apple-mobile-web-app-status-bar-style" content="white" />
</head>
<body>
<noscript>You need to enable JavaScript to run Photoview.</noscript>
<div id="root"></div>
<script type="module" src="/src/index.tsx"></script>
</body>
</html>

37226
ui/package-lock.json generated

File diff suppressed because it is too large

View File

@ -8,82 +8,77 @@
},
"license": "GPL-3.0",
"description": "UI app for Photoview",
"scripts": {
"start": "vite",
"build": "vite build",
"lint": "eslint ./src --max-warnings 0 --config .eslintrc.js",
"test": "vitest",
"test:ci": "CI=true vitest --reporter verbose --run --coverage",
"genSchemaTypes": "apollo client:codegen --target=typescript --globalTypesFile=src/__generated__/globalTypes.ts --passthroughCustomScalars && prettier --write */**/__generated__/*.ts",
"extractTranslations": "i18next -c i18next-parser.config.js",
"prepare": "(cd .. && ./ui/node_modules/.bin/husky install)"
},
"dependencies": {
"@apollo/client": "^3.5.8",
"@babel/preset-typescript": "^7.16.7",
"@craco/craco": "^6.4.3",
"@headlessui/react": "^1.4.3",
"@react-aria/focus": "^3.5.0",
"@rollup/plugin-babel": "^5.3.0",
"@apollo/client": "^3.6.9",
"@babel/preset-typescript": "^7.18.6",
"@headlessui/react": "^1.6.6",
"@types/geojson": "^7946.0.8",
"@types/jest": "^27.4.0",
"@types/mapbox-gl": "^2.6.0",
"@types/react": "^17.0.38",
"@types/react-dom": "^17.0.11",
"@types/jest": "^28.1.4",
"@types/mapbox-gl": "^2.7.3",
"@types/react": "^18.0.15",
"@types/react-dom": "^18.0.6",
"@types/react-helmet": "^6.1.5",
"@types/react-router-dom": "^5.3.3",
"@types/styled-components": "^5.1.21",
"@types/styled-components": "^5.1.25",
"@types/url-join": "^4.0.1",
"autoprefixer": "^9.8.6",
"babel-plugin-graphql-tag": "^3.3.0",
"blurhash": "^1.1.4",
"@vitejs/plugin-react": "^1.3.2",
"autoprefixer": "^10.4.7",
"blurhash": "^1.1.5",
"classnames": "^2.3.1",
"connect-history-api-fallback": "^1.6.0",
"connect-history-api-fallback": "^2.0.0",
"copy-to-clipboard": "^3.3.1",
"eslint-config-prettier": "^8.3.0",
"eslint-plugin-jest-dom": "^4.0.1",
"fs-extra": "^10.0.0",
"i18next": "^21.6.10",
"mapbox-gl": "^2.6.1",
"postcss": "^7.0.36",
"prettier": "^2.5.1",
"prop-types": "^15.8.1",
"react": "^17.0.2",
"i18next": "^21.8.13",
"mapbox-gl": "^2.9.1",
"postcss": "^8.4.14",
"prettier": "^2.7.1",
"react": "^18.2.0",
"react-blurhash": "^0.1.3",
"react-dom": "^17.0.2",
"react-dom": "^18.2.0",
"react-helmet": "^6.1.0",
"react-hook-form": "^7.25.3",
"react-i18next": "^11.15.3",
"react-router-dom": "^6.2.1",
"react-router-prop-types": "^1.0.5",
"react-scripts": "^4.0.3",
"react-spring": "^8.0.27",
"react-test-renderer": "^17.0.2",
"styled-components": "^5.3.3",
"react-hook-form": "^7.33.1",
"react-i18next": "^11.18.0",
"react-router-dom": "^6.3.0",
"react-scripts": "^5.0.1",
"react-swipeable": "^6.1.0",
"react-test-renderer": "^18.2.0",
"styled-components": "^5.3.5",
"subscriptions-transport-ws": "^0.11.0",
"tailwind-override": "^0.6.1",
"tailwindcss": "npm:@tailwindcss/postcss7-compat@^2.2.17",
"typescript": "^4.5.5",
"react-swipeable": "^6.1.0",
"url-join": "^4.0.1"
},
"scripts": {
"start": "BROWSER=none PORT=1234 craco start",
"build": "craco build",
"test": "npm run lint && npm run jest -- --watchAll=false",
"test:ci": "npm run lint && npm run jest:ci",
"lint": "npm run lint:types & npm run lint:eslint",
"lint:eslint": "eslint ./src --max-warnings 0 --cache --config .eslintrc.js",
"lint:types": "tsc --noemit",
"jest": "craco test --setupFilesAfterEnv ./testing/setupTests.ts",
"jest:ci": "CI=true craco test --setupFilesAfterEnv ./testing/setupTests.ts --verbose --ci --coverage",
"genSchemaTypes": "apollo client:codegen --target=typescript --globalTypesFile=src/__generated__/globalTypes.ts && prettier --write */**/__generated__/*.ts",
"extractTranslations": "i18next -c i18next-parser.config.js",
"prepare": "(cd .. && npx husky install)"
"typescript": "^4.7.4",
"url-join": "^5.0.0",
"vite": "^2.9.13",
"vite-plugin-svgr": "^2.2.0"
},
"devDependencies": {
"@testing-library/jest-dom": "^5.16.1",
"@testing-library/react": "^12.1.2",
"@testing-library/user-event": "^13.5.0",
"apollo": "2.33.9",
"apollo-language-server": "1.26.7",
"husky": "^7.0.4",
"i18next-parser": "^5.4.0",
"lint-staged": "^12.3.2",
"tsc-files": "1.1.2"
"@testing-library/jest-dom": "^5.16.4",
"@testing-library/react": "^13.3.0",
"@testing-library/user-event": "^14.2.1",
"@typescript-eslint/eslint-plugin": "^5.30.6",
"@typescript-eslint/parser": "^5.30.6",
"@vitest/ui": "^0.17.1",
"apollo": "2.34.0",
"apollo-language-server": "1.26.9",
"c8": "^7.11.3",
"eslint": "^8.19.0",
"eslint-config-prettier": "^8.5.0",
"husky": "^8.0.1",
"i18next-parser": "^6.5.0",
"lint-staged": "^13.0.3",
"vitest": "^0.17.1"
},
"overrides": {
"graphql": "^15.0.0"
"graphql": "^15.5.0"
},
"prettier": {
"trailingComma": "es5",
@ -94,8 +89,7 @@
},
"lint-staged": {
"*.{ts,tsx,js,json,css,md,graphql}": "prettier --write",
"*.{js,ts,tsx}": "eslint --cache --fix --max-warnings 0",
"*.{ts,tsx}": "tsc-files --noEmit"
"*.{js,ts,tsx}": "eslint --cache --fix --max-warnings 0"
},
"browserslist": {
"production": [

6
ui/postcss.config.js Normal file
View File

@ -0,0 +1,6 @@
module.exports = {
plugins: {
tailwindcss: {},
autoprefixer: {},
},
}

View File

@ -1,42 +0,0 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8" />
<link rel="icon" href="%PUBLIC_URL%/photoview-logo.svg" type="image/svg+xml" />
<link rel="apple-touch-icon" href="%PUBLIC_URL%/logo192.png" />
<meta name="viewport" content="width=device-width, initial-scale=1" />
<meta name="theme-color" content="#000000" />
<!--
manifest.json provides metadata used when your web app is installed on a
user's mobile device or desktop. See https://developers.google.com/web/fundamentals/web-app-manifest/
-->
<link rel="manifest" href="%PUBLIC_URL%/manifest.json" />
<meta name="apple-mobile-web-app-title" content="Photoview" />
<meta name="apple-mobile-web-app-capable" content="yes" />
<meta name="apple-mobile-web-app-status-bar-style" content="white" />
<!--
Notice the use of %PUBLIC_URL% in the tags above.
It will be replaced with the URL of the `public` folder during the build.
Only files inside the `public` folder can be referenced from the HTML.
Unlike "/favicon.ico" or "favicon.ico", "%PUBLIC_URL%/favicon.ico" will
work correctly both with client-side routing and a non-root public URL.
Learn how to configure a non-root public URL by running `npm run build`.
-->
</head>
<body>
<noscript>You need to enable JavaScript to run Photoview.</noscript>
<div id="root"></div>
<!--
This HTML file is a template.
If you open it directly in the browser, you will see an empty page.
You can add webfonts, meta tags, or analytics to this file.
The build step will place the bundled scripts into the <body> tag.
To begin the development, run `npm start` or `yarn start`.
To create a production bundle, use `npm run build` or `yarn build`.
-->
</body>
</html>

View File

@ -5,3 +5,16 @@ declare module '*.svg' {
export { ReactComponent }
// export default content
}
interface ImportMetaEnv {
readonly REACT_APP_BUILD_VERSION: string | undefined
readonly REACT_APP_BUILD_DATE: string | undefined
readonly REACT_APP_BUILD_COMMIT_SHA: string | undefined
}
interface ImportMeta {
readonly env: ImportMetaEnv
}
type Time = string
type Any = object

View File

@ -4,7 +4,7 @@ import React from 'react'
import { MemoryRouter, Route, Routes } from 'react-router-dom'
import AlbumPage from './AlbumPage'
jest.mock('../../hooks/useScrollPagination')
vi.mock('../../hooks/useScrollPagination')
test('AlbumPage renders', () => {
render(

View File

@ -1,6 +1,8 @@
import React, { useCallback } from 'react'
import { useQuery, gql } from '@apollo/client'
import AlbumGallery from '../../components/albumGallery/AlbumGallery'
import AlbumGallery, {
ALBUM_GALLERY_FRAGMENT,
} from '../../components/albumGallery/AlbumGallery'
import Layout from '../../components/layout/Layout'
import useURLParameters from '../../hooks/useURLParameters'
import useScrollPagination from '../../hooks/useScrollPagination'
@ -12,6 +14,8 @@ import { useParams } from 'react-router-dom'
import { isNil } from '../../helpers/utils'
const ALBUM_QUERY = gql`
${ALBUM_GALLERY_FRAGMENT}
query albumQuery(
$id: ID!
$onlyFavorites: Boolean
@ -21,41 +25,7 @@ const ALBUM_QUERY = gql`
$offset: Int
) {
album(id: $id) {
id
title
subAlbums(
order: { order_by: "title", order_direction: $orderDirection }
) {
id
title
thumbnail {
id
thumbnail {
url
}
}
}
media(
paginate: { limit: $limit, offset: $offset }
order: { order_by: $mediaOrderBy, order_direction: $orderDirection }
onlyFavorites: $onlyFavorites
) {
id
type
blurhash
thumbnail {
url
width
height
}
highRes {
url
}
videoWeb {
url
}
favorite
}
...AlbumGalleryFields
}
}
`
@ -100,7 +70,7 @@ function AlbumPage() {
})
const toggleFavorites = useCallback(
onlyFavorites => {
(onlyFavorites: boolean) => {
if (
(refetchNeededAll && !onlyFavorites) ||
(refetchNeededFavorites && onlyFavorites)

View File

@ -8,35 +8,35 @@
// ====================================================
export interface getMyAlbums_myAlbums_thumbnail_thumbnail {
__typename: "MediaURL";
__typename: 'MediaURL'
/**
* URL for previewing the image
*/
url: string;
url: string
}
export interface getMyAlbums_myAlbums_thumbnail {
__typename: "Media";
id: string;
__typename: 'Media'
id: string
/**
* URL to display the media in a smaller resolution
*/
thumbnail: getMyAlbums_myAlbums_thumbnail_thumbnail | null;
thumbnail: getMyAlbums_myAlbums_thumbnail_thumbnail | null
}
export interface getMyAlbums_myAlbums {
__typename: "Album";
id: string;
title: string;
__typename: 'Album'
id: string
title: string
/**
* An image in this album used for previewing this album
*/
thumbnail: getMyAlbums_myAlbums_thumbnail | null;
thumbnail: getMyAlbums_myAlbums_thumbnail | null
}
export interface getMyAlbums {
/**
* List of albums owned by the logged in user.
*/
myAlbums: getMyAlbums_myAlbums[];
myAlbums: getMyAlbums_myAlbums[]
}

View File

@ -7,14 +7,12 @@ import * as authentication from '../../helpers/authentication'
import InitialSetupPage from './InitialSetupPage'
import { mockInitialSetupGraphql } from './loginTestHelpers'
jest.mock('../../helpers/authentication.ts')
vi.mock('../../helpers/authentication.ts')
const authToken = authentication.authToken as jest.Mock<
ReturnType<typeof authentication.authToken>
>
const authToken = vi.mocked(authentication.authToken)
describe('Initial setup page', () => {
test('Render initial setup form', async () => {
test('Render initial setup form', () => {
authToken.mockImplementation(() => null)
const history = createMemoryHistory({

View File

@ -10,6 +10,10 @@ import { CheckInitialSetup } from './__generated__/CheckInitialSetup'
import { useForm } from 'react-hook-form'
import { Submit, TextField } from '../../primitives/form/Input'
import MessageBox from '../../primitives/form/MessageBox'
import {
InitialSetup,
InitialSetupVariables,
} from './__generated__/InitialSetup'
const initialSetupMutation = gql`
mutation InitialSetup(
@ -59,13 +63,12 @@ const InitialSetupPage = () => {
}, [notInitialSetup])
const [authorize, { loading: authorizeLoading, data: authorizationData }] =
useMutation(initialSetupMutation, {
useMutation<InitialSetup, InitialSetupVariables>(initialSetupMutation, {
onCompleted: data => {
const { success, token } = data.initialSetupWizard
if (!data.initialSetupWizard) return
if (success) {
login(token)
}
const { success, token } = data.initialSetupWizard
if (success && token) login(token)
},
})
@ -84,8 +87,8 @@ const InitialSetupPage = () => {
}
let errorMessage = null
if (authorizationData && !authorizationData.initialSetupWizard.success) {
errorMessage = authorizationData.initialSetupWizard.status
if (authorizationData && !authorizationData?.initialSetupWizard?.success) {
errorMessage = authorizationData?.initialSetupWizard?.status
}
return (
@ -138,7 +141,7 @@ const InitialSetupPage = () => {
<MessageBox
type="negative"
message={errorMessage}
show={errorMessage}
show={!!errorMessage}
/>
<Submit className="mt-2" disabled={authorizeLoading}>
{t('login_page.initial_setup.field.submit', 'Setup Photoview')}

View File

@ -7,11 +7,9 @@ import { createMemoryHistory } from 'history'
import { MockedProvider } from '@apollo/client/testing'
import { mockInitialSetupGraphql } from './loginTestHelpers'
jest.mock('../../helpers/authentication.ts')
vi.mock('../../helpers/authentication.ts')
const authToken = authentication.authToken as jest.Mock<
ReturnType<typeof authentication.authToken>
>
const authToken = vi.mocked(authentication.authToken)
describe('Login page redirects', () => {
test('Auth token redirect', async () => {
@ -56,7 +54,7 @@ describe('Login page redirects', () => {
})
describe('Login page', () => {
test('Render login form', async () => {
test('Render login form', () => {
authToken.mockImplementation(() => null)
const history = createMemoryHistory({

View File

@ -29,7 +29,7 @@ const LogoHeader = () => {
<div className="flex justify-center flex-col mb-14 mt-20">
<img
className="h-24"
src={process.env.PUBLIC_URL + '/photoview-logo.svg'}
src={import.meta.env.BASE_URL + 'photoview-logo.svg'}
alt="photoview logo"
/>
<h1 className="text-3xl text-center mt-4">
@ -106,7 +106,7 @@ const LoginForm = () => {
<input
type="submit"
disabled={loading}
value={t('login_page.field.submit', 'Sign in') as string}
value={t('login_page.field.submit', 'Sign in')}
className="rounded-md px-8 py-2 mt-2 focus:outline-none cursor-pointer bg-gradient-to-bl from-[#FF8246] to-[#D6264D] text-white font-semibold focus:ring-2 focus:ring-red-200 disabled:cursor-default disabled:opacity-80"
/>
<MessageBox

View File

@ -8,13 +8,13 @@
// ====================================================
export interface CheckInitialSetup_siteInfo {
__typename: "SiteInfo";
__typename: 'SiteInfo'
/**
* Whether or not the initial setup wizard should be shown
*/
initialSetup: boolean;
initialSetup: boolean
}
export interface CheckInitialSetup {
siteInfo: CheckInitialSetup_siteInfo;
siteInfo: CheckInitialSetup_siteInfo
}

View File

@ -1,15 +1,26 @@
import React from 'react'
import styled from 'styled-components'
import { ProtectedImage } from '../../components/photoGallery/ProtectedMedia'
import {
ProtectedImage,
ProtectedImageProps,
} from '../../components/photoGallery/ProtectedMedia'
import {
myFaces_myFaceGroups_imageFaces_media,
myFaces_myFaceGroups_imageFaces_rectangle,
} from './__generated__/myFaces'
type FaceImageProps = ProtectedImageProps & {
origin: { x: number; y: number }
selectable: boolean
scale: number
}
const FaceImage = styled(
// eslint-disable-next-line @typescript-eslint/no-unused-vars
const FaceImage = styled(({ origin, selectable, scale, ...rest }) => (
({ origin, selectable, scale, ...rest }: FaceImageProps) => (
<ProtectedImage {...rest} />
))<{ origin: { x: number; y: number }; selectable: boolean; scale: number }>`
)
)`
position: absolute;
transform-origin: ${({ origin }) => `${origin.x * 100}% ${origin.y * 100}%`};
object-fit: cover;

View File

@ -11,7 +11,7 @@ import { MockedProvider } from '@apollo/client/testing'
import { MemoryRouter } from 'react-router'
import { myFaces_myFaceGroups } from './__generated__/myFaces'
jest.mock('../../hooks/useScrollPagination')
vi.mock('../../hooks/useScrollPagination')
describe('PeoplePage component', () => {
const graphqlMocks = [
@ -142,7 +142,7 @@ describe('FaceDetails component', () => {
<MockedProvider mocks={[]} addTypename={false}>
<FaceDetails
editLabel={false}
setEditLabel={jest.fn()}
setEditLabel={vi.fn()}
group={emptyFaceGroup}
/>
</MockedProvider>
@ -161,7 +161,7 @@ describe('FaceDetails component', () => {
<MockedProvider mocks={[]} addTypename={false}>
<FaceDetails
editLabel={false}
setEditLabel={jest.fn()}
setEditLabel={vi.fn()}
group={labeledFaceGroup}
/>
</MockedProvider>
@ -181,7 +181,7 @@ describe('FaceDetails component', () => {
label: 'John Doe',
},
},
newData: jest.fn(() => ({
newData: vi.fn(() => ({
data: {
setFaceGroupLabel: {
__typename: 'FaceGroup',
@ -218,7 +218,7 @@ describe('FaceDetails component', () => {
})
})
test('cancel add label to face group', async () => {
test('cancel add label to face group', () => {
render(
<MockedProvider mocks={[]} addTypename={false}>
<MemoryRouter>

View File

@ -4,7 +4,7 @@ import { MockedProvider } from '@apollo/client/testing'
import SingleFaceGroup, { SINGLE_FACE_GROUP } from './SingleFaceGroup'
import { MemoryRouter } from 'react-router-dom'
jest.mock('../../../hooks/useScrollPagination')
vi.mock('../../../hooks/useScrollPagination')
test('single face group', async () => {
const graphqlMocks = [
@ -92,9 +92,6 @@ test('single face group', async () => {
)
await waitFor(() => {
// expect(screen.queryByText('Loading more media')).not.toHaveClass('active')
expect(screen.queryByText('Face Group Name')).toBeInTheDocument()
})
expect(screen.getAllByRole('img')).toHaveLength(2)
})
})

View File

@ -2,8 +2,8 @@ import { gql, useQuery } from '@apollo/client'
import React, { useEffect, useReducer } from 'react'
import { useTranslation } from 'react-i18next'
import PaginateLoader from '../../../components/PaginateLoader'
import PhotoGallery from '../../../components/photoGallery/PhotoGallery'
import { photoGalleryReducer } from '../../../components/photoGallery/photoGalleryReducer'
import MediaGallery from '../../../components/photoGallery/MediaGallery'
import { mediaGalleryReducer } from '../../../components/photoGallery/mediaGalleryReducer'
import useScrollPagination from '../../../hooks/useScrollPagination'
import FaceGroupTitle from './FaceGroupTitle'
import {
@ -62,7 +62,7 @@ const SingleFaceGroup = ({ faceGroupID }: SingleFaceGroupProps) => {
},
})
const [mediaState, dispatchMedia] = useReducer(photoGalleryReducer, {
const [mediaState, dispatchMedia] = useReducer(mediaGalleryReducer, {
presenting: false,
activeIndex: -1,
media: [],
@ -91,7 +91,7 @@ const SingleFaceGroup = ({ faceGroupID }: SingleFaceGroupProps) => {
<div ref={containerElem}>
<FaceGroupTitle faceGroup={faceGroup} />
<div>
<PhotoGallery
<MediaGallery
loading={loading}
dispatchMedia={dispatchMedia}
mediaState={mediaState}

View File

@ -8,18 +8,18 @@
// ====================================================
export interface combineFaces_combineFaceGroups {
__typename: "FaceGroup";
id: string;
__typename: 'FaceGroup'
id: string
}
export interface combineFaces {
/**
* Merge two face groups into a single one, all ImageFaces from source will be moved to destination
*/
combineFaceGroups: combineFaces_combineFaceGroups;
combineFaceGroups: combineFaces_combineFaceGroups
}
export interface combineFacesVariables {
destID: string;
srcID: string;
destID: string
srcID: string
}

View File

@ -8,24 +8,24 @@
// ====================================================
export interface moveImageFaces_moveImageFaces_imageFaces {
__typename: "ImageFace";
id: string;
__typename: 'ImageFace'
id: string
}
export interface moveImageFaces_moveImageFaces {
__typename: "FaceGroup";
id: string;
imageFaces: moveImageFaces_moveImageFaces_imageFaces[];
__typename: 'FaceGroup'
id: string
imageFaces: moveImageFaces_moveImageFaces_imageFaces[]
}
export interface moveImageFaces {
/**
* Move a list of ImageFaces to another face group
*/
moveImageFaces: moveImageFaces_moveImageFaces;
moveImageFaces: moveImageFaces_moveImageFaces
}
export interface moveImageFacesVariables {
faceIDs: string[];
destFaceGroupID: string;
faceIDs: string[]
destFaceGroupID: string
}

View File

@ -8,13 +8,13 @@
// ====================================================
export interface recognizeUnlabeledFaces_recognizeUnlabeledFaces {
__typename: "ImageFace";
id: string;
__typename: 'ImageFace'
id: string
}
export interface recognizeUnlabeledFaces {
/**
* Check all unlabeled faces to see if they match a labeled FaceGroup, and move them if they match
*/
recognizeUnlabeledFaces: recognizeUnlabeledFaces_recognizeUnlabeledFaces[];
recognizeUnlabeledFaces: recognizeUnlabeledFaces_recognizeUnlabeledFaces[]
}

View File

@ -49,7 +49,7 @@ const MapClusterMarker = ({
marker,
dispatchMarkerMedia,
}: MapClusterMarkerProps) => {
const thumbnail = JSON.parse(marker.thumbnail)
const thumbnail = JSON.parse(marker.thumbnail) as { url: string }
const presentMedia = () => {
dispatchMarkerMedia({

View File

@ -7,7 +7,7 @@ import styled from 'styled-components'
import Layout from '../../components/layout/Layout'
import { registerMediaMarkers } from '../../components/mapbox/mapboxHelperFunctions'
import useMapboxMap from '../../components/mapbox/MapboxMap'
import { urlPresentModeSetupHook } from '../../components/photoGallery/photoGalleryReducer'
import { urlPresentModeSetupHook } from '../../components/photoGallery/mediaGalleryReducer'
import MapPresentMarker from './MapPresentMarker'
import { PlacesAction, placesReducer } from './placesReducer'
import { mediaGeoJson } from './__generated__/mediaGeoJson'
@ -108,7 +108,7 @@ const configureMapbox =
map.addSource('media', {
type: 'geojson',
data: mapboxData?.myMediaGeoJson,
data: mapboxData?.myMediaGeoJson as never,
cluster: true,
clusterRadius: 50,
clusterProperties: {

View File

@ -11,5 +11,5 @@ export interface mediaGeoJson {
/**
* Get media owned by the logged in user, returned in GeoJson format
*/
myMediaGeoJson: any
myMediaGeoJson: Any
}

View File

@ -1,11 +1,11 @@
import { PresentMarker } from './PlacesPage'
import {
PhotoGalleryState,
MediaGalleryState,
PhotoGalleryAction,
photoGalleryReducer,
} from './../../components/photoGallery/photoGalleryReducer'
mediaGalleryReducer,
} from '../../components/photoGallery/mediaGalleryReducer'
export interface PlacesState extends PhotoGalleryState {
export interface PlacesState extends MediaGalleryState {
presentMarker?: PresentMarker
}
@ -36,6 +36,6 @@ export function placesReducer(
}
}
default:
return photoGalleryReducer(state, action)
return mediaGalleryReducer(state, action)
}
}

View File

@ -0,0 +1,51 @@
import React from 'react'
import { MockedProvider } from '@apollo/client/testing'
import { render, screen } from '@testing-library/react'
import {
CONCURRENT_WORKERS_QUERY,
SET_CONCURRENT_WORKERS_MUTATION,
ScannerConcurrentWorkers,
} from './ScannerConcurrentWorkers'
test('load ScannerConcurrentWorkers', () => {
const graphqlMocks = [
{
request: {
query: CONCURRENT_WORKERS_QUERY,
},
result: {
data: {
siteInfo: { concurrentWorkers: 3 },
},
},
},
{
request: {
query: SET_CONCURRENT_WORKERS_MUTATION,
variables: {
workers: '1',
},
},
result: {
data: {},
},
},
]
render(
<MockedProvider
mocks={graphqlMocks}
addTypename={false}
defaultOptions={{
// disable cache, required to make fragments work
watchQuery: { fetchPolicy: 'no-cache' },
query: { fetchPolicy: 'no-cache' },
}}
>
<ScannerConcurrentWorkers />
</MockedProvider>
)
expect(screen.getByText('Scanner concurrent workers')).toBeInTheDocument()
})

View File

@ -9,7 +9,7 @@ import {
} from './__generated__/setConcurrentWorkers'
import { TextField } from '../../primitives/form/Input'
const CONCURRENT_WORKERS_QUERY = gql`
export const CONCURRENT_WORKERS_QUERY = gql`
query concurrentWorkersQuery {
siteInfo {
concurrentWorkers
@ -17,15 +17,18 @@ const CONCURRENT_WORKERS_QUERY = gql`
}
`
const SET_CONCURRENT_WORKERS_MUTATION = gql`
export const SET_CONCURRENT_WORKERS_MUTATION = gql`
mutation setConcurrentWorkers($workers: Int!) {
setScannerConcurrentWorkers(workers: $workers)
}
`
const ScannerConcurrentWorkers = () => {
export const ScannerConcurrentWorkers = () => {
const { t } = useTranslation()
const workerAmountServerValue = useRef<null | number>(null)
const [workerAmount, setWorkerAmount] = useState(0)
const workerAmountQuery = useQuery<concurrentWorkersQuery>(
CONCURRENT_WORKERS_QUERY,
{
@ -41,9 +44,6 @@ const ScannerConcurrentWorkers = () => {
setConcurrentWorkersVariables
>(SET_CONCURRENT_WORKERS_MUTATION)
const workerAmountServerValue = useRef<null | number>(null)
const [workerAmount, setWorkerAmount] = useState(0)
const updateWorkerAmount = (workerAmount: number) => {
if (workerAmountServerValue.current != workerAmount) {
workerAmountServerValue.current = workerAmount
@ -86,5 +86,3 @@ const ScannerConcurrentWorkers = () => {
</div>
)
}
export default ScannerConcurrentWorkers

View File

@ -1,7 +1,7 @@
import React from 'react'
import { useMutation, gql } from '@apollo/client'
import PeriodicScanner from './PeriodicScanner'
import ScannerConcurrentWorkers from './ScannerConcurrentWorkers'
import { ScannerConcurrentWorkers } from './ScannerConcurrentWorkers'
import { SectionTitle, InputLabelDescription } from './SettingsPage'
import { useTranslation } from 'react-i18next'
import { scanAllMutation } from './__generated__/scanAllMutation'

View File

@ -5,6 +5,7 @@ import { useIsAdmin } from '../../components/routes/AuthorizedRoute'
import Layout from '../../components/layout/Layout'
import ScannerSection from './ScannerSection'
import UserPreferences from './UserPreferences'
import ThumbnailPreferences from './ThumbnailPreferences'
import UsersTable from './Users/UsersTable'
import VersionInfo from './VersionInfo'
import classNames from 'classnames'
@ -46,6 +47,7 @@ const SettingsPage = () => {
<>
<ScannerSection />
<UsersTable />
<ThumbnailPreferences />
</>
)}
<VersionInfo />

View File

@ -0,0 +1,52 @@
import React from 'react'
import { MockedProvider } from '@apollo/client/testing'
import { render, screen } from '@testing-library/react'
import { ThumbnailFilter } from '../../__generated__/globalTypes'
import ThumbnailPreferences, {
THUMBNAIL_METHOD_QUERY,
SET_THUMBNAIL_METHOD_MUTATION,
} from './ThumbnailPreferences'
test('load ThumbnailPreferences', () => {
const graphqlMocks = [
{
request: {
query: THUMBNAIL_METHOD_QUERY,
},
result: {
data: {
siteInfo: { method: ThumbnailFilter.NearestNeighbor },
},
},
},
{
request: {
query: SET_THUMBNAIL_METHOD_MUTATION,
variables: {
method: ThumbnailFilter.Lanczos,
},
},
result: {
data: {},
},
},
]
render(
<MockedProvider
mocks={graphqlMocks}
addTypename={false}
defaultOptions={{
// disable cache, required to make fragments work
watchQuery: { fetchPolicy: 'no-cache' },
query: { fetchPolicy: 'no-cache' },
}}
>
<ThumbnailPreferences />
</MockedProvider>
)
expect(screen.getByText('Downsampling method')).toBeInTheDocument()
})

View File

@ -0,0 +1,135 @@
import { gql } from '@apollo/client'
import React, { useRef, useState } from 'react'
import { useMutation, useQuery } from '@apollo/client'
import {
SectionTitle,
InputLabelDescription,
InputLabelTitle,
} from './SettingsPage'
import { useTranslation } from 'react-i18next'
import { ThumbnailFilter } from '../../__generated__/globalTypes'
import { thumbnailMethodQuery } from './__generated__/thumbnailMethodQuery'
import {
setThumbnailMethodMutation,
setThumbnailMethodMutationVariables,
} from './__generated__/setThumbnailMethodMutation'
import Dropdown, { DropdownItem } from '../../primitives/form/Dropdown'
import Loader from '../../primitives/Loader'
export const THUMBNAIL_METHOD_QUERY = gql`
query thumbnailMethodQuery {
siteInfo {
thumbnailMethod
}
}
`
export const SET_THUMBNAIL_METHOD_MUTATION = gql`
mutation setThumbnailMethodMutation($method: ThumbnailFilter!) {
setThumbnailDownsampleMethod(method: $method)
}
`
const ThumbnailPreferences = () => {
const { t } = useTranslation()
const downsampleMethodServerValue = useRef<null | number>(null)
const [downsampleMethod, setDownsampleMethod] = useState(0)
const downsampleMethodQuery = useQuery<thumbnailMethodQuery>(
THUMBNAIL_METHOD_QUERY,
{
onCompleted(data) {
setDownsampleMethod(data.siteInfo.thumbnailMethod)
downsampleMethodServerValue.current = data.siteInfo.thumbnailMethod
},
}
)
const [setDownsampleMutation, downsampleMutationData] = useMutation<
setThumbnailMethodMutation,
setThumbnailMethodMutationVariables
>(SET_THUMBNAIL_METHOD_MUTATION)
const updateDownsampleMethod = (downsampleMethod: number) => {
if (downsampleMethodServerValue.current != downsampleMethod) {
downsampleMethodServerValue.current = downsampleMethod
setDownsampleMutation({
variables: {
method: downsampleMethod,
},
})
}
}
const methodItems: DropdownItem[] = [
{
label: t(
'settings.thumbnails.method.filter.nearest_neighbor',
'Nearest Neighbor (default)'
),
value: ThumbnailFilter.NearestNeighbor,
},
{
label: t('settings.thumbnails.method.filter.box', 'Box'),
value: ThumbnailFilter.Box,
},
{
label: t('settings.thumbnails.method.filter.linear', 'Linear'),
value: ThumbnailFilter.Linear,
},
{
label: t(
'settings.thumbnails.method.filter.mitchell_netravali',
'Mitchell-Netravali'
),
value: ThumbnailFilter.MitchellNetravali,
},
{
label: t('settings.thumbnails.method.filter.catmull_rom', 'Catmull-Rom'),
value: ThumbnailFilter.CatmullRom,
},
{
label: t(
'settings.thumbnails.method.filter.Lanczos',
'Lanczos (highest quality)'
),
value: ThumbnailFilter.Lanczos,
},
]
return (
<div>
<SectionTitle>
{t('settings.thumbnails.title', 'Thumbnail preferences')}
</SectionTitle>
<label htmlFor="thumbnail_method_field">
<InputLabelTitle>
{t('settings.thumbnails.method.label', 'Downsampling method')}
</InputLabelTitle>
<InputLabelDescription>
{t(
'settings.thumbnails.method.description',
'The filter to use when generating thumbnails'
)}
</InputLabelDescription>
</label>
<Dropdown
aria-label="Method"
items={methodItems}
selected={downsampleMethod}
setSelected={value => {
setDownsampleMethod(value)
updateDownsampleMethod(value)
}}
/>
<Loader
active={downsampleMethodQuery.loading || downsampleMutationData.loading}
size="small"
style={{ marginLeft: 16 }}
/>
</div>
)
}
export default ThumbnailPreferences

View File

@ -47,6 +47,12 @@ const languagePreferences = [
flag: 'pt',
value: LanguageTranslation.Portuguese,
},
{
key: 13,
label: 'Euskara',
flag: 'eu',
value: LanguageTranslation.Basque,
},
]
const themePreferences = (t: TranslationFn) => [

View File

@ -33,8 +33,8 @@ const gqlMock = [
]
test('Add user with username and path', async () => {
const userAdded = jest.fn()
const setShow = jest.fn()
const userAdded = vi.fn()
const setShow = vi.fn()
render(
<MockedProvider addTypename={true} mocks={gqlMock}>
@ -62,8 +62,8 @@ test('Add user with username and path', async () => {
})
test('Add user with only username', async () => {
const userAdded = jest.fn()
const setShow = jest.fn()
const userAdded = vi.fn()
const setShow = vi.fn()
render(
<MockedProvider addTypename={true} mocks={gqlMock}>

View File

@ -4,6 +4,11 @@ import { useTranslation } from 'react-i18next'
import Checkbox from '../../../primitives/form/Checkbox'
import { TextField, Button, ButtonGroup } from '../../../primitives/form/Input'
import { TableRow, TableCell } from '../../../primitives/Table'
import { createUser, createUserVariables } from './__generated__/createUser'
import {
userAddRootPath,
userAddRootPathVariables,
} from './__generated__/userAddRootPath'
export const CREATE_USER_MUTATION = gql`
mutation createUser($username: String!, $admin: Boolean!) {
@ -46,21 +51,22 @@ const AddUserRow = ({ setShow, show, onUserAdded }: AddUserRowProps) => {
onUserAdded()
}
const [addRootPath, { loading: addRootPathLoading }] = useMutation(
USER_ADD_ROOT_PATH_MUTATION,
{
const [addRootPath, { loading: addRootPathLoading }] = useMutation<
userAddRootPath,
userAddRootPathVariables
>(USER_ADD_ROOT_PATH_MUTATION, {
onCompleted: () => {
finished()
},
onError: () => {
finished()
},
}
)
})
const [createUser, { loading: createUserLoading }] = useMutation(
CREATE_USER_MUTATION,
{
const [createUser, { loading: createUserLoading }] = useMutation<
createUser,
createUserVariables
>(CREATE_USER_MUTATION, {
onCompleted: ({ createUser: { id } }) => {
if (state.rootPath) {
addRootPath({
@ -73,8 +79,7 @@ const AddUserRow = ({ setShow, show, onUserAdded }: AddUserRowProps) => {
finished()
}
},
}
)
})
const loading = addRootPathLoading || createUserLoading

View File

@ -40,7 +40,7 @@ const ChangePasswordModal = ({
title={t('settings.users.password_reset.title', 'Change password')}
description={
<Trans t={t} i18nKey="settings.users.password_reset.description">
Change password for <b>{{ username: user.username }}</b>
Change password for <b>{user.username}</b>
</Trans>
}
actions={[

View File

@ -67,7 +67,7 @@ export type UserRowChildProps = {
export type UserRowProps = {
user: settingsUsersQuery_user
refetchUsers(): void
refetchUsers: () => void
}
const UserRow = ({ user, refetchUsers }: UserRowProps) => {

View File

@ -8,17 +8,17 @@
// ====================================================
export interface scanUser_scanUser {
__typename: "ScannerResult";
success: boolean;
__typename: 'ScannerResult'
success: boolean
}
export interface scanUser {
/**
* Scan a single user for new media
*/
scanUser: scanUser_scanUser;
scanUser: scanUser_scanUser
}
export interface scanUserVariables {
userId: string;
userId: string
}

View File

@ -7,10 +7,10 @@ import {
SectionTitle,
} from './SettingsPage'
const VERSION = process.env.REACT_APP_BUILD_VERSION ?? 'undefined'
const BUILD_DATE = process.env.REACT_APP_BUILD_DATE ?? 'undefined'
const VERSION = import.meta.env.REACT_APP_BUILD_VERSION ?? 'undefined'
const BUILD_DATE = import.meta.env.REACT_APP_BUILD_DATE ?? 'undefined'
const COMMIT_SHA = process.env.REACT_APP_BUILD_COMMIT_SHA as string | undefined
const COMMIT_SHA = import.meta.env.REACT_APP_BUILD_COMMIT_SHA
let commitLink: ReactElement
if (COMMIT_SHA) {

View File

@ -12,9 +12,9 @@ export interface changeScanIntervalMutation {
* Set how often, in seconds, the server should automatically scan for new media,
* a value of 0 will disable periodic scans
*/
setPeriodicScanInterval: number;
setPeriodicScanInterval: number
}
export interface changeScanIntervalMutationVariables {
interval: number;
interval: number
}

View File

@ -8,13 +8,13 @@
// ====================================================
export interface concurrentWorkersQuery_siteInfo {
__typename: "SiteInfo";
__typename: 'SiteInfo'
/**
* How many max concurrent scanner jobs that should run at once
*/
concurrentWorkers: number;
concurrentWorkers: number
}
export interface concurrentWorkersQuery {
siteInfo: concurrentWorkersQuery_siteInfo;
siteInfo: concurrentWorkersQuery_siteInfo
}

View File

@ -8,14 +8,14 @@
// ====================================================
export interface scanAllMutation_scanAll {
__typename: "ScannerResult";
success: boolean;
message: string | null;
__typename: 'ScannerResult'
success: boolean
message: string | null
}
export interface scanAllMutation {
/**
* Scan all users for new media
*/
scanAll: scanAllMutation_scanAll;
scanAll: scanAllMutation_scanAll
}

View File

@ -8,13 +8,13 @@
// ====================================================
export interface scanIntervalQuery_siteInfo {
__typename: "SiteInfo";
__typename: 'SiteInfo'
/**
* How often automatic scans should be initiated in seconds
*/
periodicScanInterval: number;
periodicScanInterval: number
}
export interface scanIntervalQuery {
siteInfo: scanIntervalQuery_siteInfo;
siteInfo: scanIntervalQuery_siteInfo
}

View File

@ -11,9 +11,9 @@ export interface setConcurrentWorkers {
/**
* Set max number of concurrent scanner jobs running at once
*/
setScannerConcurrentWorkers: number;
setScannerConcurrentWorkers: number
}
export interface setConcurrentWorkersVariables {
workers: number;
workers: number
}

Some files were not shown because too many files have changed in this diff