diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 610f0bdb..d90a2fc0 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -23,6 +23,9 @@ jobs: go-version-file: "go.mod" cache-dependency-path: "go.sum" + - name: Install ExifTool + run: sudo apt-get install -y exiftool + - name: golangci-lint uses: golangci/golangci-lint-action@v6 with: diff --git a/.gitignore b/.gitignore index 1c7b6703..5248dd31 100644 --- a/.gitignore +++ b/.gitignore @@ -18,3 +18,5 @@ archive.tar *.test .env *.env +test-config.yaml +__debug_* diff --git a/.goreleaser.yaml b/.goreleaser.yaml index 80d7835c..212551cb 100644 --- a/.goreleaser.yaml +++ b/.goreleaser.yaml @@ -1,5 +1,7 @@ # This is an example .goreleaser.yml file with some sensible defaults. # Make sure to check the documentation at https://goreleaser.com +version: 2 + before: hooks: # You may remove this if you don't use go modules. @@ -23,7 +25,7 @@ builds: flags: - -trimpath ldflags: - - '-s -w "-extldflags=-static" -X main.version={{.Version}} -X main.commit={{.Commit}} -X main.date={{.Date}} -X main.builtBy=goreleaser' + - '-s -w "-extldflags=-static" -X version.Version={{.Version}} -X version.Commit={{.Commit}} -X version.Date={{.Date}} -X main.builtBy=goreleaser' archives: - format: tar.gz @@ -54,3 +56,191 @@ changelog: # Feel free to remove those if you don't want/use them. # yaml-language-server: $schema=https://goreleaser.com/static/schema.json # vim: set ts=2 sw=2 tw=0 fo=cnqoj + +# .goreleaser.yaml +release: + # Repo in which the release will be created. + # Default: extracted from the origin remote URL or empty if its private hosted. + # github: + # owner: user + # name: repo + + # IDs of the archives to use. + # Empty means all IDs. + # + # Default: []. + # ids: + # - foo + # - bar + + # If set to true, will not auto-publish the release. + # Note: all GitHub releases start as drafts while artifacts are uploaded. + # Available only for GitHub and Gitea. 
+ draft: true + + # Whether to remove existing draft releases with the same name before creating + # a new one. + # + # Only effective if `draft` is set to true. + # Available only for GitHub. + replace_existing_draft: true + + # Whether to remove an artifact that already exists. + # + # Available only for GitHub. + # This might be a bit expensive (rate-limiting speaking), so it is only done + # when the upload of an artifact fails with a 422 (which means it already + # exists in the release). + # We then grab the list of artifacts from the release, and delete the file + # that matches the one we're trying to upload. + # GoReleaser will then retry its upload. + replace_existing_artifacts: true + + # Useful if you want to delay the creation of the tag in the remote. + # You can create the tag locally, but not push it, and run GoReleaser. + # It'll then set the `target_commitish` portion of the GitHub release to the + # value of this field. + # Only works on GitHub. + # + # Default: ''. + # Templates: allowed. + target_commitish: "{{ .Commit }}" + + # This allows to change which tag GitHub will create. + # Usually you'll use this together with `target_commitish`, or if you want to + # publish a binary from a monorepo into a public repository somewhere, without + # the tag prefix. + # + # This feature is only available in GoReleaser Pro. + # Default: '{{ .PrefixedCurrentTag }}'. + # Templates: allowed. + # tag: "{{ .CurrentTag }}" + + # If set, will create a release discussion in the category specified. + # + # Warning: do not use categories in the 'Announcement' format. + # Check https://github.com/goreleaser/goreleaser/issues/2304 for more info. + # + # Default: ''. + # discussion_category_name: General + + # If set to auto, will mark the release as not ready for production + # in case there is an indicator for this in the tag e.g. v1.0.0-rc1 + # If set to true, will mark the release as not ready for production. + # Default: false. 
+ prerelease: auto + + # If set to false, will NOT mark the release as "latest". + # This prevents it from being shown at the top of the release list, + # and from being returned when calling https://api.github.com/repos/OWNER/REPO/releases/latest. + # + # Available only for GitHub. + # + # Default: true. + make_latest: true + + # What to do with the release notes in case there the release already exists. + # + # Valid options are: + # - `keep-existing`: keep the existing notes + # - `append`: append the current release notes to the existing notes + # - `prepend`: prepend the current release notes to the existing notes + # - `replace`: replace existing notes + # + # Default: `keep-existing`. + mode: append + + # Header for the release body. + # + # Templates: allowed. + # header: | + # ## Some title ({{ .Date }}) + + # Welcome to this new release! + + # Header for the release body. + # + # This feature is only available in GoReleaser Pro. + # header: + # # Loads from an URL. + # from_url: + # # Templates: allowed. + # url: https://foo.bar/header.md + # headers: + # x-api-token: "${MYCOMPANY_TOKEN}" + + # # Loads from a local file. + # # Overrides `from_url`. + # from_file: + # # Templates: allowed. + # path: ./header.md + + # Footer for the release body. + # + # Templates: allowed. + # footer: | + # ## Thanks + + # Those were the changes on {{ .Tag }}! + + # Footer for the release body. + # + # This feature is only available in GoReleaser Pro. + # footer: + # # Loads from an URL. + # from_url: + # # Templates: allowed. + # url: https://foo.bar/footer.md + # footers: + # x-api-token: "${MYCOMPANY_TOKEN}" + + # # Loads from a local file. + # # Overrides `from_url`. + # from_file: + # # Templates: allowed. + # path: ./footer.md + + # # You can change the name of the release. + # # + # # Default: '{{.Tag}}' ('{{.PrefixedTag}}' on Pro). + # # Templates: allowed. 
+ # name_template: "{{.ProjectName}}-v{{.Version}} {{.Env.USER}}" + + # You can disable this pipe in order to not create the release on any SCM. + # Keep in mind that this might also break things that depend on the release + # URL, for instance, homebrew taps. + # + # Templates: allowed. + disable: true + + # Set this to true if you want to disable just the artifact upload to the SCM. + # If this is true, GoReleaser will still create the release with the + # changelog, but won't upload anything to it. + # + # Templates: allowed. + skip_upload: true + + # You can add extra pre-existing files to the release. + # The filename on the release will be the last part of the path (base). + # If another file with the same name exists, the last one found will be used. + # + # Templates: allowed. + # extra_files: + # - glob: ./path/to/file.txt + # - glob: ./glob/**/to/**/file/**/* + # - glob: ./glob/foo/to/bar/file/foobar/override_from_previous + # - glob: ./single_file.txt + # name_template: file.txt # note that this only works if glob matches 1 file only + + # Additional templated extra files to add to the release. + # Those files will have their contents pass through the template engine, + # and its results will be added to the release. + # + # This feature is only available in GoReleaser Pro. + # Templates: allowed. + # templated_extra_files: + # - src: LICENSE.tpl + # dst: LICENSE.txt + + # # Upload metadata.json and artifacts.json to the release as well. 
+ # include_meta: true \ No newline at end of file diff --git a/adapters/adpaters.go b/adapters/adpaters.go new file mode 100644 index 00000000..ce6ee20d --- /dev/null +++ b/adapters/adpaters.go @@ -0,0 +1,16 @@ +package adapters + +import ( + "context" + + "github.com/simulot/immich-go/internal/assets" +) + +type Reader interface { + Browse(cxt context.Context) chan *assets.Group +} + +type AssetWriter interface { + WriteAsset(context.Context, *assets.Asset) error + // WriteGroup(ctx context.Context, group *assets.Group) error +} diff --git a/cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063000139.jpg b/adapters/folder/DATA/2023/2023-10/2023-10-06/photo1_w_exif.jpg similarity index 100% rename from cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063000139.jpg rename to adapters/folder/DATA/2023/2023-10/2023-10-06/photo1_w_exif.jpg diff --git a/adapters/folder/DATA/2023/2023-10/2023-10-06/photo1_wo_exif.jpg b/adapters/folder/DATA/2023/2023-10/2023-10-06/photo1_wo_exif.jpg new file mode 100644 index 00000000..54455551 Binary files /dev/null and b/adapters/folder/DATA/2023/2023-10/2023-10-06/photo1_wo_exif.jpg differ diff --git a/adapters/folder/DATA/date-range/photo1_2023-10-06_wo_exif.jpg b/adapters/folder/DATA/date-range/photo1_2023-10-06_wo_exif.jpg new file mode 100644 index 00000000..54455551 Binary files /dev/null and b/adapters/folder/DATA/date-range/photo1_2023-10-06_wo_exif.jpg differ diff --git a/cmd/upload/TEST_DATA/Takeout2/Google Photos/Photos from 2023/PXL_20231006_063000139.jpg b/adapters/folder/DATA/date-range/photo1_2024-10-06_w_exif.jpg similarity index 100% rename from cmd/upload/TEST_DATA/Takeout2/Google Photos/Photos from 2023/PXL_20231006_063000139.jpg rename to adapters/folder/DATA/date-range/photo1_2024-10-06_w_exif.jpg diff --git a/cmd/upload/TEST_DATA/banned/PXL_20231006_063000139.jpg b/adapters/folder/DATA/date-range/photo1_w_exif.jpg similarity index 100% rename from 
cmd/upload/TEST_DATA/banned/PXL_20231006_063000139.jpg rename to adapters/folder/DATA/date-range/photo1_w_exif.jpg diff --git a/adapters/folder/DATA/date-range/photo1_wo_exif.jpg b/adapters/folder/DATA/date-range/photo1_wo_exif.jpg new file mode 100644 index 00000000..54455551 Binary files /dev/null and b/adapters/folder/DATA/date-range/photo1_wo_exif.jpg differ diff --git a/cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063536303.jpg b/adapters/folder/DATA/not-motion/IMG_1234.jpg similarity index 100% rename from cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063536303.jpg rename to adapters/folder/DATA/not-motion/IMG_1234.jpg diff --git a/cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063909898.LS.mp4 b/adapters/folder/DATA/not-motion/IMG_1234.mp4 similarity index 100% rename from cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063909898.LS.mp4 rename to adapters/folder/DATA/not-motion/IMG_1234.mp4 diff --git a/adapters/folder/options.go b/adapters/folder/options.go new file mode 100644 index 00000000..bb4b5a39 --- /dev/null +++ b/adapters/folder/options.go @@ -0,0 +1,151 @@ +package folder + +import ( + "fmt" + "strings" + "time" + + cliflags "github.com/simulot/immich-go/internal/cliFlags" + "github.com/simulot/immich-go/internal/filenames" + "github.com/simulot/immich-go/internal/filetypes" + "github.com/simulot/immich-go/internal/filters" + "github.com/simulot/immich-go/internal/namematcher" + "github.com/spf13/cobra" +) + +// ImportFolderOptions represents the flags used for importing assets from a file system. +type ImportFolderOptions struct { + // UsePathAsAlbumName determines whether to create albums based on the full path to the asset. + UsePathAsAlbumName AlbumFolderMode + + // AlbumNamePathSeparator specifies how multiple (sub) folders are joined when creating album names. 
+ AlbumNamePathSeparator string + + // ImportIntoAlbum is the name of the album where all assets will be added. + ImportIntoAlbum string + + // BannedFiles is a list of file name patterns to be excluded from the import process. + BannedFiles namematcher.List + + // Recursive indicates whether to explore the folder and all its sub-folders. + Recursive bool + + // InclusionFlags controls the file extensions to be included in the import process. + InclusionFlags cliflags.InclusionFlags + + // // ExifToolFlags specifies options for the exif. + // ExifToolFlags exif.ExifToolFlags + + // IgnoreSideCarFiles indicates whether to ignore XMP files during the import process. + IgnoreSideCarFiles bool + + // Stack jpg/raw + StackJpgWithRaw bool + + // Stack burst + StackBurstPhotos bool + + // SupportedMedia is the server's actual list of supported media types. + SupportedMedia filetypes.SupportedMedia + + // InfoCollector is used to extract information from the file name. + InfoCollector *filenames.InfoCollector + + // ManageHEICJPG determines whether to manage HEIC to JPG conversion options. + ManageHEICJPG filters.HeicJpgFlag + + // ManageRawJPG determines how to manage raw and JPEG files. + ManageRawJPG filters.RawJPGFlag + + // BurstFlag determines how to manage burst photos. + ManageBurst filters.BurstFlag + + // ManageEpsonFastFoto enables the management of Epson FastFoto files. + ManageEpsonFastFoto bool + + // Tags is a list of tags to be added to the imported assets. + Tags []string + + // Folder as tags + FolderAsTags bool + + // SessionTag indicates whether to add a session tag to the imported assets. + SessionTag bool + session string // Session tag value + + // TakeDateFromFilename indicates whether to take the date from the filename if the date isn't available in the image. 
+ TakeDateFromFilename bool + + // local time zone + TZ *time.Location +} + +func (o *ImportFolderOptions) AddFromFolderFlags(cmd *cobra.Command, parent *cobra.Command) { + o.ManageHEICJPG = filters.HeicJpgNothing + o.ManageRawJPG = filters.RawJPGNothing + o.ManageBurst = filters.BurstNothing + o.Recursive = true + o.SupportedMedia = filetypes.DefaultSupportedMedia + o.UsePathAsAlbumName = FolderModeNone + o.BannedFiles, _ = namematcher.New( + `@eaDir/`, + `@__thumb/`, // QNAP + `SYNOFILE_THUMB_*.*`, // SYNOLOGY + `Lightroom Catalog/`, // LR + `thumbnails/`, // Android photo + `.DS_Store/`, // Mac OS custom attributes + `._*.*`, // MacOS resource files + ) + cmd.Flags().StringVar(&o.ImportIntoAlbum, "into-album", "", "Specify an album to import all files into") + cmd.Flags().Var(&o.UsePathAsAlbumName, "folder-as-album", "Import all files in albums defined by the folder structure. Can be set to 'FOLDER' to use the folder name as the album name, or 'PATH' to use the full path as the album name") + cmd.Flags().StringVar(&o.AlbumNamePathSeparator, "album-path-joiner", " / ", "Specify a string to use when joining multiple folder names to create an album name (e.g. ' ',' - ')") + cmd.Flags().BoolVar(&o.Recursive, "recursive", true, "Explore the folder and all its sub-folders") + cmd.Flags().Var(&o.BannedFiles, "ban-file", "Exclude a file based on a pattern (case-insensitive). Can be specified multiple times.") + cmd.Flags().BoolVar(&o.IgnoreSideCarFiles, "ignore-sidecar-files", false, "Don't upload sidecar with the photo.") + + cmd.Flags().StringSliceVar(&o.Tags, "tag", nil, "Add tags to the imported assets. Can be specified multiple times. Hierarchy is supported using a / separator (e.g. 
'tag1/subtag1')") + cmd.Flags().BoolVar(&o.FolderAsTags, "folder-as-tags", false, "Use the folder structure as tags, (ex: the file holiday/summer 2024/file.jpg will have the tag holiday/summer 2024)") + cmd.Flags().BoolVar(&o.SessionTag, "session-tag", false, "Tag uploaded photos with a tag \"{immich-go}/YYYY-MM-DD HH-MM-SS\"") + + cliflags.AddInclusionFlags(cmd, &o.InclusionFlags) + cmd.Flags().BoolVar(&o.TakeDateFromFilename, "date-from-name", true, "Use the date from the filename if the date isn't available in the metadata (Only for .jpg,mp4,.heic,.dng,cr2,.cr3,).") + + // exif.AddExifToolFlags(cmd, &o.ExifToolFlags) // disabled for now + + if parent != nil && parent.Name() == "upload" { + cmd.Flags().Var(&o.ManageHEICJPG, "manage-heic-jpeg", "Manage coupled HEIC and JPEG files. Possible values: KeepHeic, KeepJPG, StackCoverHeic, StackCoverJPG") + cmd.Flags().Var(&o.ManageRawJPG, "manage-raw-jpeg", "Manage coupled RAW and JPEG files. Possible values: KeepRaw, KeepJPG, StackCoverRaw, StackCoverJPG") + cmd.Flags().Var(&o.ManageBurst, "manage-burst", "Manage burst photos. Possible values: Stack, StackKeepRaw, StackKeepJPEG") + cmd.Flags().BoolVar(&o.ManageEpsonFastFoto, "manage-epson-fastfoto", false, "Manage Epson FastFoto file (default: false)") + } +} + +// AlbumFolderMode represents the mode in which album folders are organized. 
+// Implement the interface pflag.Value + +type AlbumFolderMode string + +const ( + FolderModeNone AlbumFolderMode = "NONE" + FolderModeFolder AlbumFolderMode = "FOLDER" + FolderModePath AlbumFolderMode = "PATH" +) + +func (m AlbumFolderMode) String() string { + return string(m) +} + +func (m *AlbumFolderMode) Set(v string) error { + v = strings.TrimSpace(strings.ToUpper(v)) + switch v { + case string(FolderModeFolder), string(FolderModePath): + *m = AlbumFolderMode(v) + default: + return fmt.Errorf("invalid value for folder mode, expected %s, %s or %s", FolderModeFolder, FolderModePath, FolderModeNone) + } + return nil +} + +func (m AlbumFolderMode) Type() string { + return "folderMode" +} diff --git a/adapters/folder/readFolder.go b/adapters/folder/readFolder.go new file mode 100644 index 00000000..a6a9b2a5 --- /dev/null +++ b/adapters/folder/readFolder.go @@ -0,0 +1,410 @@ +package folder + +import ( + "bytes" + "context" + "errors" + "fmt" + "io/fs" + "path" + "path/filepath" + "sort" + "strings" + "sync" + "time" + + "github.com/simulot/immich-go/internal/assets" + "github.com/simulot/immich-go/internal/exif" + "github.com/simulot/immich-go/internal/exif/sidecars/jsonsidecar" + "github.com/simulot/immich-go/internal/exif/sidecars/xmpsidecar" + "github.com/simulot/immich-go/internal/fileevent" + "github.com/simulot/immich-go/internal/filenames" + "github.com/simulot/immich-go/internal/filetypes" + "github.com/simulot/immich-go/internal/filters" + "github.com/simulot/immich-go/internal/fshelper" + "github.com/simulot/immich-go/internal/groups" + "github.com/simulot/immich-go/internal/groups/burst" + "github.com/simulot/immich-go/internal/groups/epsonfastfoto" + "github.com/simulot/immich-go/internal/groups/series" + "github.com/simulot/immich-go/internal/worker" +) + +type LocalAssetBrowser struct { + fsyss []fs.FS + log *fileevent.Recorder + flags *ImportFolderOptions + pool *worker.Pool + wg sync.WaitGroup + groupers []groups.Grouper +} + +func 
NewLocalFiles(ctx context.Context, l *fileevent.Recorder, flags *ImportFolderOptions, fsyss ...fs.FS) (*LocalAssetBrowser, error) { + if flags.ImportIntoAlbum != "" && flags.UsePathAsAlbumName != FolderModeNone { + return nil, errors.New("cannot use both --into-album and --folder-as-album") + } + + la := LocalAssetBrowser{ + fsyss: fsyss, + flags: flags, + log: l, + pool: worker.NewPool(3), // TODO: Make this configurable + } + if flags.InfoCollector == nil { + flags.InfoCollector = filenames.NewInfoCollector(flags.TZ, flags.SupportedMedia) + } + + if flags.InclusionFlags.DateRange.IsSet() { + flags.InclusionFlags.DateRange.SetTZ(flags.TZ) + } + + if flags.SessionTag { + flags.session = fmt.Sprintf("{immich-go}/%s", time.Now().Format("2006-01-02 15:04:05")) + } + + // if flags.ExifToolFlags.UseExifTool { + // err := exif.NewExifTool(&flags.ExifToolFlags) + // if err != nil { + // return nil, err + // } + // } + + if flags.ManageEpsonFastFoto { + g := epsonfastfoto.Group{} + la.groupers = append(la.groupers, g.Group) + } + if flags.ManageBurst != filters.BurstNothing { + la.groupers = append(la.groupers, burst.Group) + } + la.groupers = append(la.groupers, series.Group) + + return &la, nil +} + +func (la *LocalAssetBrowser) Browse(ctx context.Context) chan *assets.Group { + gOut := make(chan *assets.Group) + go func() { + defer close(gOut) + for _, fsys := range la.fsyss { + la.concurrentParseDir(ctx, fsys, ".", gOut) + } + la.wg.Wait() + la.pool.Stop() + }() + return gOut +} + +func (la *LocalAssetBrowser) concurrentParseDir(ctx context.Context, fsys fs.FS, dir string, gOut chan *assets.Group) { + la.wg.Add(1) + ctx, cancel := context.WithCancelCause(ctx) + go la.pool.Submit(func() { + defer la.wg.Done() + err := la.parseDir(ctx, fsys, dir, gOut) + if err != nil { + la.log.Log().Error(err.Error()) + cancel(err) + } + }) +} + +func (la *LocalAssetBrowser) parseDir(ctx context.Context, fsys fs.FS, dir string, gOut chan *assets.Group) error { + fsName := "" + if fsys, 
ok := fsys.(interface{ Name() string }); ok { + fsName = fsys.Name() + } + + var as []*assets.Asset + var entries []fs.DirEntry + var err error + + select { + case <-ctx.Done(): + return ctx.Err() + default: + entries, err = fs.ReadDir(fsys, dir) + if err != nil { + return err + } + } + + for _, entry := range entries { + base := entry.Name() + name := path.Join(dir, base) + if entry.IsDir() { + continue + } + + if la.flags.BannedFiles.Match(name) { + la.log.Record(ctx, fileevent.DiscoveredDiscarded, fshelper.FSName(fsys, entry.Name()), "reason", "banned file") + continue + } + + ext := filepath.Ext(base) + mediaType := la.flags.SupportedMedia.TypeFromExt(ext) + + if mediaType == filetypes.TypeUnknown { + la.log.Record(ctx, fileevent.DiscoveredUnsupported, fshelper.FSName(fsys, name), "reason", "unsupported file type") + continue + } + + switch mediaType { + case filetypes.TypeImage: + la.log.Record(ctx, fileevent.DiscoveredImage, fshelper.FSName(fsys, name)) + case filetypes.TypeVideo: + la.log.Record(ctx, fileevent.DiscoveredVideo, fshelper.FSName(fsys, name)) + case filetypes.TypeSidecar: + if la.flags.IgnoreSideCarFiles { + la.log.Record(ctx, fileevent.DiscoveredDiscarded, fshelper.FSName(fsys, name), "reason", "sidecar file ignored") + continue + } + la.log.Record(ctx, fileevent.DiscoveredSidecar, fshelper.FSName(fsys, name)) + continue + } + + if !la.flags.InclusionFlags.IncludedExtensions.Include(ext) { + la.log.Record(ctx, fileevent.DiscoveredDiscarded, fshelper.FSName(fsys, name), "reason", "extension not included") + continue + } + + if la.flags.InclusionFlags.ExcludedExtensions.Exclude(ext) { + la.log.Record(ctx, fileevent.DiscoveredDiscarded, fshelper.FSName(fsys, name), "reason", "extension excluded") + continue + } + + select { + case <-ctx.Done(): + return ctx.Err() + default: + // we have a file to process + a, err := la.assetFromFile(ctx, fsys, name) + if err != nil { + la.log.Record(ctx, fileevent.Error, fshelper.FSName(fsys, name), "error", 
err.Error()) + return err + } + if a != nil { + as = append(as, a) + } + } + } + + // process the left over dirs + for _, entry := range entries { + base := entry.Name() + name := path.Join(dir, base) + if entry.IsDir() { + if la.flags.BannedFiles.Match(name) { + la.log.Record(ctx, fileevent.DiscoveredDiscarded, fshelper.FSName(fsys, name), "reason", "banned folder") + continue // Skip this folder, no error + } + if la.flags.Recursive && entry.Name() != "." { + la.concurrentParseDir(ctx, fsys, name, gOut) + } + continue + } + } + + in := make(chan *assets.Asset) + go func() { + defer close(in) + + sort.Slice(as, func(i, j int) bool { + // Sort by radical first + radicalI := as[i].Radical + radicalJ := as[j].Radical + if radicalI != radicalJ { + return radicalI < radicalJ + } + // If radicals are the same, sort by date + return as[i].CaptureDate.Before(as[j].CaptureDate) + }) + + for _, a := range as { + // check the presence of a JSON file + jsonName, err := checkExistSideCar(fsys, a.File.Name(), ".json") + if err == nil && jsonName != "" { + buf, err := fs.ReadFile(fsys, jsonName) + if err != nil { + la.log.Record(ctx, fileevent.Error, nil, "error", err.Error()) + } else { + md := &assets.Metadata{} + err = jsonsidecar.Read(bytes.NewReader(buf), md) + if err != nil { + la.log.Record(ctx, fileevent.Error, nil, "error", err.Error()) + } else { + md.File = fshelper.FSName(fsys, jsonName) + a.FromApplication = a.UseMetadata(md) + } + } + } + // check the presence of a XMP file + xmpName, err := checkExistSideCar(fsys, a.File.Name(), ".xmp") + if err == nil && xmpName != "" { + buf, err := fs.ReadFile(fsys, xmpName) + if err != nil { + la.log.Record(ctx, fileevent.Error, nil, "error", err.Error()) + } else { + md := &assets.Metadata{} + err = xmpsidecar.ReadXMP(bytes.NewReader(buf), md) + if err != nil { + la.log.Record(ctx, fileevent.Error, nil, "error", err.Error()) + } else { + md.File = fshelper.FSName(fsys, xmpName) + a.FromSideCar = a.UseMetadata(md) + } + } + } 
+ + // Read metadata from the file only if needed (date range or take date from filename) + if la.flags.InclusionFlags.DateRange.IsSet() || la.flags.TakeDateFromFilename { + if a.CaptureDate.IsZero() { + // no date in XMP, JSON, try reading the metadata + f, name, err := a.PartialSourceReader() + if err == nil { + md, err := exif.GetMetaData(f, name, la.flags.TZ) + if err != nil { + la.log.Record(ctx, fileevent.INFO, a.File, "error", err.Error()) + if la.flags.TakeDateFromFilename { + a.CaptureDate = a.NameInfo.Taken + } + } else { + a.FromSourceFile = a.UseMetadata(md) + } + } + } + } + + if !la.flags.InclusionFlags.DateRange.InRange(a.CaptureDate) { + a.Close() + la.log.Record(ctx, fileevent.DiscoveredDiscarded, a.File, "reason", "asset outside date range") + continue + } + + // Add tags + if len(la.flags.Tags) > 0 { + for _, t := range la.flags.Tags { + a.AddTag(t) + } + } + + // Add folder as tags + if la.flags.FolderAsTags { + t := fsName + if dir != "." { + t = path.Join(t, dir) + } + if t != "" { + a.AddTag(t) + } + } + + if la.flags.SessionTag { + a.AddTag(la.flags.session) + } + select { + case in <- a: + case <-ctx.Done(): + return + } + } + }() + + gs := groups.NewGrouperPipeline(ctx, la.groupers...).PipeGrouper(ctx, in) + for g := range gs { + // Add album information + if la.flags.ImportIntoAlbum != "" { + g.Albums = []assets.Album{{Title: la.flags.ImportIntoAlbum}} + } else if la.flags.UsePathAsAlbumName != FolderModeNone && la.flags.UsePathAsAlbumName != "" { + Album := "" + switch la.flags.UsePathAsAlbumName { + case FolderModeFolder: + if dir == "." { + Album = fsName + } else { + Album = filepath.Base(dir) + } + case FolderModePath: + parts := []string{} + if fsName != "" { + parts = append(parts, fsName) + } + if dir != "." { + parts = append(parts, strings.Split(dir, "/")...) + // parts = append(parts, strings.Split(dir, string(filepath.Separator))...) 
+ } + Album = strings.Join(parts, la.flags.AlbumNamePathSeparator) + } + g.Albums = []assets.Album{{Title: Album}} + } else { + for _, a := range g.Assets { + for _, al := range a.Albums { + g.AddAlbum(al) + } + } + } + for _, a := range g.Assets { + a.Albums = g.Albums + } + select { + case gOut <- g: + case <-ctx.Done(): + return ctx.Err() + } + } + return nil +} + +func checkExistSideCar(fsys fs.FS, name string, ext string) (string, error) { + ext2 := "" + for _, r := range ext { + if r == '.' { + ext2 += "." + continue + } + ext2 += "[" + strings.ToLower(string(r)) + strings.ToUpper(string(r)) + "]" + } + + base := name + l, err := fs.Glob(fsys, base+ext2) + if err != nil { + return "", err + } + if len(l) > 0 { + return l[0], nil + } + + ext = path.Ext(base) + if !filetypes.DefaultSupportedMedia.IsMedia(ext) { + return "", nil + } + base = strings.TrimSuffix(base, ext) + + l, err = fs.Glob(fsys, base+ext2) + if err != nil { + return "", err + } + if len(l) > 0 { + return l[0], nil + } + return "", nil +} + +func (la *LocalAssetBrowser) assetFromFile(_ context.Context, fsys fs.FS, name string) (*assets.Asset, error) { + a := &assets.Asset{ + File: fshelper.FSName(fsys, name), + OriginalFileName: filepath.Base(name), + } + i, err := fs.Stat(fsys, name) + if err != nil { + a.Close() + return nil, err + } + a.FileSize = int(i.Size()) + a.FileDate = i.ModTime() + + n := path.Dir(name) + "/" + a.OriginalFileName + if fsys, ok := fsys.(interface{ Name() string }); ok { + n = path.Join(fsys.Name(), n) + } + + a.SetNameInfo(la.flags.InfoCollector.GetInfo(n)) + return a, nil +} diff --git a/adapters/folder/readFolderWithFIles_test.go b/adapters/folder/readFolderWithFIles_test.go new file mode 100644 index 00000000..0585f32e --- /dev/null +++ b/adapters/folder/readFolderWithFIles_test.go @@ -0,0 +1,181 @@ +package folder + +import ( + "context" + "io/fs" + "os" + "reflect" + "sort" + "testing" + "time" + + "github.com/kr/pretty" + "github.com/simulot/immich-go/app" + 
cliflags "github.com/simulot/immich-go/internal/cliFlags" + "github.com/simulot/immich-go/internal/configuration" + "github.com/simulot/immich-go/internal/fileevent" + "github.com/simulot/immich-go/internal/filetypes" + "github.com/simulot/immich-go/internal/filters" + "github.com/simulot/immich-go/internal/fshelper" +) + +func TestLocalAssets(t *testing.T) { + tc := []struct { + name string + fsys []fs.FS + flags ImportFolderOptions + expectedFiles []string + expectedCounts []int64 + expectedAlbums map[string][]string + }{ + { + name: "easy", + flags: ImportFolderOptions{ + SupportedMedia: filetypes.DefaultSupportedMedia, + ManageBurst: filters.BurstNothing, + ManageRawJPG: filters.RawJPGNothing, + ManageHEICJPG: filters.HeicJpgNothing, + TZ: time.Local, + }, + fsys: []fs.FS{ + os.DirFS("DATA/date-range"), + }, + expectedFiles: []string{ + "photo1_w_exif.jpg", + "photo1_wo_exif.jpg", + "photo1_2024-10-06_w_exif.jpg", + "photo1_2023-10-06_wo_exif.jpg", + }, + expectedCounts: fileevent.NewCounts().Set(fileevent.DiscoveredImage, 4).Set(fileevent.Uploaded, 4).Value(), + }, + { + name: "date on the path given as argument, use names", + flags: ImportFolderOptions{ + ManageBurst: filters.BurstNothing, + ManageRawJPG: filters.RawJPGNothing, + ManageHEICJPG: filters.HeicJpgNothing, + SupportedMedia: filetypes.DefaultSupportedMedia, + TakeDateFromFilename: true, + TZ: time.Local, + InclusionFlags: cliflags.InclusionFlags{ + DateRange: cliflags.InitDateRange(time.Local, "2023-10-06"), + }, + }, + fsys: []fs.FS{ + fshelper.NewFSWithName(os.DirFS("DATA/2023/2023-10/2023-10-06"), "2023-10-06"), + }, + expectedFiles: []string{ + "photo1_w_exif.jpg", + "photo1_wo_exif.jpg", + }, + expectedCounts: fileevent.NewCounts(). + Set(fileevent.DiscoveredImage, 2).Set(fileevent.DiscoveredDiscarded, 0).Set(fileevent.Uploaded, 2). 
+ Set(fileevent.INFO, 1).Value(), + }, + { + name: "date on the path given as argument, use names, in a TZ", + flags: ImportFolderOptions{ + ManageBurst: filters.BurstNothing, + ManageRawJPG: filters.RawJPGNothing, + ManageHEICJPG: filters.HeicJpgNothing, + SupportedMedia: filetypes.DefaultSupportedMedia, + TakeDateFromFilename: true, + TZ: time.FixedZone("UTC-4", -4*60*60), + InclusionFlags: cliflags.InclusionFlags{ + DateRange: cliflags.InitDateRange(time.FixedZone("UTC-4", -4*60*60), "2023-10-06"), + }, + }, + fsys: []fs.FS{ + fshelper.NewFSWithName(os.DirFS("DATA/2023/2023-10/2023-10-06"), "2023-10-06"), + }, + expectedFiles: []string{ + "photo1_w_exif.jpg", + "photo1_wo_exif.jpg", + }, + expectedCounts: fileevent.NewCounts(). + Set(fileevent.DiscoveredImage, 2).Set(fileevent.DiscoveredDiscarded, 0).Set(fileevent.Uploaded, 2). + Set(fileevent.INFO, 1).Value(), + }, + { + name: "date on the path given as argument, don't use names", + flags: ImportFolderOptions{ + ManageBurst: filters.BurstNothing, + ManageRawJPG: filters.RawJPGNothing, + ManageHEICJPG: filters.HeicJpgNothing, + SupportedMedia: filetypes.DefaultSupportedMedia, + TakeDateFromFilename: false, + TZ: time.Local, + InclusionFlags: cliflags.InclusionFlags{ + DateRange: cliflags.InitDateRange(time.Local, "2023-10-06"), + }, + }, + fsys: []fs.FS{ + fshelper.NewFSWithName(os.DirFS("DATA/2023/2023-10/2023-10-06"), "2023-10-06"), + }, + expectedFiles: []string{ + "photo1_w_exif.jpg", + }, + expectedCounts: fileevent.NewCounts(). + Set(fileevent.DiscoveredImage, 2).Set(fileevent.DiscoveredDiscarded, 1).Set(fileevent.Uploaded, 1). 
+ Set(fileevent.INFO, 1).Value(), + }, + } + + logFile := configuration.DefaultLogFile() + for _, c := range tc { + t.Run(c.name, func(t *testing.T) { + ctx := context.Background() + + log := app.Log{ + File: logFile, + Level: "DEBUG", + } + err := log.OpenLogFile() + if err != nil { + t.Error(err) + return + } + log.Info("Test case: " + c.name) + recorder := fileevent.NewRecorder(log.Logger) + b, err := NewLocalFiles(ctx, recorder, &c.flags, c.fsys...) + if err != nil { + t.Error(err) + } + + groupChan := b.Browse(ctx) + + results := []string{} + albums := map[string][]string{} + for g := range groupChan { + if err := g.Validate(); err != nil { + t.Error(err) + return + } + + for _, a := range g.Assets { + results = append(results, a.File.Name()) + if len(c.expectedAlbums) > 0 { + for _, album := range g.Albums { + albums[album.Title] = append(albums[album.Title], a.File.Name()) + } + } + recorder.Record(ctx, fileevent.Uploaded, a.File) + } + } + sort.Strings(c.expectedFiles) + sort.Strings(results) + + if !reflect.DeepEqual(results, c.expectedFiles) { + t.Errorf("file list difference\n") + pretty.Ldiff(t, c.expectedFiles, results) + } + if !reflect.DeepEqual(recorder.GetCounts(), c.expectedCounts) { + t.Errorf("counters difference\n") + pretty.Ldiff(t, c.expectedCounts, recorder.GetCounts()) + } + if c.expectedAlbums != nil { + compareAlbums(t, albums, c.expectedAlbums) + } + }) + } +} diff --git a/adapters/folder/readFolder_test.go b/adapters/folder/readFolder_test.go new file mode 100644 index 00000000..2ef1e2f5 --- /dev/null +++ b/adapters/folder/readFolder_test.go @@ -0,0 +1,548 @@ +package folder + +import ( + "context" + "errors" + "io/fs" + "path" + "reflect" + "sort" + "testing" + "time" + "unsafe" + + "github.com/kr/pretty" + "github.com/psanford/memfs" + "github.com/simulot/immich-go/app" + cliflags "github.com/simulot/immich-go/internal/cliFlags" + "github.com/simulot/immich-go/internal/configuration" + "github.com/simulot/immich-go/internal/fileevent" 
+ "github.com/simulot/immich-go/internal/filenames" + "github.com/simulot/immich-go/internal/filetypes" + "github.com/simulot/immich-go/internal/namematcher" +) + +type inMemFS struct { + *memfs.FS + name string + err error + ic *filenames.InfoCollector +} + +func newInMemFS(name string, ic *filenames.InfoCollector) *inMemFS { // nolint: unparam + return &inMemFS{ + name: name, + FS: memfs.New(), + ic: ic, + } +} + +func (mfs inMemFS) Name() string { + return mfs.name +} + +func (mfs *inMemFS) addFile(name string, _ time.Time) *inMemFS { + if mfs.err != nil { + return mfs + } + dir := path.Dir(name) + base := path.Base(name) + mfs.err = errors.Join(mfs.err, mfs.MkdirAll(dir, 0o777)) + i := mfs.ic.GetInfo(base) + mfs.err = errors.Join(mfs.err, mfs.WriteFile(name, *(*[]byte)(unsafe.Pointer(&i)), 0o777)) + return mfs +} + +func TestInMemLocalAssets(t *testing.T) { + t0 := time.Date(2021, 1, 1, 0, 0, 0, 0, time.Local) + ic := filenames.NewInfoCollector(time.Local, filetypes.DefaultSupportedMedia) + tc := []struct { + name string + fsys []fs.FS + flags ImportFolderOptions + expectedFiles []string + expectedCounts []int64 + expectedAlbums map[string][]string + }{ + { + name: "easy", + flags: ImportFolderOptions{ + SupportedMedia: filetypes.DefaultSupportedMedia, + InfoCollector: ic, + }, + fsys: []fs.FS{ + newInMemFS("MemFS", ic). + addFile("root_01.jpg", t0), + }, + expectedFiles: []string{"root_01.jpg"}, + expectedCounts: fileevent.NewCounts().Set(fileevent.DiscoveredImage, 1).Value(), + }, + { + name: "recursive", + flags: ImportFolderOptions{ + InfoCollector: ic, + SupportedMedia: filetypes.DefaultSupportedMedia, + Recursive: true, + }, + fsys: []fs.FS{ + newInMemFS("MemFS", ic). + addFile("root_01.jpg", t0). 
+ addFile("photos/photo_01.jpg", t0), + }, + expectedFiles: []string{"root_01.jpg", "photos/photo_01.jpg"}, + expectedCounts: fileevent.NewCounts().Set(fileevent.DiscoveredImage, 2).Value(), + }, + { + name: "non-recursive", + flags: ImportFolderOptions{ + SupportedMedia: filetypes.DefaultSupportedMedia, + InfoCollector: ic, + Recursive: false, + }, + fsys: []fs.FS{ + newInMemFS("MemFS", ic). + addFile("root_01.jpg", t0). + addFile("photos/photo_01.jpg", t0), + }, + expectedFiles: []string{"root_01.jpg"}, + expectedCounts: fileevent.NewCounts().Set(fileevent.DiscoveredImage, 1).Value(), + }, + + { + name: "banned files", + flags: ImportFolderOptions{ + BannedFiles: namematcher.MustList(`@eaDir`, `.@__thumb`, `SYNOFILE_THUMB_*.*`, "BLOG/", "Database/", `._*.*`, `._*.*`), + SupportedMedia: filetypes.DefaultSupportedMedia, + InclusionFlags: cliflags.InclusionFlags{}, + InfoCollector: ic, + Recursive: true, + }, + fsys: []fs.FS{ + newInMemFS("MemFS", ic). + addFile("root_01.jpg", t0). + addFile("photos/photo_01.jpg", t0). + addFile("photos/photo_02.cr3", t0). + addFile("photos/photo_03.jpg", t0). + addFile("photos/summer 2023/20230801-001.jpg", t0). + addFile("photos/summer 2023/20230801-002.jpg", t0). + addFile("photos/summer 2023/20230801-003.cr3", t0). + addFile("@eaDir/thb1.jpg", t0). + addFile("photos/SYNOFILE_THUMB_0001.jpg", t0). + addFile("photos/summer 2023/.@__thumb/thb2.jpg", t0). + addFile("BLOG/blog.jpg", t0). + addFile("Project/Database/database_01.jpg", t0). + addFile("photos/database_01.jpg", t0). + addFile("mac/image.JPG", t0). + addFile("mac/._image.JPG", t0). + addFile("mac/image.JPG", t0). 
+ addFile("mac/._image.JPG", t0), + }, + expectedFiles: []string{ + "root_01.jpg", + "photos/photo_01.jpg", + "photos/photo_02.cr3", + "photos/photo_03.jpg", + "photos/summer 2023/20230801-001.jpg", + "photos/summer 2023/20230801-002.jpg", + "photos/summer 2023/20230801-003.cr3", + "photos/database_01.jpg", + "mac/image.JPG", + }, + expectedCounts: fileevent.NewCounts().Set(fileevent.DiscoveredImage, 9). + Set(fileevent.DiscoveredDiscarded, 6).Value(), + }, + { + name: "excluded extensions", + flags: ImportFolderOptions{ + BannedFiles: namematcher.MustList(`@eaDir/`, `.@__thumb`, `SYNOFILE_THUMB_*.*`), + SupportedMedia: filetypes.DefaultSupportedMedia, + + InclusionFlags: cliflags.InclusionFlags{ + ExcludedExtensions: cliflags.ExtensionList{".cr3"}, + }, + Recursive: true, + InfoCollector: ic, + }, + fsys: []fs.FS{ + newInMemFS("MemFS", ic). + addFile("root_01.jpg", t0). + addFile("photos/photo_01.jpg", t0). + addFile("photos/photo_02.cr3", t0). + addFile("photos/photo_03.jpg", t0). + addFile("photos/summer 2023/20230801-001.jpg", t0). + addFile("photos/summer 2023/20230801-002.jpg", t0). + addFile("photos/summer 2023/20230801-003.cr3", t0). + addFile("@eaDir/thb1.jpg", t0). + addFile("photos/SYNOFILE_THUMB_0001.jpg", t0). + addFile("photos/summer 2023/.@__thumb/thb2.jpg", t0), + }, + expectedFiles: []string{ + "root_01.jpg", + "photos/photo_01.jpg", + "photos/photo_03.jpg", + "photos/summer 2023/20230801-001.jpg", + "photos/summer 2023/20230801-002.jpg", + }, + expectedCounts: fileevent.NewCounts().Set(fileevent.DiscoveredImage, 7). 
+ Set(fileevent.DiscoveredDiscarded, 5).Value(), + }, + { + name: "included extensions", + flags: ImportFolderOptions{ + BannedFiles: namematcher.MustList(`@eaDir/`, `.@__thumb`, `SYNOFILE_THUMB_*.*`), + SupportedMedia: filetypes.DefaultSupportedMedia, + + InclusionFlags: cliflags.InclusionFlags{ + IncludedExtensions: cliflags.ExtensionList{".cr3"}, + }, + Recursive: true, + InfoCollector: ic, + }, + fsys: []fs.FS{ + newInMemFS("MemFS", ic). + addFile("root_01.jpg", t0). + addFile("photos/photo_01.jpg", t0). + addFile("photos/photo_02.cr3", t0). + addFile("photos/photo_03.jpg", t0). + addFile("photos/summer 2023/20230801-001.jpg", t0). + addFile("photos/summer 2023/20230801-002.jpg", t0). + addFile("photos/summer 2023/20230801-003.cr3", t0). + addFile("@eaDir/thb1.jpg", t0). + addFile("photos/SYNOFILE_THUMB_0001.jpg", t0). + addFile("photos/summer 2023/.@__thumb/thb2.jpg", t0), + }, + expectedFiles: []string{ + "photos/photo_02.cr3", + "photos/summer 2023/20230801-003.cr3", + }, + expectedCounts: fileevent.NewCounts().Set(fileevent.DiscoveredImage, 7). + Set(fileevent.DiscoveredDiscarded, 8).Value(), + }, + + { + name: "motion picture", + flags: ImportFolderOptions{ + BannedFiles: namematcher.MustList(`@eaDir/`, `.@__thumb`, `SYNOFILE_THUMB_*.*`), + SupportedMedia: filetypes.DefaultSupportedMedia, + InclusionFlags: cliflags.InclusionFlags{}, + Recursive: true, + InfoCollector: ic, + }, + fsys: []fs.FS{ + newInMemFS("MemFS", ic). + addFile("motion/nomotion.MP4", t0). + addFile("motion/PXL_20210102_221126856.MP~2", t0). + addFile("motion/PXL_20210102_221126856.MP~2.jpg", t0). + addFile("motion/PXL_20210102_221126856.MP.jpg", t0). + addFile("motion/PXL_20210102_221126856.MP", t0). + addFile("motion/20231227_152817.jpg", t0). 
+ addFile("motion/20231227_152817.MP4", t0), + }, + expectedFiles: []string{ + "motion/PXL_20210102_221126856.MP.jpg", "motion/PXL_20210102_221126856.MP", + "motion/PXL_20210102_221126856.MP~2.jpg", "motion/PXL_20210102_221126856.MP~2", + "motion/20231227_152817.jpg", "motion/20231227_152817.MP4", + "motion/nomotion.MP4", + }, + expectedCounts: fileevent.NewCounts().Set(fileevent.DiscoveredImage, 3). + Set(fileevent.DiscoveredVideo, 4).Value(), + }, + + { + name: "date in range, use name", + flags: ImportFolderOptions{ + SupportedMedia: filetypes.DefaultSupportedMedia, + + InclusionFlags: cliflags.InclusionFlags{ + DateRange: cliflags.InitDateRange(time.Local, "2023-08"), + }, + Recursive: true, + TZ: time.Local, + TakeDateFromFilename: true, + }, + fsys: []fs.FS{ + newInMemFS("MemFS", ic). + addFile("root_01.jpg", t0). + addFile("photos/photo_01.jpg", t0). + addFile("photos/photo_02.cr3", t0). + addFile("photos/photo_03.jpg", t0). + addFile("photos/summer 2023/20230801-001.jpg", t0). + addFile("photos/summer 2023/20230801-002.jpg", t0). + addFile("photos/summer 2023/20230801-003.cr3", t0), + }, + expectedFiles: []string{ + "photos/summer 2023/20230801-001.jpg", + "photos/summer 2023/20230801-002.jpg", + "photos/summer 2023/20230801-003.cr3", + }, + expectedCounts: fileevent.NewCounts().Set(fileevent.DiscoveredImage, 7). + Set(fileevent.DiscoveredDiscarded, 4). + Set(fileevent.INFO, 7).Value(), + }, + + { + name: "path as album name", + flags: ImportFolderOptions{ + SupportedMedia: filetypes.DefaultSupportedMedia, + UsePathAsAlbumName: FolderModePath, + AlbumNamePathSeparator: " ¤ ", + InclusionFlags: cliflags.InclusionFlags{}, + Recursive: true, + InfoCollector: ic, + }, + fsys: []fs.FS{ + newInMemFS("MemFS", ic). + addFile("root_01.jpg", t0). + addFile("photos/photo_01.jpg", t0). + addFile("photos/photo_02.cr3", t0). + addFile("photos/photo_03.jpg", t0). + addFile("photos/summer 2023/20230801-001.jpg", t0). + addFile("photos/summer 2023/20230801-002.jpg", t0). 
+ addFile("photos/summer 2023/20230801-003.cr3", t0), + }, + expectedFiles: []string{ + "root_01.jpg", + "photos/photo_01.jpg", + "photos/photo_02.cr3", + "photos/photo_03.jpg", + "photos/summer 2023/20230801-001.jpg", + "photos/summer 2023/20230801-002.jpg", + "photos/summer 2023/20230801-003.cr3", + }, + expectedCounts: fileevent.NewCounts().Set(fileevent.DiscoveredImage, 7).Value(), + expectedAlbums: map[string][]string{ + "MemFS": {"root_01.jpg"}, + "MemFS ¤ photos": {"photos/photo_01.jpg", "photos/photo_02.cr3", "photos/photo_03.jpg"}, + "MemFS ¤ photos ¤ summer 2023": {"photos/summer 2023/20230801-001.jpg", "photos/summer 2023/20230801-002.jpg", "photos/summer 2023/20230801-003.cr3"}, + }, + }, + + { + name: "folder as album name", + flags: ImportFolderOptions{ + SupportedMedia: filetypes.DefaultSupportedMedia, + UsePathAsAlbumName: FolderModeFolder, + AlbumNamePathSeparator: " ¤ ", + InclusionFlags: cliflags.InclusionFlags{}, + Recursive: true, + InfoCollector: ic, + }, + fsys: []fs.FS{ + newInMemFS("MemFS", ic). + addFile("root_01.jpg", t0). + addFile("photos/photo_01.jpg", t0). + addFile("photos/photo_02.cr3", t0). + addFile("photos/photo_03.jpg", t0). + addFile("photos/summer 2023/20230801-001.jpg", t0). + addFile("photos/summer 2023/20230801-002.jpg", t0). 
+ addFile("photos/summer 2023/20230801-003.cr3", t0), + }, + expectedFiles: []string{ + "root_01.jpg", + "photos/photo_01.jpg", + "photos/photo_02.cr3", + "photos/photo_03.jpg", + "photos/summer 2023/20230801-001.jpg", + "photos/summer 2023/20230801-002.jpg", + "photos/summer 2023/20230801-003.cr3", + }, + expectedCounts: fileevent.NewCounts().Set(fileevent.DiscoveredImage, 7).Value(), + expectedAlbums: map[string][]string{ + "MemFS": {"root_01.jpg"}, + "photos": {"photos/photo_01.jpg", "photos/photo_02.cr3", "photos/photo_03.jpg"}, + "summer 2023": {"photos/summer 2023/20230801-001.jpg", "photos/summer 2023/20230801-002.jpg", "photos/summer 2023/20230801-003.cr3"}, + }, + }, + } + + logFile := configuration.DefaultLogFile() + for _, c := range tc { + t.Run(c.name, func(t *testing.T) { + ctx := context.Background() + + log := app.Log{ + File: logFile, + Level: "INFO", + } + err := log.OpenLogFile() + if err != nil { + t.Error(err) + return + } + log.Logger.Info("\n\n\ntest case: " + c.name) + recorder := fileevent.NewRecorder(log.Logger) + b, err := NewLocalFiles(ctx, recorder, &c.flags, c.fsys...) 
+ if err != nil { + t.Error(err) + } + + groupChan := b.Browse(ctx) + + results := []string{} + albums := map[string][]string{} + + for g := range groupChan { + if err = g.Validate(); err != nil { + t.Error(err) + return + } + for _, a := range g.Assets { + results = append(results, a.File.Name()) + if len(c.expectedAlbums) > 0 { + for _, album := range g.Albums { + albums[album.Title] = append(albums[album.Title], a.File.Name()) + } + } + } + } + + sort.Strings(c.expectedFiles) + sort.Strings(results) + + if !reflect.DeepEqual(results, c.expectedFiles) { + t.Errorf("file list difference\n") + pretty.Ldiff(t, c.expectedFiles, results) + } + if !reflect.DeepEqual(recorder.GetCounts(), c.expectedCounts) { + t.Errorf("counters difference\n") + pretty.Ldiff(t, c.expectedCounts, recorder.GetCounts()) + } + if c.expectedAlbums != nil { + compareAlbums(t, albums, c.expectedAlbums) + } + }) + } +} + +func TestInMemLocalAssetsWithTags(t *testing.T) { + t0 := time.Date(2021, 1, 1, 0, 0, 0, 0, time.Local) + ic := filenames.NewInfoCollector(time.Local, filetypes.DefaultSupportedMedia) + tc := []struct { + name string + fsys []fs.FS + flags ImportFolderOptions + want map[string][]string + }{ + { + name: "tags", + flags: ImportFolderOptions{ + SupportedMedia: filetypes.DefaultSupportedMedia, + InfoCollector: ic, + Recursive: true, + Tags: []string{"tag1", "tag2/subtag2"}, + }, + fsys: []fs.FS{ + newInMemFS("MemFS", ic). + addFile("root_01.jpg", t0). + addFile("photos/photo_01.jpg", t0), + }, + want: map[string][]string{ + "root_01.jpg": {"tag1", "tag2/subtag2"}, + "photos/photo_01.jpg": {"tag1", "tag2/subtag2"}, + }, + }, + { + name: "folder as tags", + flags: ImportFolderOptions{ + SupportedMedia: filetypes.DefaultSupportedMedia, + InfoCollector: ic, + Recursive: true, + FolderAsTags: true, + }, + fsys: []fs.FS{ + newInMemFS("MemFS", ic). + addFile("root_01.jpg", t0). + addFile("photos/photo_01.jpg", t0). 
+ addFile("photos/summer/photo_02.jpg", t0), + }, + want: map[string][]string{ + "root_01.jpg": {"MemFS"}, + "photos/photo_01.jpg": {"MemFS/photos"}, + "photos/summer/photo_02.jpg": {"MemFS/photos/summer"}, + }, + }, + { + name: "folder as tags and a tag", + flags: ImportFolderOptions{ + SupportedMedia: filetypes.DefaultSupportedMedia, + InfoCollector: ic, + Recursive: true, + FolderAsTags: true, + Tags: []string{"tag1"}, + }, + fsys: []fs.FS{ + newInMemFS("MemFS", ic). + addFile("root_01.jpg", t0). + addFile("photos/photo_01.jpg", t0). + addFile("photos/summer/photo_02.jpg", t0), + }, + want: map[string][]string{ + "root_01.jpg": {"tag1", "MemFS"}, + "photos/photo_01.jpg": {"tag1", "MemFS/photos"}, + "photos/summer/photo_02.jpg": {"tag1", "MemFS/photos/summer"}, + }, + }, + } + + logFile := configuration.DefaultLogFile() + for _, c := range tc { + t.Run(c.name, func(t *testing.T) { + ctx := context.Background() + + log := app.Log{ + File: logFile, + Level: "INFO", + } + err := log.OpenLogFile() + if err != nil { + t.Error(err) + return + } + log.Logger.Info("\n\n\ntest case: " + c.name) + recorder := fileevent.NewRecorder(log.Logger) + b, err := NewLocalFiles(ctx, recorder, &c.flags, c.fsys...) 
+ if err != nil { + t.Error(err) + } + + groupChan := b.Browse(ctx) + + got := map[string][]string{} + + for g := range groupChan { + if err = g.Validate(); err != nil { + t.Error(err) + return + } + for _, a := range g.Assets { + tags := []string{} + for _, tag := range a.Tags { + tags = append(tags, tag.Value) + } + + got[a.File.Name()] = tags + } + } + if !reflect.DeepEqual(got, c.want) { + t.Errorf("tags difference\n") + pretty.Ldiff(t, c.want, got) + } + }) + } +} + +func compareAlbums(t *testing.T, a, b map[string][]string) { + a = sortAlbum(a) + b = sortAlbum(b) + if !reflect.DeepEqual(a, b) { + t.Errorf("album list difference\n") + pretty.Ldiff(t, a, b) + } +} + +func sortAlbum(a map[string][]string) map[string][]string { + for k := range a { + sort.Strings(a[k]) + } + return a +} diff --git a/adapters/folder/writeFolder.go b/adapters/folder/writeFolder.go new file mode 100644 index 00000000..cd1b5586 --- /dev/null +++ b/adapters/folder/writeFolder.go @@ -0,0 +1,127 @@ +package folder + +import ( + "context" + "errors" + "fmt" + "io" + "io/fs" + "os" + "path" + + "github.com/simulot/immich-go/internal/assets" + "github.com/simulot/immich-go/internal/exif/sidecars/jsonsidecar" + "github.com/simulot/immich-go/internal/fshelper" +) + +// type minimalFSWriter interface { +// fs.FS +// fshelper.FSCanWrite +// } + +type closer interface { + Close() error +} +type LocalAssetWriter struct { + WriteToFS fs.FS + createdDir map[string]struct{} +} + +func NewLocalAssetWriter(fsys fs.FS, writeToPath string) (*LocalAssetWriter, error) { + if _, ok := fsys.(fshelper.FSCanWrite); !ok { + return nil, errors.New("FS does not support writing") + } + return &LocalAssetWriter{ + WriteToFS: fsys, + createdDir: make(map[string]struct{}), + }, nil +} + +func (w *LocalAssetWriter) WriteGroup(ctx context.Context, group *assets.Group) error { + var err error + + if fsys, ok := w.WriteToFS.(closer); ok { + defer fsys.Close() + } + for _, a := range group.Assets { + select { + case 
<-ctx.Done(): + return errors.Join(err, ctx.Err()) + default: + err = errors.Join(err, w.WriteAsset(ctx, a)) + } + } + return err +} + +func (w *LocalAssetWriter) WriteAsset(ctx context.Context, a *assets.Asset) error { + base := a.Base + dir := w.pathOfAsset(a) + if _, ok := w.createdDir[dir]; !ok { + err := fshelper.MkdirAll(w.WriteToFS, dir, 0o755) + if err != nil { + return err + } + w.createdDir[dir] = struct{}{} + } + select { + case <-ctx.Done(): + return ctx.Err() + default: + r, err := a.Open() + if err != nil { + return err + } + defer r.Close() + + select { + case <-ctx.Done(): + return ctx.Err() + default: + // write the asset + err = fshelper.WriteFile(w.WriteToFS, path.Join(dir, base), r) + if err != nil { + return err + } + // XMP? + if a.FromSideCar != nil { + // Sidecar file is set, copy it + var scr fs.File + scr, err = a.FromSideCar.File.Open() + if err != nil { + return err + } + defer scr.Close() + var scw fshelper.WFile + scw, err = fshelper.OpenFile(w.WriteToFS, path.Join(dir, base+".XMP"), os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0o644) + if err != nil { + return err + } + _, err = io.Copy(scw, scr) + scw.Close() + } + + // For a Application or immich-go JSON? 
+ if a.FromApplication != nil { + var scw fshelper.WFile + scw, err = fshelper.OpenFile(w.WriteToFS, path.Join(dir, base+".JSON"), os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0o644) + if err != nil { + return err + } + err = jsonsidecar.Write(a.FromApplication, scw) + scw.Close() + } + + return err + } + } +} + +func (w *LocalAssetWriter) pathOfAsset(a *assets.Asset) string { + d := a.CaptureDate + if d.IsZero() { + return "no-date" + } + p := path.Join(fmt.Sprintf("%04d", d.Year()), fmt.Sprintf("%04d-%02d", d.Year(), d.Month())) + return p +} diff --git a/adapters/fromimmich/fromimmich.go b/adapters/fromimmich/fromimmich.go new file mode 100644 index 00000000..2f81ef0f --- /dev/null +++ b/adapters/fromimmich/fromimmich.go @@ -0,0 +1,222 @@ +package fromimmich + +import ( + "context" + "errors" + "fmt" + "time" + + "github.com/simulot/immich-go/app" + "github.com/simulot/immich-go/immich" + "github.com/simulot/immich-go/internal/assets" + "github.com/simulot/immich-go/internal/fileevent" + "github.com/simulot/immich-go/internal/filenames" + "github.com/simulot/immich-go/internal/fshelper" + "github.com/simulot/immich-go/internal/immichfs" +) + +type FromImmich struct { + flags *FromImmichFlags + // client *app.Client + ifs *immichfs.ImmichFS + ic *filenames.InfoCollector + + mustFetchAlbums bool // True if we need to fetch the asset's albums in 2nd step + errCount int // Count the number of errors, to stop after 5 +} + +func NewFromImmich(ctx context.Context, app *app.Application, jnl *fileevent.Recorder, flags *FromImmichFlags) (*FromImmich, error) { + client := &flags.client + err := client.Initialize(ctx, app) + if err != nil { + return nil, err + } + err = client.Open(ctx) + if err != nil { + return nil, err + } + + ifs := immichfs.NewImmichFS(ctx, flags.client.Server, client.Immich) + f := FromImmich{ + flags: flags, + ifs: ifs, + ic: filenames.NewInfoCollector(time.Local, client.Immich.SupportedMedia()), + } + return &f, nil +} + +func (f *FromImmich) Browse(ctx 
context.Context) chan *assets.Group { + gOut := make(chan *assets.Group) + go func() { + defer close(gOut) + var err error + switch { + case len(f.flags.Albums) > 0: + err = f.getAssetsFromAlbums(ctx, gOut) + default: + err = f.getAssets(ctx, gOut) + } + if err != nil { + f.flags.client.ClientLog.Error(fmt.Sprintf("Error while getting Immich assets: %v", err)) + } + }() + return gOut +} + +const timeFormat = "2006-01-02T15:04:05.000Z" + +func (f *FromImmich) getAssets(ctx context.Context, grpChan chan *assets.Group) error { + query := immich.SearchMetadataQuery{ + Make: f.flags.Make, + Model: f.flags.Model, + // WithExif: true, + WithArchived: f.flags.WithArchived, + } + + f.mustFetchAlbums = true + if f.flags.DateRange.IsSet() { + query.TakenAfter = f.flags.DateRange.After.Format(timeFormat) + query.TakenBefore = f.flags.DateRange.Before.Format(timeFormat) + } + + return f.flags.client.Immich.GetAllAssetsWithFilter(ctx, &query, func(a *immich.Asset) error { + if f.flags.Favorite && !a.IsFavorite { + return nil + } + if !f.flags.WithTrashed && a.IsTrashed { + return nil + } + return f.filterAsset(ctx, a, grpChan) + }) +} + +func (f *FromImmich) getAssetsFromAlbums(ctx context.Context, grpChan chan *assets.Group) error { + f.mustFetchAlbums = false + + assets := map[string]*immich.Asset{} // List of assets to get by ID + + albums, err := f.flags.client.Immich.GetAllAlbums(ctx) + if err != nil { + return f.logError(err) + } + for _, album := range albums { + for _, albumName := range f.flags.Albums { + if album.Title == albumName { + al, err := f.flags.client.Immich.GetAlbumInfo(ctx, album.ID, false) + if err != nil { + return f.logError(err) + } + for _, a := range al.Assets { + if _, ok := assets[a.ID]; !ok { + a.Albums = append(a.Albums, immich.AlbumSimplified{ + AlbumName: album.Title, + }) + assets[a.ID] = a + } else { + assets[a.ID].Albums = append(assets[a.ID].Albums, immich.AlbumSimplified{ + AlbumName: album.Title, + }) + } + } + } + } + } + + for _, a := 
range assets { + err = f.filterAsset(ctx, a, grpChan) + if err != nil { + return f.logError(err) + } + } + return nil +} + +func (f *FromImmich) filterAsset(ctx context.Context, a *immich.Asset, grpChan chan *assets.Group) error { + var err error + if f.flags.Favorite && !a.IsFavorite { + return nil + } + + if !f.flags.WithTrashed && a.IsTrashed { + return nil + } + + albums := immich.AlbumsFromAlbumSimplified(a.Albums) + + if f.mustFetchAlbums && len(albums) == 0 { + albums, err = f.flags.client.Immich.GetAssetAlbums(ctx, a.ID) + if err != nil { + return f.logError(err) + } + } + if len(f.flags.Albums) > 0 && len(albums) > 0 { + keepMe := false + newAlbumList := []assets.Album{} + for _, album := range f.flags.Albums { + for _, aAlbum := range albums { + if album == aAlbum.Title { + keepMe = true + newAlbumList = append(newAlbumList, aAlbum) + } + } + } + if !keepMe { + return nil + } + albums = newAlbumList + } + + // Some information are missing in the metadata result, + // so we need to get the asset details + + a, err = f.flags.client.Immich.GetAssetInfo(ctx, a.ID) + if err != nil { + return f.logError(err) + } + asset := a.AsAsset() + asset.SetNameInfo(f.ic.GetInfo(asset.OriginalFileName)) + asset.File = fshelper.FSName(f.ifs, a.ID) + + asset.FromApplication = &assets.Metadata{ + Latitude: a.ExifInfo.Latitude, + Longitude: a.ExifInfo.Longitude, + Description: a.ExifInfo.Description, + DateTaken: a.ExifInfo.DateTimeOriginal.Time, + Trashed: a.IsTrashed, + Archived: a.IsArchived, + Favorited: a.IsFavorite, + Rating: byte(a.Rating), + Albums: albums, + Tags: asset.Tags, + } + + if f.flags.MinimalRating > 0 && a.Rating < f.flags.MinimalRating { + return nil + } + + if f.flags.DateRange.IsSet() { + if asset.CaptureDate.Before(f.flags.DateRange.After) || asset.CaptureDate.After(f.flags.DateRange.Before) { + return nil + } + } + + g := assets.NewGroup(assets.GroupByNone, asset) + g.Albums = asset.Albums + select { + case grpChan <- g: + case <-ctx.Done(): + return 
ctx.Err()
+	}
+	return nil
+}
+
+func (f *FromImmich) logError(err error) error {
+	f.flags.client.ClientLog.Error(fmt.Sprintf("Error while getting Immich assets: %v", err))
+	f.errCount++
+	if f.errCount > 5 {
+		err := errors.New("too many errors, aborting")
+		f.flags.client.ClientLog.Error(err.Error())
+		return err
+	}
+	return nil
+}
diff --git a/adapters/fromimmich/options.go b/adapters/fromimmich/options.go
new file mode 100644
index 00000000..479687b2
--- /dev/null
+++ b/adapters/fromimmich/options.go
@@ -0,0 +1,40 @@
+package fromimmich
+
+import (
+	"time"
+
+	"github.com/simulot/immich-go/app"
+	cliflags "github.com/simulot/immich-go/internal/cliFlags"
+	"github.com/spf13/cobra"
+)
+
+type FromImmichFlags struct {
+	DateRange     cliflags.DateRange // get assets only within this date range (format: YYYY-MM-DD,YYYY-MM-DD)
+	Albums        []string           // get assets only from those albums
+	Tags          []string           // get assets only with those tags
+	WithArchived  bool               // get archived assets too
+	WithTrashed   bool               // get trashed assets too
+	Favorite      bool               // get only favorite assets
+	MinimalRating int                // get only assets with a rating greater or equal to this value
+	Make          string             // get only assets with this make
+	Model         string             // get only assets with this model
+	client        app.Client         // client to use for the import
+}
+
+func (o *FromImmichFlags) AddFromImmichFlags(cmd *cobra.Command, parent *cobra.Command) {
+	// cmd.Flags().StringVar(&o.Make, "from-make", "", "Get only assets with this make")
+	// cmd.Flags().StringVar(&o.Model, "from-model", "", "Get only assets with this model")
+	cmd.Flags().StringSliceVar(&o.Albums, "from-album", nil, "Get assets only from those albums, can be used multiple times")
+	// cmd.Flags().StringSliceVar(&o.Tags, "from-tags", nil, "Get assets only with those tags")
+	cmd.Flags().Var(&o.DateRange, "from-date-range", "Get assets only within this date range (format: YYYY[-MM[-DD[,YYYY-MM-DD]]])")
+	// cmd.Flags().BoolVar(&o.WithArchived, "from-archived",
false, "Get archived assets too")
+	// cmd.Flags().BoolVar(&o.WithTrashed, "from-trashed", false, "Get trashed assets too")
+	// cmd.Flags().BoolVar(&o.Favorite, "from-favorite", false, "Get only favorite assets")
+	// cmd.Flags().IntVar(&o.MinimalRating, "from-minimal-rating", 0, "Get only assets with a rating greater or equal to this value")
+
+	cmd.Flags().StringVar(&o.client.Server, "from-server", o.client.Server, "Immich server address (example http://your-ip:2283 or https://your-domain)")
+	cmd.Flags().StringVar(&o.client.APIKey, "from-api-key", "", "API Key")
+	cmd.Flags().BoolVar(&o.client.APITrace, "from-api-trace", false, "Enable trace of api calls")
+	cmd.Flags().BoolVar(&o.client.SkipSSL, "from-skip-verify-ssl", false, "Skip SSL verification")
+	cmd.Flags().DurationVar(&o.client.ClientTimeout, "from-client-timeout", 5*time.Minute, "Set server calls timeout")
+}
diff --git a/adapters/googlePhotos/changelog.md b/adapters/googlePhotos/changelog.md
new file mode 100644
index 00000000..7e975279
--- /dev/null
+++ b/adapters/googlePhotos/changelog.md
@@ -0,0 +1,6 @@
+- improved counters
+  - duplicates counters when linked videos
+  - discarded partner's files not counted
+  - correct counters when the same path/file is found in different parts of the archive
+- improved logs
+  - the zip file that contains the asset file is now logged
diff --git a/adapters/googlePhotos/e2etests/e2e_nonregression_test.go b/adapters/googlePhotos/e2etests/e2e_nonregression_test.go
new file mode 100644
index 00000000..c2d430f5
--- /dev/null
+++ b/adapters/googlePhotos/e2etests/e2e_nonregression_test.go
@@ -0,0 +1,119 @@
+//go:build e2e
+// +build e2e
+
+package gp_test
+
+import (
+	"context"
+	"io/fs"
+	"log/slog"
+	"os"
+	"testing"
+	"time"
+
+	"github.com/joho/godotenv"
+	gp "github.com/simulot/immich-go/adapters/googlePhotos"
+	"github.com/simulot/immich-go/internal/fileevent"
+	"github.com/simulot/immich-go/internal/filenames"
"github.com/simulot/immich-go/internal/filetypes" +) + +var myEnv map[string]string + +func initMyEnv(t *testing.T) { + if len(myEnv) > 0 { + return + } + var err error + e, err := godotenv.Read("../../../e2e.env") + if err != nil { + t.Fatalf("cant initialize environment variables: %s", err) + } + myEnv = e + if myEnv["IMMICHGO_TESTFILES"] == "" { + t.Fatal("missing IMMICHGO_TESTFILES in .env file") + } +} + +type expectedCounts map[fileevent.Code]int64 + +func simulateAndCheck(t *testing.T, fileList string, flags *gp.ImportFlags, expected expectedCounts, fsyss []fs.FS) { + if flags.SupportedMedia == nil { + flags.SupportedMedia = filetypes.DefaultSupportedMedia + } + flags.InfoCollector = filenames.NewInfoCollector(time.Local, flags.SupportedMedia) + jnl, err := simulate_upload(fileList, flags, fsyss) + if err != nil { + t.Error(err) + return + } + + counts := jnl.GetCounts() + + shouldUpload := counts[fileevent.DiscoveredImage] + + counts[fileevent.DiscoveredVideo] - + counts[fileevent.AnalysisLocalDuplicate] - + counts[fileevent.DiscoveredDiscarded] + if !flags.KeepJSONLess { + shouldUpload -= counts[fileevent.AnalysisMissingAssociatedMetadata] + } + diff := shouldUpload - counts[fileevent.Uploaded] + if diff != 0 { + t.Errorf("The counter[Uploaded]==%d, expected %d, diff %d", counts[fileevent.Uploaded], shouldUpload, diff) + } + + for c := fileevent.Code(0); c < fileevent.MaxCode; c++ { + if v, ok := expected[c]; ok { + if counts[c] != v { + t.Errorf("The counter[%s]==%d, expected %d", c.String(), counts[c], expected[c]) + } + } + } +} + +// Simulate takeout archive upload +func simulate_upload(testname string, flags *gp.ImportFlags, fsys []fs.FS) (*fileevent.Recorder, error) { + ctx := context.Background() + + logFile, err := os.Create(testname + ".json") + if err != nil { + return nil, err + } + defer logFile.Close() + + log := slog.New(slog.NewJSONHandler(logFile, nil)) + jnl := fileevent.NewRecorder(log) + adapter, err := gp.NewTakeout(ctx, jnl, flags, 
fsys...) + if err != nil { + return nil, err + } + + assetsGroups := adapter.Browse(ctx) + for g := range assetsGroups { + for i, a := range g.Assets { + jnl.Record(ctx, fileevent.Uploaded, a) + if i >= 0 { + for _, album := range g.Albums { + jnl.Record(ctx, fileevent.UploadAddToAlbum, a, "album", album.Title) + } + } + } + } + + jnl.Report() + + trackerFile, err := os.Create(testname + ".tracker.csv") + if err != nil { + return nil, err + } + defer trackerFile.Close() + adapter.DebugFileTracker(trackerFile) + + linkedFiles, err := os.Create(testname + ".linked.csv") + if err != nil { + return nil, err + } + defer linkedFiles.Close() + + return jnl, nil +} diff --git a/adapters/googlePhotos/e2etests/e2e_takeout_test.go b/adapters/googlePhotos/e2etests/e2e_takeout_test.go new file mode 100644 index 00000000..2a09bfea --- /dev/null +++ b/adapters/googlePhotos/e2etests/e2e_takeout_test.go @@ -0,0 +1,197 @@ +//go:build e2e +// +build e2e + +package gp_test + +import ( + "testing" + + gp "github.com/simulot/immich-go/adapters/googlePhotos" + "github.com/simulot/immich-go/internal/fakefs" + "github.com/simulot/immich-go/internal/fileevent" + "github.com/simulot/immich-go/internal/fshelper" +) + +func TestPixilTakeOut(t *testing.T) { + initMyEnv(t) + + files := myEnv["IMMICHGO_TESTFILES"] + "/User Files/pixil/0.22.0/list.lst.zip" + fsyss, err := fakefs.ScanFileList(files, "01-02-2006 15:04") + if err != nil { + t.Error(err) + return + } + simulateAndCheck(t, files, &gp.ImportFlags{ + CreateAlbums: true, + }, expectedCounts{ + fileevent.DiscoveredImage: 21340, + fileevent.DiscoveredVideo: 8644, + fileevent.DiscoveredSidecar: 21560, + fileevent.DiscoveredUnsupported: 8, + fileevent.DiscoveredDiscarded: 0, + fileevent.AnalysisAssociatedMetadata: 29984, + fileevent.AnalysisLocalDuplicate: 13151, + fileevent.UploadAddToAlbum: 13391, + fileevent.Uploaded: 16833, + fileevent.AnalysisMissingAssociatedMetadata: 0, + }, fsyss) +} + +func TestDemoTakeOut(t *testing.T) { + 
initMyEnv(t) + + files := myEnv["IMMICHGO_TESTFILES"] + "/demo takeout/Takeout" + fsyss, err := fshelper.ParsePath([]string{files}) + if err != nil { + t.Error(err) + return + } + simulateAndCheck(t, files, &gp.ImportFlags{ + CreateAlbums: true, + KeepJSONLess: true, + KeepPartner: false, + }, expectedCounts{ + fileevent.DiscoveredImage: 338, + fileevent.DiscoveredVideo: 9, + fileevent.DiscoveredSidecar: 345, + fileevent.DiscoveredUnsupported: 1, + fileevent.AnalysisAssociatedMetadata: 346, + fileevent.AnalysisLocalDuplicate: 49, + fileevent.UploadAddToAlbum: 215, + fileevent.Uploaded: 286, + fileevent.AnalysisMissingAssociatedMetadata: 0, + }, fsyss) +} + +/* +TestPhyl404TakeOut +In this dataset, a file can be present in different ZIP files, with the same path: + + ex: zip1:/album1/photo1.jpg and zip2:/album1/photo1.jpg +*/ +func TestPhyl404TakeOut(t *testing.T) { + initMyEnv(t) + + files := myEnv["IMMICHGO_TESTFILES"] + "/User Files/Phyl404/list.lst" + fsyss, err := fakefs.ScanFileList(files, "2006-01-02 15:04") + if err != nil { + t.Error(err) + return + } + simulateAndCheck(t, files, &gp.ImportFlags{ + CreateAlbums: true, + KeepPartner: true, + }, expectedCounts{ + fileevent.DiscoveredImage: 113181, + fileevent.DiscoveredVideo: 20542, + fileevent.DiscoveredSidecar: 139660, + fileevent.DiscoveredUnsupported: 5, + fileevent.AnalysisAssociatedMetadata: 111592, + fileevent.AnalysisLocalDuplicate: 20776, + fileevent.UploadAddToAlbum: 2625, + fileevent.Uploaded: 109966, + fileevent.AnalysisMissingAssociatedMetadata: 2978, + }, fsyss) +} + +func TestPhyl404_2TakeOut(t *testing.T) { + initMyEnv(t) + + files := myEnv["IMMICHGO_TESTFILES"] + "/User Files/Phyl404#2/list.lst" + fsyss, err := fakefs.ScanFileList(files, "2006-01-02 15:04") + if err != nil { + t.Error(err) + return + } + simulateAndCheck(t, files, &gp.ImportFlags{ + CreateAlbums: true, + }, expectedCounts{ + fileevent.DiscoveredImage: 105918, + fileevent.DiscoveredVideo: 18607, + fileevent.DiscoveredSidecar: 
122981, + fileevent.DiscoveredUnsupported: 5, + fileevent.AnalysisAssociatedMetadata: 124521, + fileevent.AnalysisLocalDuplicate: 2896, + fileevent.UploadAddToAlbum: 4379, + fileevent.Uploaded: 121625, + fileevent.AnalysisMissingAssociatedMetadata: 1, + }, fsyss) +} + +func TestSteve81TakeOut(t *testing.T) { + initMyEnv(t) + + files := myEnv["IMMICHGO_TESTFILES"] + "/User Files/Steve81/list.list" + fsyss, err := fakefs.ScanFileList(files, "2006-01-02 15:04") + if err != nil { + t.Error(err) + return + } + simulateAndCheck(t, files, &gp.ImportFlags{ + CreateAlbums: true, + KeepPartner: true, + }, expectedCounts{ + fileevent.DiscoveredImage: 44072, + fileevent.DiscoveredVideo: 4160, + fileevent.DiscoveredSidecar: 44987, + fileevent.DiscoveredUnsupported: 57, + fileevent.AnalysisAssociatedMetadata: 44907, + fileevent.AnalysisLocalDuplicate: 23131, + fileevent.UploadAddToAlbum: 31364, + fileevent.Uploaded: 25097, + fileevent.AnalysisMissingAssociatedMetadata: 4, + }, fsyss) +} + +func TestMuetyTakeOut(t *testing.T) { + initMyEnv(t) + + files := myEnv["IMMICHGO_TESTFILES"] + "/User Files/muety/list.lst.zip" + fsyss, err := fakefs.ScanFileList(files, "01-02-2006 15:04") + if err != nil { + t.Error(err) + return + } + simulateAndCheck(t, files, &gp.ImportFlags{ + CreateAlbums: true, + KeepPartner: true, + }, expectedCounts{ + fileevent.DiscoveredImage: 25716, + fileevent.DiscoveredVideo: 470, + fileevent.DiscoveredSidecar: 20070, + fileevent.DiscoveredDiscarded: 1, + fileevent.DiscoveredUnsupported: 6, + fileevent.AnalysisAssociatedMetadata: 21420, + fileevent.AnalysisLocalDuplicate: 10045, + fileevent.UploadAddToAlbum: 6178, + fileevent.Uploaded: 16127, + fileevent.AnalysisMissingAssociatedMetadata: 13, + }, fsyss) +} + +func TestMissingJSONTakeOut(t *testing.T) { + initMyEnv(t) + + files := myEnv["IMMICHGO_TESTFILES"] + "/User Files/MissingJSON/list.lst" + fsyss, err := fakefs.ScanFileList(files, "01-02-2006 15:04") + if err != nil { + t.Error(err) + return + } + 
simulateAndCheck(t, files, &gp.ImportFlags{ + CreateAlbums: true, + KeepPartner: true, + KeepJSONLess: true, + }, expectedCounts{ + fileevent.DiscoveredImage: 4, + fileevent.DiscoveredVideo: 1, + fileevent.DiscoveredSidecar: 2, + fileevent.DiscoveredDiscarded: 0, + fileevent.DiscoveredUnsupported: 0, + fileevent.AnalysisAssociatedMetadata: 1, + fileevent.AnalysisLocalDuplicate: 0, + fileevent.UploadAddToAlbum: 2, + fileevent.Uploaded: 5, + fileevent.AnalysisMissingAssociatedMetadata: 4, + }, fsyss) +} diff --git a/adapters/googlePhotos/googlephotos.go b/adapters/googlePhotos/googlephotos.go new file mode 100644 index 00000000..287cadf0 --- /dev/null +++ b/adapters/googlePhotos/googlephotos.go @@ -0,0 +1,597 @@ +package gp + +import ( + "bytes" + "context" + "fmt" + "io/fs" + "log/slog" + "path" + "path/filepath" + "sort" + "strings" + "time" + + "github.com/simulot/immich-go/internal/assets" + "github.com/simulot/immich-go/internal/fileevent" + "github.com/simulot/immich-go/internal/filenames" + "github.com/simulot/immich-go/internal/filetypes" + "github.com/simulot/immich-go/internal/filters" + "github.com/simulot/immich-go/internal/fshelper" + "github.com/simulot/immich-go/internal/gen" + "github.com/simulot/immich-go/internal/groups" + "github.com/simulot/immich-go/internal/groups/burst" + "github.com/simulot/immich-go/internal/groups/epsonfastfoto" + "github.com/simulot/immich-go/internal/groups/series" +) + +type Takeout struct { + fsyss []fs.FS + catalogs map[string]directoryCatalog // file catalogs by directory in the set of the all takeout parts + albums map[string]assets.Album // track album names by folder + fileTracker map[fileKeyTracker]trackingInfo // key is base name + file size, value is list of file paths + // debugLinkedFiles []linkedFiles + log *fileevent.Recorder + flags *ImportFlags // command-line flags + groupers []groups.Grouper +} + +type fileKeyTracker struct { + baseName string + size int64 +} + +type trackingInfo struct { + paths []string 
+ count int + metadata *assets.Metadata + status fileevent.Code +} + +func trackerKeySortFunc(a, b fileKeyTracker) int { + cmp := strings.Compare(a.baseName, b.baseName) + if cmp != 0 { + return cmp + } + return int(a.size) - int(b.size) +} + +// directoryCatalog captures all files in a given directory +type directoryCatalog struct { + jsons map[string]*assets.Metadata // metadata in the catalog by base name + unMatchedFiles map[string]*assetFile // files to be matched map by base name + matchedFiles map[string]*assets.Asset // files matched by base name +} + +// assetFile keep information collected during pass one +type assetFile struct { + fsys fs.FS // Remember in which part of the archive the file is located + base string // Remember the original file name + length int // file length in bytes + date time.Time // file modification date + md *assets.Metadata // will point to the associated metadata +} + +// Implement slog.LogValuer for assetFile +func (af assetFile) LogValue() slog.Value { + return slog.GroupValue( + slog.String("base", af.base), + slog.Int("length", af.length), + slog.Time("date", af.date), + ) +} + +func NewTakeout(ctx context.Context, l *fileevent.Recorder, flags *ImportFlags, fsyss ...fs.FS) (*Takeout, error) { + to := Takeout{ + fsyss: fsyss, + catalogs: map[string]directoryCatalog{}, + albums: map[string]assets.Album{}, + fileTracker: map[fileKeyTracker]trackingInfo{}, + log: l, + flags: flags, + } + if flags.InfoCollector == nil { + flags.InfoCollector = filenames.NewInfoCollector(flags.TZ, flags.SupportedMedia) + } + // if flags.ExifToolFlags.UseExifTool { + // err := exif.NewExifTool(&flags.ExifToolFlags) + // if err != nil { + // return nil, err + // } + // } + if flags.SessionTag { + flags.session = fmt.Sprintf("{immich-go}/%s", time.Now().Format("2006-01-02 15:04:05")) + } + + if flags.ManageEpsonFastFoto { + g := epsonfastfoto.Group{} + to.groupers = append(to.groupers, g.Group) + } + if flags.ManageBurst != filters.BurstNothing { + 
to.groupers = append(to.groupers, burst.Group) + } + to.groupers = append(to.groupers, series.Group) + + return &to, nil +} + +// Prepare scans all files in all walker to build the file catalog of the archive +// metadata files content is read and kept +// return a channel of asset groups after the puzzle is solved + +func (to *Takeout) Browse(ctx context.Context) chan *assets.Group { + ctx, cancel := context.WithCancelCause(ctx) + gOut := make(chan *assets.Group) + go func() { + defer close(gOut) + + for _, w := range to.fsyss { + err := to.passOneFsWalk(ctx, w) + if err != nil { + cancel(err) + return + } + } + err := to.solvePuzzle(ctx) + if err != nil { + cancel(err) + return + } + err = to.passTwo(ctx, gOut) + cancel(err) + }() + return gOut +} + +func (to *Takeout) passOneFsWalk(ctx context.Context, w fs.FS) error { + err := fs.WalkDir(w, ".", func(name string, d fs.DirEntry, err error) error { + if err != nil { + return err + } + + select { + case <-ctx.Done(): + return ctx.Err() + default: + + if d.IsDir() { + return nil + } + + dir, base := path.Split(name) + dir = strings.TrimSuffix(dir, "/") + ext := strings.ToLower(path.Ext(base)) + + dirCatalog, ok := to.catalogs[dir] + if !ok { + dirCatalog.jsons = map[string]*assets.Metadata{} + dirCatalog.unMatchedFiles = map[string]*assetFile{} + dirCatalog.matchedFiles = map[string]*assets.Asset{} + } + finfo, err := d.Info() + if err != nil { + to.log.Record(ctx, fileevent.Error, fshelper.FSName(w, name), "error", err.Error()) + return err + } + switch ext { + case ".json": + var md *assets.Metadata + b, err := fs.ReadFile(w, name) + if err != nil { + to.log.Record(ctx, fileevent.Error, fshelper.FSName(w, name), "error", err.Error()) + return nil + } + if bytes.Contains(b, []byte("immich-go version:")) { + md, err = assets.UnMarshalMetadata(b) + if err != nil { + to.log.Record(ctx, fileevent.DiscoveredUnsupported, fshelper.FSName(w, name), "reason", "unknown JSONfile") + } + md.FileName = base + 
to.log.Record(ctx, fileevent.DiscoveredSidecar, fshelper.FSName(w, name), "type", "immich-go metadata", "title", md.FileName) + md.File = fshelper.FSName(w, name) + } else { + md, err := fshelper.UnmarshalJSON[GoogleMetaData](b) + if err == nil { + switch { + case md.isAsset(): + md := md.AsMetadata(fshelper.FSName(w, name)) // Keep metadata + md.File = fshelper.FSName(w, name) + dirCatalog.jsons[base] = md + to.log.Log().Debug("Asset JSON", "metadata", md) + to.log.Record(ctx, fileevent.DiscoveredSidecar, fshelper.FSName(w, name), "type", "asset metadata", "title", md.FileName) + case md.isAlbum(): + to.log.Log().Debug("Album JSON", "metadata", md) + if !to.flags.KeepUntitled && md.Title == "" { + to.log.Record(ctx, fileevent.DiscoveredUnsupported, fshelper.FSName(w, name), "reason", "discard untitled album") + return nil + } + a := to.albums[dir] + a.Title = md.Title + if a.Title == "" { + a.Title = filepath.Base(dir) + } + if e := md.Enrichments; e != nil { + a.Description = e.Text + a.Latitude = e.Latitude + a.Longitude = e.Longitude + } + to.albums[dir] = a + to.log.Record(ctx, fileevent.DiscoveredSidecar, fshelper.FSName(w, name), "type", "album metadata", "title", md.Title) + default: + to.log.Record(ctx, fileevent.DiscoveredUnsupported, fshelper.FSName(w, name), "reason", "unknown JSONfile") + return nil + } + } else { + to.log.Record(ctx, fileevent.DiscoveredUnsupported, fshelper.FSName(w, name), "reason", "unknown JSONfile") + return nil + } + } + default: + + if to.flags.BannedFiles.Match(name) { + to.log.Record(ctx, fileevent.DiscoveredDiscarded, fshelper.FSName(w, name), "reason", "banned file") + return nil + } + + if !to.flags.InclusionFlags.IncludedExtensions.Include(ext) { + to.log.Record(ctx, fileevent.DiscoveredDiscarded, fshelper.FSName(w, name), "reason", "file extension not selected") + return nil + } + if to.flags.InclusionFlags.ExcludedExtensions.Exclude(ext) { + to.log.Record(ctx, fileevent.DiscoveredDiscarded, fshelper.FSName(w, name), 
"reason", "file extension not allowed") + return nil + } + t := to.flags.SupportedMedia.TypeFromExt(ext) + switch t { + case filetypes.TypeUnknown: + to.log.Record(ctx, fileevent.DiscoveredUnsupported, fshelper.FSName(w, name), "reason", "unsupported file type") + return nil + case filetypes.TypeVideo: + to.log.Record(ctx, fileevent.DiscoveredVideo, fshelper.FSName(w, name)) + if strings.Contains(name, "Failed Videos") { + to.log.Record(ctx, fileevent.DiscoveredDiscarded, fshelper.FSName(w, name), "reason", "can't upload failed videos") + return nil + } + case filetypes.TypeImage: + to.log.Record(ctx, fileevent.DiscoveredImage, fshelper.FSName(w, name)) + } + + key := fileKeyTracker{ + baseName: base, + size: finfo.Size(), + } + + tracking := to.fileTracker[key] + tracking.paths = append(tracking.paths, dir) + tracking.count++ + to.fileTracker[key] = tracking + + if a, ok := dirCatalog.unMatchedFiles[base]; ok { + to.logMessage(ctx, fileevent.AnalysisLocalDuplicate, a, "duplicated in the directory") + return nil + } + + dirCatalog.unMatchedFiles[base] = &assetFile{ + fsys: w, + base: base, + length: int(finfo.Size()), + date: finfo.ModTime(), + } + } + to.catalogs[dir] = dirCatalog + return nil + } + }) + return err +} + +// solvePuzzle prepares metadata with information collected during pass one for each accepted files +// +// JSON files give important information about the relative photos / movies: +// - The original name (useful when it as been truncated) +// - The date of capture (useful when the files doesn't have this date) +// - The GPS coordinates (will be useful in a future release) +// +// Each JSON is checked. JSON is duplicated in albums folder. +// --Associated files with the JSON can be found in the JSON's folder, or in the Year photos.-- +// ++JSON and files are located in the same folder +/// +// Once associated and sent to the main program, files are tagged for not been associated with an other one JSON. 
+// Association is done with the help of a set of matcher functions. Each one implements a rule +// +// 1 JSON can be associated with 1+ files that have a part of their name in common. +// - the file is named after the JSON name +// - the file name can be 1 UTF-16 char shorter (🤯) than the JSON name +// - a file name longer than 46 UTF-16 chars (🤯) is truncated. But the truncation can create duplicates, then a number is added. +// - if there are several files with the same original name, the first instance is kept as it is, the next has a sequence number. +// File is renamed as IMG_1234(1).JPG and the JSON is renamed as IMG_1234.JPG(1).JSON +// - of course those rules are likely to collide. They have to be applied from the most common to the least one. +// - sometimes the file isn't in the same folder as the json... It can be found in the Year's photos folder +// +// --The duplicate files (same name, same length in bytes) found in the local source are discarded before being presented to the immich server. +// ++ Duplicates are presented to the next layer to allow the album handling +// +// To solve the puzzle, each directory is checked with all matchers in the order of the most common to the least. 
+ +type matcherFn func(jsonName string, fileName string, sm filetypes.SupportedMedia) bool + +// matchers is a list of matcherFn from the most likely to be used to the least one +var matchers = []struct { + name string + fn matcherFn +}{ + {name: "normalMatch", fn: normalMatch}, + {name: "livePhotoMatch", fn: livePhotoMatch}, + {name: "matchWithOneCharOmitted", fn: matchWithOneCharOmitted}, + {name: "matchVeryLongNameWithNumber", fn: matchVeryLongNameWithNumber}, + {name: "matchDuplicateInYear", fn: matchDuplicateInYear}, + {name: "matchEditedName", fn: matchEditedName}, + {name: "matchForgottenDuplicates", fn: matchForgottenDuplicates}, +} + +func (to *Takeout) solvePuzzle(ctx context.Context) error { + dirs := gen.MapKeysSorted(to.catalogs) + for _, dir := range dirs { + cat := to.catalogs[dir] + jsons := gen.MapKeysSorted(cat.jsons) + for _, matcher := range matchers { + for _, json := range jsons { + md := cat.jsons[json] + for f := range cat.unMatchedFiles { + select { + case <-ctx.Done(): + return ctx.Err() + default: + if matcher.fn(json, f, to.flags.SupportedMedia) { + i := cat.unMatchedFiles[f] + i.md = md + a := to.makeAsset(ctx, dir, i, md) + cat.matchedFiles[f] = a + to.log.Record(ctx, fileevent.AnalysisAssociatedMetadata, fshelper.FSName(i.fsys, path.Join(dir, i.base)), "json", json, "matcher", matcher.name) + delete(cat.unMatchedFiles, f) + } + } + } + } + } + to.catalogs[dir] = cat + if len(cat.unMatchedFiles) > 0 { + files := gen.MapKeys(cat.unMatchedFiles) + sort.Strings(files) + for _, f := range files { + i := cat.unMatchedFiles[f] + to.log.Record(ctx, fileevent.AnalysisMissingAssociatedMetadata, fshelper.FSName(i.fsys, path.Join(dir, i.base))) + if to.flags.KeepJSONLess { + a := to.makeAsset(ctx, dir, i, nil) + cat.matchedFiles[f] = a + delete(cat.unMatchedFiles, f) + } + } + } + } + return nil +} + +// Browse return a channel of assets +// Each asset is a group of files that are associated with each other + +func (to *Takeout) passTwo(ctx 
context.Context, gOut chan *assets.Group) error { + dirs := gen.MapKeys(to.catalogs) + sort.Strings(dirs) + for _, dir := range dirs { + if len(to.catalogs[dir].matchedFiles) > 0 { + err := to.handleDir(ctx, dir, gOut) + if err != nil { + return err + } + } + } + return nil +} + +// type linkedFiles struct { +// dir string +// base string +// video *assetFile +// image *assetFile +// } + +func (to *Takeout) handleDir(ctx context.Context, dir string, gOut chan *assets.Group) error { + catalog := to.catalogs[dir] + + dirEntries := make([]*assets.Asset, 0, len(catalog.matchedFiles)) + + for name := range catalog.matchedFiles { + a := catalog.matchedFiles[name] + key := fileKeyTracker{baseName: name, size: int64(a.FileSize)} + track := to.fileTracker[key] + if track.status == fileevent.Uploaded { + a.Close() + to.logMessage(ctx, fileevent.AnalysisLocalDuplicate, a.File, "local duplicate") + continue + } + + // Filter on metadata + if code := to.filterOnMetadata(ctx, a); code != fileevent.Code(0) { + a.Close() + continue + } + dirEntries = append(dirEntries, a) + } + + in := make(chan *assets.Asset) + go func() { + defer close(in) + + sort.Slice(dirEntries, func(i, j int) bool { + // Sort by radical first + radicalI := dirEntries[i].Radical + radicalJ := dirEntries[j].Radical + if radicalI != radicalJ { + return radicalI < radicalJ + } + // If radicals are the same, sort by date + return dirEntries[i].CaptureDate.Before(dirEntries[j].CaptureDate) + }) + + for _, a := range dirEntries { + select { + case in <- a: + case <-ctx.Done(): + return + } + } + }() + + gs := groups.NewGrouperPipeline(ctx, to.groupers...).PipeGrouper(ctx, in) + for g := range gs { + // Manage albums + for _, a := range g.Assets { + if to.flags.CreateAlbums { + if to.flags.ImportIntoAlbum != "" { + // Force this album + g.Albums = []assets.Album{{Title: to.flags.ImportIntoAlbum}} + } else { + // check if its duplicates are in some albums, and push them all at once + key := fileKeyTracker{baseName: 
filepath.Base(a.File.Name()), size: int64(a.FileSize)} + track := to.fileTracker[key] + for _, p := range track.paths { + if album, ok := to.albums[p]; ok { + title := album.Title + if title == "" { + if !to.flags.KeepUntitled { + continue + } + title = filepath.Base(p) + } + g.AddAlbum(assets.Album{ + Title: title, + Description: album.Description, + Latitude: album.Latitude, + Longitude: album.Longitude, + }) + } + } + } + + // Force this album for partners photos + if to.flags.PartnerSharedAlbum != "" && a.FromPartner { + g.Albums = append(g.Albums, assets.Album{Title: to.flags.PartnerSharedAlbum}) + } + } + // If the asset has no GPS information, but the album has, use the album's location + if a.Latitude == 0 && a.Longitude == 0 { + for _, album := range g.Albums { + if album.Latitude != 0 || album.Longitude != 0 { + // when there isn't GPS information on the photo, but the album has a location, use that location + a.Latitude = album.Latitude + a.Longitude = album.Longitude + break + } + } + } + } + + for _, a := range g.Assets { + a.Albums = g.Albums + if to.flags.SessionTag { + a.AddTag(to.flags.session) + } + if to.flags.Tags != nil { + for _, tag := range to.flags.Tags { + a.AddTag(tag) + } + } + if to.flags.TakeoutTag { + a.AddTag(to.flags.TakeoutName) + } + } + + select { + case gOut <- g: + for _, a := range g.Assets { + key := fileKeyTracker{ + baseName: path.Base(a.File.Name()), + size: int64(a.FileSize), + } + track := to.fileTracker[key] + track.status = fileevent.Uploaded + to.fileTracker[key] = track + } + case <-ctx.Done(): + return ctx.Err() + } + } + return nil +} + +// makeAsset makes a localAssetFile based on the google metadata +func (to *Takeout) makeAsset(_ context.Context, dir string, f *assetFile, md *assets.Metadata) *assets.Asset { + file := path.Join(dir, f.base) + a := &assets.Asset{ + File: fshelper.FSName(f.fsys, file), // File as named in the archive + FileSize: f.length, + OriginalFileName: f.base, + FileDate: f.date, + } + + // 
get the original file name from metadata + if md != nil && md.FileName != "" { + a.OriginalFileName = md.FileName + + title := md.FileName + // trim superfluous extensions + titleExt := path.Ext(title) + fileExt := path.Ext(file) + + if titleExt != fileExt { + title = strings.TrimSuffix(title, titleExt) + titleExt = path.Ext(title) + if titleExt != fileExt { + title = strings.TrimSuffix(title, titleExt) + fileExt + } + } + a.FromApplication = a.UseMetadata(md) + a.OriginalFileName = title + } + a.FromApplication = a.UseMetadata(md) + a.SetNameInfo(to.flags.InfoCollector.GetInfo(a.OriginalFileName)) + return a +} + +func (to *Takeout) filterOnMetadata(ctx context.Context, a *assets.Asset) fileevent.Code { + if !to.flags.KeepArchived && a.Archived { + to.logMessage(ctx, fileevent.DiscoveredDiscarded, a, "discarding archived file") + a.Close() + return fileevent.DiscoveredDiscarded + } + if !to.flags.KeepPartner && a.FromPartner { + to.logMessage(ctx, fileevent.DiscoveredDiscarded, a, "discarding partner file") + a.Close() + return fileevent.DiscoveredDiscarded + } + if !to.flags.KeepTrashed && a.Trashed { + to.logMessage(ctx, fileevent.DiscoveredDiscarded, a, "discarding trashed file") + a.Close() + return fileevent.DiscoveredDiscarded + } + + if to.flags.InclusionFlags.DateRange.IsSet() && !to.flags.InclusionFlags.DateRange.InRange(a.CaptureDate) { + to.logMessage(ctx, fileevent.DiscoveredDiscarded, a, "discarding files out of date range") + a.Close() + return fileevent.DiscoveredDiscarded + } + if to.flags.ImportFromAlbum != "" { + keep := false + dir := path.Dir(a.File.Name()) + if album, ok := to.albums[dir]; ok { + keep = keep || album.Title == to.flags.ImportFromAlbum + } + if !keep { + to.logMessage(ctx, fileevent.DiscoveredDiscarded, a, "discarding files not in the specified album") + a.Close() + return fileevent.DiscoveredDiscarded + } + } + return fileevent.Code(0) +} diff --git a/adapters/googlePhotos/json.go b/adapters/googlePhotos/json.go new file mode 
100644 index 00000000..3abb00b4 --- /dev/null +++ b/adapters/googlePhotos/json.go @@ -0,0 +1,251 @@ +package gp + +import ( + "encoding/json" + "fmt" + "log/slog" + "strconv" + "time" + + "github.com/simulot/immich-go/internal/assets" + "github.com/simulot/immich-go/internal/fshelper" + "github.com/simulot/immich-go/internal/tzone" +) + +type GoogleMetaData struct { + Title string `json:"title"` + Description string `json:"description"` + Category string `json:"category"` + Date *googTimeObject `json:"date,omitempty"` + PhotoTakenTime *googTimeObject `json:"photoTakenTime"` + GeoDataExif *googGeoData `json:"geoDataExif"` + GeoData *googGeoData `json:"geoData"` + Trashed bool `json:"trashed,omitempty"` + Archived bool `json:"archived,omitempty"` + URLPresent googIsPresent `json:"url,omitempty"` // true when the file is an asset metadata + Favorited bool `json:"favorited,omitempty"` // true when starred in GP + Enrichments *googleEnrichments `json:"enrichments,omitempty"` // Album enrichments + GooglePhotosOrigin struct { + FromPartnerSharing googIsPresent `json:"fromPartnerSharing,omitempty"` // true when this is a partner's asset + } `json:"googlePhotosOrigin"` +} + +func (gmd *GoogleMetaData) UnmarshalJSON(data []byte) error { + // test the presence of the key albumData + type md GoogleMetaData + type album struct { + AlbumData *md `json:"albumData"` + } + + var t album + err := json.Unmarshal(data, &t) + if err == nil && t.AlbumData != nil { + *gmd = GoogleMetaData(*(t.AlbumData)) + return nil + } + + var gg md + err = json.Unmarshal(data, &gg) + if err != nil { + return err + } + + *gmd = GoogleMetaData(gg) + return nil +} + +func (gmd GoogleMetaData) LogValue() slog.Value { + return slog.GroupValue( + slog.String("Title", gmd.Title), + slog.String("Description", gmd.Description), + slog.String("Category", gmd.Category), + slog.Any("Date", gmd.Date), + slog.Any("PhotoTakenTime", gmd.PhotoTakenTime), + slog.Any("GeoDataExif", gmd.GeoDataExif), + 
slog.Any("GeoData", gmd.GeoData), + slog.Bool("Trashed", gmd.Trashed), + slog.Bool("Archived", gmd.Archived), + slog.Bool("URLPresent", bool(gmd.URLPresent)), + slog.Bool("Favorited", gmd.Favorited), + slog.Any("Enrichments", gmd.Enrichments), + slog.Bool("FromPartnerSharing", bool(gmd.GooglePhotosOrigin.FromPartnerSharing)), + ) +} + +func (gmd GoogleMetaData) AsMetadata(name fshelper.FSAndName) *assets.Metadata { + md := assets.Metadata{ + FileName: gmd.Title, + Description: gmd.Description, + Trashed: gmd.Trashed, + Archived: gmd.Archived, + Favorited: gmd.Favorited, + FromPartner: gmd.isPartner(), + } + if gmd.GeoDataExif != nil { + md.Latitude, md.Longitude = gmd.GeoDataExif.Latitude, gmd.GeoDataExif.Longitude + if md.Latitude == 0 && md.Longitude == 0 && gmd.GeoData != nil { + md.Latitude, md.Longitude = gmd.GeoData.Latitude, gmd.GeoData.Longitude + } + } + if gmd.PhotoTakenTime != nil && gmd.PhotoTakenTime.Timestamp != "" && gmd.PhotoTakenTime.Timestamp != "0" { + md.DateTaken = gmd.PhotoTakenTime.Time() + } + return &md +} + +func (gmd *GoogleMetaData) isAlbum() bool { + if gmd == nil || gmd.Date == nil { + return false + } + return gmd.Date.Timestamp != "" +} + +func (gmd *GoogleMetaData) isAsset() bool { + if gmd == nil || gmd.PhotoTakenTime == nil { + return false + } + return gmd.PhotoTakenTime.Timestamp != "" +} + +func (gmd *GoogleMetaData) isPartner() bool { + if gmd == nil { + return false + } + return bool(gmd.GooglePhotosOrigin.FromPartnerSharing) +} + +// Key return an expected unique key for the asset +// based on the title and the timestamp +func (gmd GoogleMetaData) Key() string { + return fmt.Sprintf("%s,%s", gmd.Title, gmd.PhotoTakenTime.Timestamp) +} + +// googIsPresent is set when the field is present. 
The content of the field is not relevant +type googIsPresent bool + +func (p *googIsPresent) UnmarshalJSON(b []byte) error { + var bl bool + err := json.Unmarshal(b, &bl) + if err == nil { + return nil + } + + *p = len(b) > 0 + return nil +} + +func (p googIsPresent) MarshalJSON() ([]byte, error) { + if p { + return json.Marshal("present") + } + return json.Marshal(struct{}{}) +} + +// googGeoData contains GPS coordinates +type googGeoData struct { + Latitude float64 `json:"latitude"` + Longitude float64 `json:"longitude"` + Altitude float64 `json:"altitude"` +} + +func (ggd *googGeoData) LogValue() slog.Value { + if ggd == nil { + return slog.Value{} + } + return slog.GroupValue( + slog.Float64("Latitude", ggd.Latitude), + slog.Float64("Longitude", ggd.Longitude), + slog.Float64("Altitude", ggd.Altitude), + ) +} + +// googTimeObject to handle the epoch timestamp +type googTimeObject struct { + Timestamp string `json:"timestamp"` + // Formatted string `json:"formatted"` +} + +func (gt *googTimeObject) LogValue() slog.Value { + if gt == nil { + return slog.Value{} + } + return slog.TimeValue(gt.Time()) +} + +// Time return the time.Time of the epoch +func (gt googTimeObject) Time() time.Time { + ts, _ := strconv.ParseInt(gt.Timestamp, 10, 64) + if ts == 0 { + return time.Time{} + } + t := time.Unix(ts, 0) + local, _ := tzone.Local() + // t = time.Date(t.Year(), t.Month(), t.Day(), t.Hour(), t.Minute(), t.Second(), t.Nanosecond(), time.UTC) + return t.In(local) +} + +type googleEnrichments struct { + Text string + Latitude float64 + Longitude float64 +} + +func (ge *googleEnrichments) LogValue() slog.Value { + if ge == nil { + return slog.Value{} + } + return slog.GroupValue( + slog.String("Text", ge.Text), + slog.Float64("Latitude", ge.Latitude), + slog.Float64("Longitude", ge.Longitude), + ) +} + +func (ge *googleEnrichments) UnmarshalJSON(b []byte) error { + type googleEnrichment struct { + NarrativeEnrichment struct { + Text string `json:"text"` + } 
`json:"narrativeEnrichment,omitempty"` + LocationEnrichment struct { + Location []struct { + Name string `json:"name"` + Description string `json:"description"` + LatitudeE7 int `json:"latitudeE7"` + LongitudeE7 int `json:"longitudeE7"` + } `json:"location"` + } `json:"locationEnrichment,omitempty"` + } + + var enrichments []googleEnrichment + + err := json.Unmarshal(b, &enrichments) + if err != nil { + return err + } + + for _, e := range enrichments { + if e.NarrativeEnrichment.Text != "" { + ge.Text = addString(ge.Text, "\n", e.NarrativeEnrichment.Text) + } + if e.LocationEnrichment.Location != nil { + for _, l := range e.LocationEnrichment.Location { + if l.Name != "" { + ge.Text = addString(ge.Text, "\n", l.Name) + } + if l.Description != "" { + ge.Text = addString(ge.Text, " - ", l.Description) + } + ge.Latitude = float64(l.LatitudeE7) / 10e6 + ge.Longitude = float64(l.LongitudeE7) / 10e6 + } + } + } + return err +} + +func addString(s string, sep string, t string) string { + if s != "" { + return s + sep + t + } + return t +} diff --git a/browser/gp/json_test.go b/adapters/googlePhotos/json_test.go similarity index 68% rename from browser/gp/json_test.go rename to adapters/googlePhotos/json_test.go index 6fe5e71f..2f08a8a2 100644 --- a/browser/gp/json_test.go +++ b/adapters/googlePhotos/json_test.go @@ -2,6 +2,8 @@ package gp import ( "encoding/json" + "fmt" + "log/slog" "strings" "testing" "time" @@ -14,6 +16,8 @@ func TestPresentFields(t *testing.T) { isPartner bool isAlbum bool isAsset bool + dateTaken time.Time + title string }{ { name: "regularJSON", @@ -56,28 +60,33 @@ func TestPresentFields(t *testing.T) { isPartner: false, isAlbum: false, isAsset: true, + dateTaken: time.Unix(1695394176, 0), + title: "title", }, { - name: "albumJson", + name: "old albumJson issue #212", json: `{ - "title": "Album Name", - "description": "", - "access": "", - "date": { - "timestamp": "0", - "formatted": "1 janv. 
1970, 00:00:00 UTC" - }, - "location": "", - "geoData": { - "latitude": 0.0, - "longitude": 0.0, - "altitude": 0.0, - "latitudeSpan": 0.0, - "longitudeSpan": 0.0 - } - }`, + "albumData": { + "title": "Trip to Gdańsk", + "description": "", + "access": "protected", + "location": "", + "date": { + "timestamp": "1502439626", + "formatted": "11 sie 2017, 08:20:26 UTC" + }, + "geoData": { + "latitude": 0.0, + "longitude": 0.0, + "altitude": 0.0, + "latitudeSpan": 0.0, + "longitudeSpan": 0.0 + } + } + }`, isPartner: false, isAlbum: true, + title: "Trip to Gdańsk", }, { name: "partner", @@ -116,27 +125,30 @@ func TestPresentFields(t *testing.T) { isPartner: true, isAlbum: false, isAsset: true, + title: "IMG_1559.HEIC", + dateTaken: time.Unix(1687791968, 0), }, { name: "new_takeout_album", json: `{ - "title": "Trip to Gdańsk", - "description": "", - "access": "protected", - "date": { - "timestamp": "1502439626", - "formatted": "11 sie 2017, 08:20:26 UTC" - }, - "geoData": { - "latitude": 0.0, - "longitude": 0.0, - "altitude": 0.0, - "latitudeSpan": 0.0, - "longitudeSpan": 0.0 - } - }`, + "title": "Trip to Gdańsk", + "description": "", + "access": "protected", + "date": { + "timestamp": "1502439626", + "formatted": "11 sie 2017, 08:20:26 UTC" + }, + "geoData": { + "latitude": 0.0, + "longitude": 0.0, + "altitude": 0.0, + "latitudeSpan": 0.0, + "longitudeSpan": 0.0 + } + }`, isPartner: false, isAlbum: true, + title: "Trip to Gdańsk", }, { name: "old_takeout_album", @@ -161,6 +173,7 @@ func TestPresentFields(t *testing.T) { }`, isPartner: false, isAlbum: true, + title: "Trip to Gdańsk", }, { name: "old_takeout_photo", @@ -195,7 +208,9 @@ func TestPresentFields(t *testing.T) { "formatted": "3 sie 2017, 09:54:31 UTC" } }`, - isAsset: true, + isAsset: true, + title: "IMG_20170803_115431469_HDR.jpg", + dateTaken: time.Unix(1501754071, 0), }, { name: "new takeout_asset", @@ -235,7 +250,9 @@ func TestPresentFields(t *testing.T) { } } }`, - isAsset: true, + isAsset: true, + title: 
"IMG_20170803_115431469_HDR.jpg", + dateTaken: time.Unix(1501754071, 0), }, { name: "print_order", @@ -274,6 +291,12 @@ func TestPresentFields(t *testing.T) { if c.isPartner != md.isPartner() { t.Errorf("expected isPartner to be %t, got %t", c.isPartner, md.isPartner()) } + if !c.dateTaken.IsZero() && !c.dateTaken.Equal(md.PhotoTakenTime.Time()) { + t.Errorf("expected dateTaken to be %s, got %s", c.dateTaken, md.PhotoTakenTime.Time()) + } + if c.title != md.Title { + t.Errorf("expected Title to be %s, got %s", c.title, md.Title) + } }) } } @@ -364,3 +387,112 @@ func TestEnrichedAlbum(t *testing.T) { }) } } + +func TestLog(t *testing.T) { + tcs := []struct { + name string + json string + }{ + { + name: "regularJSON", + json: `{ + "title": "title", + "description": "", + "imageViews": "0", + "creationTime": { + "timestamp": "1695397525", + "formatted": "22 sept. 2023, 15:45:25 UTC" + }, + "photoTakenTime": { + "timestamp": "1695394176", + "formatted": "22 sept. 2023, 14:49:36 UTC" + }, + "geoData": { + "latitude": 48.7981917, + "longitude": 2.4866832999999997, + "altitude": 90.25, + "latitudeSpan": 0.0, + "longitudeSpan": 0.0 + }, + "geoDataExif": { + "latitude": 48.7981917, + "longitude": 2.4866832999999997, + "altitude": 90.25, + "latitudeSpan": 0.0, + "longitudeSpan": 0.0 + }, + "url": "https://photos.google.com/photo/AAMKMAKZMAZMKAZMKZMAK", + "googlePhotosOrigin": { + "mobileUpload": { + "deviceFolder": { + "localFolderName": "" + }, + "deviceType": "ANDROID_PHONE" + } + } + }`, + }, + { + name: "album enrichments", + json: `{ + "title": "Album test 6/10/23", + "description": "", + "access": "protected", + "date": { + "timestamp": "1697872351", + "formatted": "21 oct. 
2023, 07:12:31 UTC" + }, + "enrichments": [ + { + "narrativeEnrichment": { + "text": "Ici c\u0027est du text" + } + }, + { + "narrativeEnrichment": { + "text": "Et hop" + } + }, + { + "locationEnrichment": { + "location": [ + { + "name": "Saint-Maur-des-Fossés", + "description": "Île-de-France", + "latitudeE7": 488029439, + "longitudeE7": 24854290 + } + ] + } + }, + { + "locationEnrichment": { + "location": [ + { + "name": "Champigny-sur-Marne", + "description": "Île-de-France", + "latitudeE7": 488236547, + "longitudeE7": 24964847 + } + ] + } + } + ] + }`, + }, + } + for _, c := range tcs { + t.Run(c.name, func(t *testing.T) { + var md GoogleMetaData + + err := json.NewDecoder(strings.NewReader(c.json)).Decode(&md) + if err != nil { + t.Error(err) + } + sb := strings.Builder{} + log := slog.New(slog.NewTextHandler(&sb, &slog.HandlerOptions{Level: slog.LevelDebug})) + log.Debug("debug", "md", md) + fmt.Println(sb.String()) + }) + } +} diff --git a/adapters/googlePhotos/logs.go b/adapters/googlePhotos/logs.go new file mode 100644 index 00000000..2e3dbf98 --- /dev/null +++ b/adapters/googlePhotos/logs.go @@ -0,0 +1,76 @@ +package gp + +import ( + "context" + "encoding/csv" + "io" + "log/slog" + "slices" + "strconv" + "strings" + + "github.com/simulot/immich-go/internal/fileevent" + "github.com/simulot/immich-go/internal/gen" +) + +// logMessage for the photo and the movie attached to the photo +func (to *Takeout) logMessage(ctx context.Context, code fileevent.Code, a slog.LogValuer, reason string) { + t := "reason" + if code == fileevent.Error { + t = "error" + } + to.log.Record(ctx, code, a, t, reason) +} + +func (to *Takeout) DebugFileTracker(w io.Writer) { + csv := csv.NewWriter(w) + _ = csv.Write([]string{"File", "Size", "Count", "Duplicated", "Uploaded", "Status", "Date", "Albums", "Paths"}) + + keys := gen.MapKeys(to.fileTracker) + + slices.SortFunc(keys, trackerKeySortFunc) + line := make([]string, 9) + for _, k := range keys { + track := to.fileTracker[k] + 
line[0] = k.baseName + line[1] = strconv.Itoa(int(k.size)) // Size + line[2] = strconv.Itoa(track.count) // Count + line[3] = strconv.Itoa(track.count - 1) // Duplicated + if track.status == fileevent.Uploaded { + line[4] = "1" // Uploaded + } else { + line[4] = "0" + } + line[5] = track.status.String() + if track.metadata != nil { + albums := make([]string, 0, len(track.metadata.Albums)) + for _, a := range track.metadata.Albums { + albums = append(albums, a.Title) + } + line[6] = track.metadata.DateTaken.Format("2006-01-02 15:04:05 -0700") // Date + line[7] = strings.Join(albums, ",") // Albums + } else { + line[6] = "" + line[7] = "" + } + line[8] = strings.Join(track.paths, ",") // Paths + _ = csv.Write(line) + } + csv.Flush() +} + +/* +func (to *Takeout) DebugUploadedFiles(w io.Writer) { + csv := csv.NewWriter(w) + _ = csv.Write([]string{"File", "Size"}) + + slices.SortFunc(to.debugUploadedFile, trackerKeySortFunc) + line := make([]string, 2) + for _, k := range to.debugUploadedFile { + line[0] = k.baseName + line[1] = strconv.Itoa(int(k.size)) + _ = csv.Write(line) + } + csv.Flush() +} +*/ diff --git a/browser/gp/googlephotos_test.go b/adapters/googlePhotos/matcher_test.go similarity index 94% rename from browser/gp/googlephotos_test.go rename to adapters/googlePhotos/matcher_test.go index 4e469830..cf58c218 100644 --- a/browser/gp/googlephotos_test.go +++ b/adapters/googlePhotos/matcher_test.go @@ -3,107 +3,9 @@ package gp import ( "testing" - "github.com/simulot/immich-go/immich" + "github.com/simulot/immich-go/internal/filetypes" ) -func Test_matchers(t *testing.T) { - tests := []struct { - jsonName string - fileName string - want string - }{ - { - jsonName: "PXL_20211013_220651983.jpg.json", - fileName: "PXL_20211013_220651983.jpg", - want: "normalMatch", - }, - { - jsonName: "PXL_20220405_090123740.PORTRAIT.jpg.json", - fileName: "PXL_20220405_090123740.PORTRAIT-modifié.jpg", - want: "matchEditedName", - }, - { - jsonName: 
"PXL_20220405_090123740.PORTRAIT.jpg.json", - fileName: "PXL_20220405_100123740.PORTRAIT-modifié.jpg", - want: "", - }, - { - jsonName: "DSC_0238.JPG.json", - fileName: "DSC_0238.JPG", - want: "normalMatch", - }, - { - jsonName: "DSC_0238.JPG(1).json", - fileName: "DSC_0238(1).JPG", - want: "matchDuplicateInYear", - }, - { - jsonName: "IMG_2710.HEIC(1).json", - fileName: "IMG_2710(1).HEIC", - want: "matchDuplicateInYear", - }, - { - jsonName: "PXL_20231118_035751175.MP.jpg.json", - fileName: "PXL_20231118_035751175.MP.jpg", - want: "normalMatch", - }, - { - jsonName: "PXL_20231118_035751175.MP.jpg.json", - fileName: "PXL_20231118_035751175.MP", - want: "livePhotoMatch", - }, - { - jsonName: "PXL_20230809_203449253.LONG_EXPOSURE-02.ORIGIN.json", - fileName: "PXL_20230809_203449253.LONG_EXPOSURE-02.ORIGINA.jpg", - want: "matchWithOneCharOmitted", - }, - { - jsonName: "05yqt21kruxwwlhhgrwrdyb6chhwszi9bqmzu16w0 2.jp.json", - fileName: "05yqt21kruxwwlhhgrwrdyb6chhwszi9bqmzu16w0 2.jpg", - want: "livePhotoMatch", - }, - { - jsonName: "😀😃😄😁😆😅😂🤣🥲☺️😊😇🙂🙃😉😌😍🥰😘😗😙😚😋.json", - fileName: "😀😃😄😁😆😅😂🤣🥲☺️😊😇🙂🙃😉😌😍🥰😘😗😙😚😋😛.jpg", - want: "matchWithOneCharOmitted", - }, - { - jsonName: "Backyard_ceremony_wedding_photography_xxxxxxx_(494).json", - fileName: "Backyard_ceremony_wedding_photography_xxxxxxx_m(494).jpg", - want: "matchVeryLongNameWithNumber", - }, - { - jsonName: "original_1d4caa6f-16c6-4c3d-901b-9387de10e528_.json", - fileName: "original_1d4caa6f-16c6-4c3d-901b-9387de10e528_P.jpg", - want: "matchWithOneCharOmitted", - }, - { - jsonName: "original_1d4caa6f-16c6-4c3d-901b-9387de10e528_.json", - fileName: "original_1d4caa6f-16c6-4c3d-901b-9387de10e528_P(1).jpg", - want: "matchForgottenDuplicates", - }, - { // #405 - jsonName: "PXL_20210102_221126856.MP~2.jpg.json", - fileName: "PXL_20210102_221126856.MP~2", - want: "livePhotoMatch", - }, - } - for _, tt := range tests { - t.Run(tt.fileName, func(t *testing.T) { - matcher := "" - for _, m := range matchers { - if m.fn(tt.jsonName, 
tt.fileName, immich.DefaultSupportedMedia) { - matcher = m.name - break - } - } - if matcher != tt.want { - t.Errorf("matcher is '%s', want %v", matcher, tt.want) - } - }) - } -} - func Test_matchVeryLongNameWithNumber(t *testing.T) { tests := []struct { jsonName string @@ -123,7 +25,7 @@ func Test_matchVeryLongNameWithNumber(t *testing.T) { } for _, tt := range tests { t.Run(tt.fileName, func(t *testing.T) { - if got := matchVeryLongNameWithNumber(tt.jsonName, tt.fileName, immich.DefaultSupportedMedia); got != tt.want { + if got := matchVeryLongNameWithNumber(tt.jsonName, tt.fileName, filetypes.DefaultSupportedMedia); got != tt.want { t.Errorf("matchVeryLongNameWithNumber() = %v, want %v", got, tt.want) } }) @@ -152,7 +54,7 @@ func Test_matchDuplicateInYear(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - if got := matchDuplicateInYear(tt.jsonName, tt.fileName, immich.DefaultSupportedMedia); got != tt.want { + if got := matchDuplicateInYear(tt.jsonName, tt.fileName, filetypes.DefaultSupportedMedia); got != tt.want { t.Errorf("matchDuplicateInYear() = %v, want %v", got, tt.want) } }) @@ -181,7 +83,7 @@ func Test_matchForgottenDuplicates(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - if got := matchForgottenDuplicates(tt.jsonName, tt.fileName, immich.DefaultSupportedMedia); got != tt.want { + if got := matchForgottenDuplicates(tt.jsonName, tt.fileName, filetypes.DefaultSupportedMedia); got != tt.want { t.Errorf("matchDuplicateInYear() = %v, want %v", got, tt.want) } }) @@ -221,3 +123,101 @@ func Benchmark_matchDuplicateInYear(b *testing.B) { matchDuplicateInYear("IMG_3479.JPG(2).json", "IMG_3479(2).JPG", nil) } } + +func Test_matchers(t *testing.T) { + tests := []struct { + jsonName string + fileName string + want string + }{ + { + jsonName: "PXL_20211013_220651983.jpg.json", + fileName: "PXL_20211013_220651983.jpg", + want: "normalMatch", + }, + { + jsonName: 
"PXL_20220405_090123740.PORTRAIT.jpg.json", + fileName: "PXL_20220405_090123740.PORTRAIT-modifié.jpg", + want: "matchEditedName", + }, + { + jsonName: "PXL_20220405_090123740.PORTRAIT.jpg.json", + fileName: "PXL_20220405_100123740.PORTRAIT-modifié.jpg", + want: "", + }, + { + jsonName: "DSC_0238.JPG.json", + fileName: "DSC_0238.JPG", + want: "normalMatch", + }, + { + jsonName: "DSC_0238.JPG(1).json", + fileName: "DSC_0238(1).JPG", + want: "matchDuplicateInYear", + }, + { + jsonName: "IMG_2710.HEIC(1).json", + fileName: "IMG_2710(1).HEIC", + want: "matchDuplicateInYear", + }, + { + jsonName: "PXL_20231118_035751175.MP.jpg.json", + fileName: "PXL_20231118_035751175.MP.jpg", + want: "normalMatch", + }, + { + jsonName: "PXL_20231118_035751175.MP.jpg.json", + fileName: "PXL_20231118_035751175.MP", + want: "livePhotoMatch", + }, + { + jsonName: "PXL_20230809_203449253.LONG_EXPOSURE-02.ORIGIN.json", + fileName: "PXL_20230809_203449253.LONG_EXPOSURE-02.ORIGINA.jpg", + want: "matchWithOneCharOmitted", + }, + { + jsonName: "05yqt21kruxwwlhhgrwrdyb6chhwszi9bqmzu16w0 2.jp.json", + fileName: "05yqt21kruxwwlhhgrwrdyb6chhwszi9bqmzu16w0 2.jpg", + want: "livePhotoMatch", + }, + { + jsonName: "😀😃😄😁😆😅😂🤣🥲☺️😊😇🙂🙃😉😌😍🥰😘😗😙😚😋.json", + fileName: "😀😃😄😁😆😅😂🤣🥲☺️😊😇🙂🙃😉😌😍🥰😘😗😙😚😋😛.jpg", + want: "matchWithOneCharOmitted", + }, + { + jsonName: "Backyard_ceremony_wedding_photography_xxxxxxx_(494).json", + fileName: "Backyard_ceremony_wedding_photography_xxxxxxx_m(494).jpg", + want: "matchVeryLongNameWithNumber", + }, + { + jsonName: "original_1d4caa6f-16c6-4c3d-901b-9387de10e528_.json", + fileName: "original_1d4caa6f-16c6-4c3d-901b-9387de10e528_P.jpg", + want: "matchWithOneCharOmitted", + }, + { + jsonName: "original_1d4caa6f-16c6-4c3d-901b-9387de10e528_.json", + fileName: "original_1d4caa6f-16c6-4c3d-901b-9387de10e528_P(1).jpg", + want: "matchForgottenDuplicates", + }, + { // #405 + jsonName: "PXL_20210102_221126856.MP~2.jpg.json", + fileName: "PXL_20210102_221126856.MP~2", + want: "livePhotoMatch", + 
}, + } + for _, tt := range tests { + t.Run(tt.fileName, func(t *testing.T) { + matcher := "" + for _, m := range matchers { + if m.fn(tt.jsonName, tt.fileName, filetypes.DefaultSupportedMedia) { + matcher = m.name + break + } + } + if matcher != tt.want { + t.Errorf("matcher is '%s', want %v", matcher, tt.want) + } + }) + } +} diff --git a/adapters/googlePhotos/matchers.go b/adapters/googlePhotos/matchers.go new file mode 100644 index 00000000..1f5664c7 --- /dev/null +++ b/adapters/googlePhotos/matchers.go @@ -0,0 +1,199 @@ +package gp + +import ( + "path" + "strings" + "unicode/utf8" + + "github.com/simulot/immich-go/internal/filetypes" +) + +// normalMatch +// +// PXL_20230922_144936660.jpg.json +// PXL_20230922_144936660.jpg +func normalMatch(jsonName string, fileName string, sm filetypes.SupportedMedia) bool { + base := strings.TrimSuffix(jsonName, path.Ext(jsonName)) + return base == fileName +} + +// livePhotoMatch +// 20231227_152817.jpg.json +// 20231227_152817.MP4 +// +// PXL_20231118_035751175.MP.jpg.json +// PXL_20231118_035751175.MP.jpg +// PXL_20231118_035751175.MP +func livePhotoMatch(jsonName string, fileName string, sm filetypes.SupportedMedia) bool { + fileExt := path.Ext(fileName) + fileName = strings.TrimSuffix(fileName, fileExt) + base := strings.TrimSuffix(jsonName, path.Ext(jsonName)) + base = strings.TrimSuffix(base, path.Ext(base)) + if base == fileName { + return true + } + base = strings.TrimSuffix(base, path.Ext(base)) + return base == fileName +} + +// matchWithOneCharOmitted +// +// PXL_20230809_203449253.LONG_EXPOSURE-02.ORIGIN.json +// PXL_20230809_203449253.LONG_EXPOSURE-02.ORIGINA.jpg +// +// 05yqt21kruxwwlhhgrwrdyb6chhwszi9bqmzu16w0 2.jp.json <-- match also with LivePhoto matcher +// 05yqt21kruxwwlhhgrwrdyb6chhwszi9bqmzu16w0 2.jpg +// +// 😀😃😄😁😆😅😂🤣🥲☺️😊😇🙂🙃😉😌😍🥰😘😗😙😚😋.json +// 😀😃😄😁😆😅😂🤣🥲☺️😊😇🙂🙃😉😌😍🥰😘😗😙😚😋😛.jpg + +func matchWithOneCharOmitted(jsonName string, fileName string, sm filetypes.SupportedMedia) bool { + baseJSON := 
strings.TrimSuffix(jsonName, path.Ext(jsonName)) + ext := path.Ext(baseJSON) + if sm.IsExtensionPrefix(ext) { + baseJSON = strings.TrimSuffix(baseJSON, ext) + } + fileName = strings.TrimSuffix(fileName, path.Ext(fileName)) + if fileName == baseJSON { + return true + } + if strings.HasPrefix(fileName, baseJSON) { + a, b := utf8.RuneCountInString(fileName), utf8.RuneCountInString(baseJSON) + if a-b <= 1 { + return true + } + } + return false +} + +// matchVeryLongNameWithNumber +// +// Backyard_ceremony_wedding_photography_xxxxxxx_(494).json +// Backyard_ceremony_wedding_photography_xxxxxxx_m(494).jpg +func matchVeryLongNameWithNumber(jsonName string, fileName string, sm filetypes.SupportedMedia) bool { + jsonName = strings.TrimSuffix(jsonName, path.Ext(jsonName)) + + p1JSON := strings.Index(jsonName, "(") + if p1JSON < 0 { + return false + } + p2JSON := strings.Index(jsonName, ")") + if p2JSON < 0 || p2JSON != len(jsonName)-1 { + return false + } + p1File := strings.Index(fileName, "(") + if p1File < 0 || p1File != p1JSON+1 { + return false + } + if jsonName[:p1JSON] != fileName[:p1JSON] { + return false + } + p2File := strings.Index(fileName, ")") + return jsonName[p1JSON+1:p2JSON] == fileName[p1File+1:p2File] +} + +// matchDuplicateInYear +// +// IMG_3479.JPG(2).json +// IMG_3479(2).JPG +// + +// Fast implementation, but does't work with live photos +func matchDuplicateInYear(jsonName string, fileName string, sm filetypes.SupportedMedia) bool { + jsonName = strings.TrimSuffix(jsonName, path.Ext(jsonName)) + p1JSON := strings.Index(jsonName, "(") + if p1JSON < 1 { + return false + } + p1File := strings.Index(fileName, "(") + if p1File < 0 { + return false + } + jsonExt := path.Ext(jsonName[:p1JSON]) + + p2JSON := strings.Index(jsonName, ")") + if p2JSON < 0 || p2JSON != len(jsonName)-1 { + return false + } + + p2File := strings.Index(fileName, ")") + if p2File < 0 || p2File < p1File { + return false + } + + fileExt := path.Ext(fileName) + + if fileExt != jsonExt { 
+ return false + } + + jsonBase := strings.TrimSuffix(jsonName[:p1JSON], path.Ext(jsonName[:p1JSON])) + + if jsonBase != fileName[:p1File] { + return false + } + + if fileName[p1File+1:p2File] != jsonName[p1JSON+1:p2JSON] { + return false + } + + return true +} + +/* +// Regexp implementation, work with live photos, 10 times slower +var ( + reDupInYearJSON = regexp.MustCompile(`(.*)\.(.{2,4})\((\d+)\)\..{2,4}$`) + reDupInYearFile = regexp.MustCompile(`(.*)\((\d+)\)\..{2,4}$`) +) + +func matchDuplicateInYear(jsonName string, fileName string, sm immich.SupportedMedia) bool { + mFile := reDupInYearFile.FindStringSubmatch(fileName) + if len(mFile) < 3 { + return false + } + mJSON := reDupInYearJSON.FindStringSubmatch(jsonName) + if len(mJSON) < 4 { + return false + } + if mFile[1] == mJSON[1] && mFile[2] == mJSON[3] { + return true + } + return false +} +*/ + +// matchEditedName +// PXL_20220405_090123740.PORTRAIT.jpg.json +// PXL_20220405_090123740.PORTRAIT.jpg +// PXL_20220405_090123740.PORTRAIT-modifié.jpg + +func matchEditedName(jsonName string, fileName string, sm filetypes.SupportedMedia) bool { + base := strings.TrimSuffix(jsonName, path.Ext(jsonName)) + ext := path.Ext(base) + if ext != "" && sm.IsMedia(ext) { + base = strings.TrimSuffix(base, ext) + fname := strings.TrimSuffix(fileName, path.Ext(fileName)) + return strings.HasPrefix(fname, base) + } + return false +} + +// TODO: This one interferes with matchVeryLongNameWithNumber + +// matchForgottenDuplicates +// "original_1d4caa6f-16c6-4c3d-901b-9387de10e528_.json" +// original_1d4caa6f-16c6-4c3d-901b-9387de10e528_P.jpg +// original_1d4caa6f-16c6-4c3d-901b-9387de10e528_P(1).jpg + +func matchForgottenDuplicates(jsonName string, fileName string, sm filetypes.SupportedMedia) bool { + jsonName = strings.TrimSuffix(jsonName, path.Ext(jsonName)) + fileName = strings.TrimSuffix(fileName, path.Ext(fileName)) + if strings.HasPrefix(fileName, jsonName) { + a, b := utf8.RuneCountInString(jsonName), 
utf8.RuneCountInString(fileName) + if b-a < 10 { + return true + } + } + return false +} diff --git a/adapters/googlePhotos/options.go b/adapters/googlePhotos/options.go new file mode 100644 index 00000000..16ebaa83 --- /dev/null +++ b/adapters/googlePhotos/options.go @@ -0,0 +1,117 @@ +// Package gp provides functionality for importing Google Photos takeout into Immich. + +package gp + +import ( + "time" + + cliflags "github.com/simulot/immich-go/internal/cliFlags" + "github.com/simulot/immich-go/internal/filenames" + "github.com/simulot/immich-go/internal/filetypes" + "github.com/simulot/immich-go/internal/filters" + "github.com/simulot/immich-go/internal/namematcher" + "github.com/spf13/cobra" +) + +// ImportFlags represents the command-line flags for the Google Photos takeout import command. +type ImportFlags struct { + // CreateAlbums determines whether to create albums in Immich that match the albums in the Google Photos takeout. + CreateAlbums bool + + // ImportFromAlbum specifies the name of the Google Photos album to import from. If empty, all albums will be imported. + ImportFromAlbum string + + // ImportIntoAlbum specifies the name of the album to import assets into. + ImportIntoAlbum string + + // PartnerSharedAlbum specifies the name of the album to add partner's photos to. + PartnerSharedAlbum string + + // KeepTrashed determines whether to import photos that are marked as trashed in Google Photos. + KeepTrashed bool + + // KeepPartner determines whether to import photos from the partner's Google Photos account. + KeepPartner bool + + // KeepUntitled determines whether to include photos from albums without a title in the import process. + KeepUntitled bool + + // KeepArchived determines whether to import archived Google Photos. + KeepArchived bool + + // KeepJSONLess determines whether to import photos that do not have a matching JSON file in the takeout. 
+ KeepJSONLess bool + + // Flags for controlling the extensions of the files to be uploaded + InclusionFlags cliflags.InclusionFlags + + // List of banned files + BannedFiles namematcher.List // List of banned file name patterns + + // SupportedMedia represents the server's actual list of supported media. This is not a flag. + SupportedMedia filetypes.SupportedMedia + + // InfoCollector collects information about filenames. + InfoCollector *filenames.InfoCollector + + // ManageHEICJPG determines whether to manage HEIC to JPG conversion options. + ManageHEICJPG filters.HeicJpgFlag + + // ManageRawJPG determines how to manage raw and JPEG files. + ManageRawJPG filters.RawJPGFlag + + // BurstFlag determines how to manage burst photos. + ManageBurst filters.BurstFlag + + // ManageEpsonFastFoto enables the management of Epson FastFoto files. + ManageEpsonFastFoto bool + + // Tags is a list of tags to be added to the imported assets. + Tags []string + + // SessionTag indicates whether to add a session tag to the imported assets. 
+ SessionTag bool + session string // Session tag value + + // Add the takeout file name as tag + TakeoutTag bool + TakeoutName string + + // Timezone + TZ *time.Location +} + +func (o *ImportFlags) AddFromGooglePhotosFlags(cmd *cobra.Command, parent *cobra.Command) { + o.BannedFiles, _ = namematcher.New( + `@eaDir/`, + `@__thumb/`, // QNAP + `SYNOFILE_THUMB_*.*`, // SYNOLOGY + `Lightroom Catalog/`, // LR + `thumbnails/`, // Android photo + `.DS_Store/`, // Mac OS custom attributes + `._*.*`, // MacOS resource files + ) + cmd.Flags().BoolVar(&o.CreateAlbums, "sync-albums", true, "Automatically create albums in Immich that match the albums in your Google Photos takeout") + cmd.Flags().StringVar(&o.ImportFromAlbum, "from-album-name", "", "Only import photos from the specified Google Photos album") + cmd.Flags().BoolVar(&o.KeepUntitled, "include-untitled-albums", false, "Include photos from albums without a title in the import process") + cmd.Flags().BoolVarP(&o.KeepTrashed, "include-trashed", "t", false, "Import photos that are marked as trashed in Google Photos") + cmd.Flags().BoolVarP(&o.KeepPartner, "include-partner", "p", true, "Import photos from your partner's Google Photos account") + cmd.Flags().StringVar(&o.PartnerSharedAlbum, "partner-shared-album", "", "Add partner's photo to the specified album name") + cmd.Flags().BoolVarP(&o.KeepArchived, "include-archived", "a", true, "Import archived Google Photos") + cmd.Flags().BoolVarP(&o.KeepJSONLess, "include-unmatched", "u", false, "Import photos that do not have a matching JSON file in the takeout") + cmd.Flags().Var(&o.BannedFiles, "ban-file", "Exclude a file based on a pattern (case-insensitive). Can be specified multiple times.") + cmd.Flags().StringSliceVar(&o.Tags, "tag", nil, "Add tags to the imported assets. Can be specified multiple times. Hierarchy is supported using a / separator (e.g. 
'tag1/subtag1')") + cmd.Flags().BoolVar(&o.SessionTag, "session-tag", false, "Tag uploaded photos with a tag \"{immich-go}/YYYY-MM-DD HH-MM-SS\"") + cmd.Flags().BoolVar(&o.TakeoutTag, "takeout-tag", true, "Tag uploaded photos with a tag \"{takeout}/takeout-YYYYMMDDTHHMMSSZ\"") + + cliflags.AddInclusionFlags(cmd, &o.InclusionFlags) + // exif.AddExifToolFlags(cmd, &o.ExifToolFlags) + o.SupportedMedia = filetypes.DefaultSupportedMedia + + if parent != nil && parent.Name() == "upload" { + cmd.Flags().Var(&o.ManageHEICJPG, "manage-heic-jpeg", "Manage coupled HEIC and JPEG files. Possible values: KeepHeic, KeepJPG, StackCoverHeic, StackCoverJPG") + cmd.Flags().Var(&o.ManageRawJPG, "manage-raw-jpeg", "Manage coupled RAW and JPEG files. Possible values: KeepRaw, KeepJPG, StackCoverRaw, StackCoverJPG") + cmd.Flags().Var(&o.ManageBurst, "manage-burst", "Manage burst photos. Possible values: Stack, StackKeepRaw, StackKeepJPEG") + cmd.Flags().BoolVar(&o.ManageEpsonFastFoto, "manage-epson-fastfoto", false, "Manage Epson FastFoto file (default: false)") + } +} diff --git a/browser/gp/testgp_bigread_test.go b/adapters/googlePhotos/testgp_bigread_test.goNo similarity index 60% rename from browser/gp/testgp_bigread_test.go rename to adapters/googlePhotos/testgp_bigread_test.goNo index d3283a6e..f274156e 100644 --- a/browser/gp/testgp_bigread_test.go +++ b/adapters/googlePhotos/testgp_bigread_test.goNo @@ -11,9 +11,9 @@ import ( "path/filepath" "testing" - "github.com/simulot/immich-go/helpers/fileevent" - "github.com/simulot/immich-go/helpers/fshelper" - "github.com/simulot/immich-go/immich" + "github.com/simulot/immich-go/internal/fileevent" + "github.com/simulot/immich-go/internal/fshelper" + "github.com/simulot/immich-go/internal/metadata" "github.com/telemachus/humane" ) @@ -24,7 +24,7 @@ func TestReadBigTakeout(t *testing.T) { } l := slog.New(humane.NewHandler(f, &humane.Options{Level: slog.LevelInfo})) - j := fileevent.NewRecorder(l, false) + j := fileevent.NewRecorder(l) m, 
err := filepath.Glob("../../../test-data/full_takeout/*.zip") if err != nil { t.Error(err) @@ -32,13 +32,19 @@ func TestReadBigTakeout(t *testing.T) { } cnt := 0 fsyss, err := fshelper.ParsePath(m) - to, err := NewTakeout(context.Background(), j, immich.DefaultSupportedMedia, fsyss...) + flags := &ImportFlags{ + SupportedMedia: filetypes.DefaultSupportedMedia, + } + + to, err := NewTakeout(context.Background(), j, flags, fsyss...) if err != nil { t.Error(err) return } - for range to.Browse(context.Background()) { + assets := to.Browse(context.Background()) + + for range assets { cnt++ } l.Info(fmt.Sprintf("files seen %d", cnt)) diff --git a/browser/gp/testgp_samples_test.go b/adapters/googlePhotos/testgp_samples_test.go similarity index 90% rename from browser/gp/testgp_samples_test.go rename to adapters/googlePhotos/testgp_samples_test.go index e6a2624a..fec6f3ec 100644 --- a/browser/gp/testgp_samples_test.go +++ b/adapters/googlePhotos/testgp_samples_test.go @@ -12,21 +12,27 @@ import ( "time" "github.com/psanford/memfs" - "github.com/simulot/immich-go/immich/metadata" "github.com/simulot/immich-go/internal/fakefs" + "github.com/simulot/immich-go/internal/filenames" ) type inMemFS struct { *memfs.FS - err error + name string + err error } -func newInMemFS() *inMemFS { +func newInMemFS(name string) *inMemFS { // nolint: unparam return &inMemFS{ - FS: memfs.New(), + name: name, + FS: memfs.New(), } } +func (mfs inMemFS) Name() string { + return mfs.name +} + func (mfs *inMemFS) FSs() []fs.FS { return []fs.FS{mfs} } @@ -41,6 +47,16 @@ func (mfs *inMemFS) addFile(name string, content []byte) *inMemFS { return mfs } +func (mfs *inMemFS) addFile2(name string) *inMemFS { // nolint: unused + if mfs.err != nil { + return mfs + } + dir := path.Dir(name) + mfs.err = errors.Join(mfs.err, mfs.MkdirAll(dir, 0o777)) + mfs.err = errors.Join(mfs.err, mfs.WriteFile(name, []byte(name), 0o777)) + return mfs +} + func (mfs *inMemFS) addImage(name string, length int) *inMemFS { b := 
make([]byte, length) for i := 0; i < length; i++ { @@ -54,16 +70,15 @@ type jsonFn func(md *GoogleMetaData) func takenTime(date string) func(md *GoogleMetaData) { return func(md *GoogleMetaData) { - md.PhotoTakenTime.Timestamp = strconv.FormatInt(metadata.TakeTimeFromName(date).Unix(), 10) + md.PhotoTakenTime.Timestamp = strconv.FormatInt(filenames.TakeTimeFromName(date, time.UTC).Unix(), 10) } } func (mfs *inMemFS) addJSONImage(name string, title string, modifiers ...jsonFn) *inMemFS { md := GoogleMetaData{ - Metablock: Metablock{ - Title: title, - URLPresent: true, - }, + Title: title, + URLPresent: true, + PhotoTakenTime: &googTimeObject{}, } md.PhotoTakenTime.Timestamp = strconv.FormatInt(time.Date(2023, 10, 23, 15, 0, 0, 0, time.Local).Unix(), 10) for _, f := range modifiers { @@ -120,7 +135,7 @@ func sortFileResult(s []fileResult) []fileResult { } func simpleYear() []fs.FS { - return newInMemFS(). + return newInMemFS("filesystem"). addJSONImage("Photos from 2023/PXL_20230922_144936660.jpg.json", "PXL_20230922_144936660.jpg"). addImage("Photos from 2023/PXL_20230922_144936660.jpg", 10). addJSONImage("Photos from 2023/PXL_20230922_144956000.jpg.json", "PXL_20230922_144956000.jpg"). @@ -128,7 +143,7 @@ func simpleYear() []fs.FS { } func simpleAlbum() []fs.FS { - return newInMemFS(). + return newInMemFS("filesystem"). addJSONImage("Photos from 2020/IMG_8172.jpg.json", "IMG_8172.jpg", takenTime("20200101103000")). addImage("Photos from 2020/IMG_8172.jpg", 25). addJSONImage("Photos from 2023/PXL_20230922_144936660.jpg.json", "PXL_20230922_144936660.jpg", takenTime("PXL_20230922_144936660")). @@ -145,7 +160,7 @@ func simpleAlbum() []fs.FS { } func albumWithoutImage() []fs.FS { - return newInMemFS(). + return newInMemFS("filesystem"). addJSONAlbum("Album/anyname.json", "Album"). addJSONImage("Album/PXL_20230922_144936660.jpg.json", "PXL_20230922_144936660.jpg"). addJSONImage("Album/PXL_20230922_144934440.jpg.json", "PXL_20230922_144934440.jpg"). 
@@ -157,7 +172,7 @@ func albumWithoutImage() []fs.FS { } func namesWithNumbers() []fs.FS { - return newInMemFS(). + return newInMemFS("filesystem"). addJSONImage("Photos from 2009/IMG_3479.JPG.json", "IMG_3479.JPG"). addImage("Photos from 2009/IMG_3479.JPG", 10). addJSONImage("Photos from 2009/IMG_3479.JPG(1).json", "IMG_3479.JPG"). @@ -167,7 +182,7 @@ func namesWithNumbers() []fs.FS { } func namesTruncated() []fs.FS { - return newInMemFS(). + return newInMemFS("filesystem"). addJSONImage("Photos from 2023/😀😃😄😁😆😅😂🤣🥲☺️😊😇🙂🙃😉😌😍🥰😘😗😙😚😋.json", "😀😃😄😁😆😅😂🤣🥲☺️😊😇🙂🙃😉😌😍🥰😘😗😙😚😋😛😝😜🤪🤨🧐🤓😎🥸🤩🥳😏😒😞😔😟😕🙁☹️😣😖😫😩🥺😢😭😤😠😡🤬🤯😳🥵🥶.jpg"). addImage("Photos from 2023/😀😃😄😁😆😅😂🤣🥲☺️😊😇🙂🙃😉😌😍🥰😘😗😙😚😋😛.jpg", 10). addJSONImage("Photos from 2023/PXL_20230809_203449253.LONG_EXPOSURE-02.ORIGIN.json", "PXL_20230809_203449253.LONG_EXPOSURE-02.ORIGINAL.jpg"). @@ -177,7 +192,7 @@ func namesTruncated() []fs.FS { } func imagesEditedJSON() []fs.FS { - return newInMemFS(). + return newInMemFS("filesystem"). addJSONImage("Photos from 2023/PXL_20220405_090123740.PORTRAIT.jpg.json", "PXL_20220405_090123740.PORTRAIT.jpg"). addImage("Photos from 2023/PXL_20220405_090123740.PORTRAIT.jpg", 41). addImage("Photos from 2023/PXL_20220405_090123740.PORTRAIT-modifié.jpg", 21). @@ -185,7 +200,7 @@ func imagesEditedJSON() []fs.FS { } func titlesWithForbiddenChars() []fs.FS { - return newInMemFS(). + return newInMemFS("filesystem"). addJSONImage("Photos from 2012/27_06_12 - 1.mov.json", "27/06/12 - 1", takenTime("20120627")). addImage("Photos from 2012/27_06_12 - 1.mov", 52). addJSONImage("Photos from 2012/27_06_12 - 2.json", "27/06/12 - 2", takenTime("20120627")). @@ -193,7 +208,7 @@ func titlesWithForbiddenChars() []fs.FS { } func namesIssue39() []fs.FS { - return newInMemFS(). + return newInMemFS("filesystem"). addJSONAlbum("Album/anyname.json", "Album"). addJSONImage("Album/Backyard_ceremony_wedding_photography_xxxxxxx_.json", "Backyard_ceremony_wedding_photography_xxxxxxx_magnoliastudios-371.jpg", takenTime("20200101")). 
addJSONImage("Album/Backyard_ceremony_wedding_photography_xxxxxxx_(1).json", "Backyard_ceremony_wedding_photography_xxxxxxx_magnoliastudios-181.jpg", takenTime("20200101")). @@ -210,14 +225,14 @@ func namesIssue39() []fs.FS { } func issue68MPFiles() []fs.FS { - return newInMemFS(). + return newInMemFS("filesystem"). addJSONImage("Photos from 2022/PXL_20221228_185930354.MP.jpg.json", "PXL_20221228_185930354.MP.jpg", takenTime("20220101")). addImage("Photos from 2022/PXL_20221228_185930354.MP", 1). addImage("Photos from 2022/PXL_20221228_185930354.MP.jpg", 2).FSs() } func issue68LongExposure() []fs.FS { - return newInMemFS(). + return newInMemFS("filesystem"). addJSONImage("Photos from 2023/PXL_20230814_201154491.LONG_EXPOSURE-01.COVER..json", "PXL_20230814_201154491.LONG_EXPOSURE-01.COVER.jpg", takenTime("20230101")). addImage("Photos from 2023/PXL_20230814_201154491.LONG_EXPOSURE-01.COVER.jpg", 1). addJSONImage("Photos from 2023/PXL_20230814_201154491.LONG_EXPOSURE-02.ORIGIN.json", "PXL_20230814_201154491.LONG_EXPOSURE-02.ORIGINAL.jpg", takenTime("20230101")). @@ -225,7 +240,7 @@ func issue68LongExposure() []fs.FS { } func issue68ForgottenDuplicates() []fs.FS { - return newInMemFS(). + return newInMemFS("filesystem"). addJSONImage("Photos from 2022/original_1d4caa6f-16c6-4c3d-901b-9387de10e528_.json", "original_1d4caa6f-16c6-4c3d-901b-9387de10e528_PXL_20220516_164814158.jpg", takenTime("20220101")). addImage("Photos from 2022/original_1d4caa6f-16c6-4c3d-901b-9387de10e528_P.jpg", 1). addImage("Photos from 2022/original_1d4caa6f-16c6-4c3d-901b-9387de10e528_P(1).jpg", 2).FSs() @@ -233,7 +248,7 @@ func issue68ForgottenDuplicates() []fs.FS { // #390 Question: report shows way less images uploaded than scanned func issue390WrongCount() []fs.FS { - return newInMemFS(). + return newInMemFS("filesystem"). addJSONImage("Takeout/Google Photos/Photos from 2021/image000000.jpg.json", "image000000.jpg"). 
addJSONImage("Takeout/Google Photos/Photos from 2021/image000000.jpg(1).json", "image000000.jpg"). addJSONImage("Takeout/Google Photos/Photos from 2021/image000000.gif.json", "image000000.gif.json"). @@ -242,7 +257,7 @@ func issue390WrongCount() []fs.FS { } func issue390WrongCount2() []fs.FS { - return newInMemFS(). + return newInMemFS("filesystem"). addJSONImage("Takeout/Google Photos/2017 - Croatia/IMG_0170.jpg.json", "IMG_0170.jpg"). addJSONImage("Takeout/Google Photos/Photos from 2018/IMG_0170.JPG.json", "IMG_0170.JPG"). addJSONImage("Takeout/Google Photos/Photos from 2018/IMG_0170.HEIC.json", "IMG_0170.HEIC"). @@ -262,17 +277,17 @@ func issue390WrongCount2() []fs.FS { addImage("Takeout/Google Photos/Photos from 2017/IMG_0170.jpg", 514963).FSs() } -func checkLivePhoto() []fs.FS { - return newInMemFS(). +func checkLivePhoto() []fs.FS { // nolint:unused + return newInMemFS("filesystem"). addJSONImage("Motion test/20231227_152817.jpg.json", "20231227_152817.jpg"). addImage("Motion test/20231227_152817.jpg", 7426453). addImage("Motion test/20231227_152817.MP4", 5192477). - addJSONImage("Motion Test/PXL_20231118_035751175.MP.jpg.json", "20231118_035751175.MP.jpg"). + addJSONImage("Motion Test/PXL_20231118_035751175.MP.jpg.json", "PXL_20231118_035751175.MP.jpg"). addImage("Motion Test/PXL_20231118_035751175.MP", 3478685). 
addImage("Motion Test/PXL_20231118_035751175.MP.jpg", 8025699).FSs() } -func loadFromString(dateFormat string, s string) []fs.FS { +func loadFromString(dateFormat string, s string) []fs.FS { // nolint:unused fss, err := fakefs.ScanStringList(dateFormat, s) if err != nil { panic(err.Error()) @@ -280,7 +295,7 @@ func loadFromString(dateFormat string, s string) []fs.FS { return fss } -func checkLivePhotoPixil() []fs.FS { +func checkLivePhotoPixil() []fs.FS { // nolint:unused return loadFromString("01-02-2006 15:04", `Part: takeout-20230720T065335Z-001.zip Archive: takeout-20230720T065335Z-001.zip Length Date Time Name @@ -299,7 +314,7 @@ Archive: takeout-20230720T065335Z-001.zip `) } -func checkMissingJSON() []fs.FS { +func checkMissingJSON() []fs.FS { // nolint:unused return loadFromString("01-02-2006 15:04", `Part: takeout-20230720T065335Z-001.zip Archive: takeout-20230720T065335Z-001.zip Length Date Time Name @@ -314,7 +329,7 @@ Archive: takeout-20230720T065335Z-001.zip `) } -func checkDuplicates() []fs.FS { +func checkDuplicates() []fs.FS { // nolint:unused return loadFromString("01-02-2006 15:04", `Part: takeout-20230720T065335Z-001.tgz -rw-r--r-- 0/0 365022 2024-07-19 01:19 Takeout/Google Foto/[E&S] 2016-01-05 - Castello De Albertis e Mostra d/20160105_121621_LLS.jpg -rw-r--r-- 0/0 708 2024-07-19 01:19 Takeout/Google Foto/[E&S] 2016-01-05 - Castello De Albertis e Mostra d/20160105_121621_LLS.jpg.json @@ -328,7 +343,7 @@ Part: takeout-20230720T065335Z-002.tgz `) } -func checkMPissue405() []fs.FS { +func checkMPissue405() []fs.FS { // nolint:unused return loadFromString("2006-01-02 15:04", `Part: takeout-20230720T065335Z-001.zip 895 2024-01-21 16:52 Takeout/Google Photos/Untitled(1)/PXL_20210102_221126856.MP~2.jpg.json 893 2024-01-21 16:52 Takeout/Google Photos/Untitled(1)/PXL_20210102_221126856.MP.jpg.json diff --git a/browser/gp/testgp_test.go b/adapters/googlePhotos/testgp_test.go similarity index 54% rename from browser/gp/testgp_test.go rename to 
adapters/googlePhotos/testgp_test.go index 96230c42..fb0f018e 100644 --- a/browser/gp/testgp_test.go +++ b/adapters/googlePhotos/testgp_test.go @@ -2,16 +2,18 @@ package gp import ( "context" - "io" "io/fs" - "log/slog" "path" "reflect" "testing" + "time" "github.com/kr/pretty" - "github.com/simulot/immich-go/helpers/fileevent" - "github.com/simulot/immich-go/immich" + "github.com/simulot/immich-go/app" + "github.com/simulot/immich-go/internal/configuration" + "github.com/simulot/immich-go/internal/fileevent" + "github.com/simulot/immich-go/internal/filenames" + "github.com/simulot/immich-go/internal/filetypes" ) func TestBrowse(t *testing.T) { @@ -31,19 +33,16 @@ func TestBrowse(t *testing.T) { { "simpleAlbum", simpleAlbum, sortFileResult([]fileResult{ - {name: "PXL_20230922_144936660.jpg", size: 10, title: "PXL_20230922_144936660.jpg"}, {name: "PXL_20230922_144936660.jpg", size: 10, title: "PXL_20230922_144936660.jpg"}, {name: "PXL_20230922_144934440.jpg", size: 15, title: "PXL_20230922_144934440.jpg"}, {name: "IMG_8172.jpg", size: 25, title: "IMG_8172.jpg"}, {name: "IMG_8172.jpg", size: 52, title: "IMG_8172.jpg"}, - {name: "IMG_8172.jpg", size: 52, title: "IMG_8172.jpg"}, }), }, { "albumWithoutImage", albumWithoutImage, sortFileResult([]fileResult{ - {name: "PXL_20230922_144936660.jpg", size: 10, title: "PXL_20230922_144936660.jpg"}, {name: "PXL_20230922_144936660.jpg", size: 10, title: "PXL_20230922_144936660.jpg"}, {name: "PXL_20230922_144934440.jpg", size: 15, title: "PXL_20230922_144934440.jpg"}, }), @@ -84,11 +83,8 @@ func TestBrowse(t *testing.T) { "namesIssue39", namesIssue39, sortFileResult([]fileResult{ {name: "Backyard_ceremony_wedding_photography_xxxxxxx_m.jpg", size: 1, title: "Backyard_ceremony_wedding_photography_xxxxxxx_magnoliastudios-371.jpg"}, - {name: "Backyard_ceremony_wedding_photography_xxxxxxx_m.jpg", size: 1, title: "Backyard_ceremony_wedding_photography_xxxxxxx_magnoliastudios-371.jpg"}, - {name: 
"Backyard_ceremony_wedding_photography_xxxxxxx_m(1).jpg", size: 181, title: "Backyard_ceremony_wedding_photography_xxxxxxx_magnoliastudios-181.jpg"}, {name: "Backyard_ceremony_wedding_photography_xxxxxxx_m(1).jpg", size: 181, title: "Backyard_ceremony_wedding_photography_xxxxxxx_magnoliastudios-181.jpg"}, {name: "Backyard_ceremony_wedding_photography_xxxxxxx_m(494).jpg", size: 494, title: "Backyard_ceremony_wedding_photography_markham_magnoliastudios-19.jpg"}, - {name: "Backyard_ceremony_wedding_photography_xxxxxxx_m(494).jpg", size: 494, title: "Backyard_ceremony_wedding_photography_markham_magnoliastudios-19.jpg"}, }), }, { @@ -129,36 +125,48 @@ func TestBrowse(t *testing.T) { {name: "IMG_0170.MP4", size: 6024972, title: "IMG_0170.MP4"}, {name: "IMG_0170.HEIC", size: 4443973, title: "IMG_0170.HEIC"}, {name: "IMG_0170.MP4", size: 2288647, title: "IMG_0170.MP4"}, - {name: "IMG_0170.JPG", size: 4570661, title: "IMG_0170.JPG"}, - {name: "IMG_0170.MP4", size: 6024972, title: "IMG_0170.MP4"}, - {name: "IMG_0170.HEIC", size: 4443973, title: "IMG_0170.HEIC"}, - {name: "IMG_0170.jpg", size: 514963, title: "IMG_0170.jpg"}, }), }, } + + logFile := configuration.DefaultLogFile() for _, c := range tc { t.Run(c.name, func(t *testing.T) { fsys := c.gen() ctx := context.Background() - - log := slog.New(slog.NewTextHandler(io.Discard, nil)) - - b, err := NewTakeout(ctx, fileevent.NewRecorder(log, false), immich.DefaultSupportedMedia, fsys...) + log := app.Log{ + File: logFile, + Level: "INFO", + } + err := log.OpenLogFile() if err != nil { t.Error(err) + return } - - err = b.Prepare(ctx) + flags := &ImportFlags{ + SupportedMedia: filetypes.DefaultSupportedMedia, + CreateAlbums: true, + InfoCollector: filenames.NewInfoCollector(time.Local, filetypes.DefaultSupportedMedia), + } + log.Logger.Info("\n\n\ntest case: " + c.name) + recorder := fileevent.NewRecorder(log.Logger) + b, err := NewTakeout(ctx, recorder, flags, fsys...) 
if err != nil { t.Error(err) + return } + gChan := b.Browse(ctx) + results := []fileResult{} - for a := range b.Browse(ctx) { - results = append(results, fileResult{name: path.Base(a.FileName), size: a.FileSize, title: a.Title}) - if a.LivePhoto != nil { - results = append(results, fileResult{name: path.Base(a.LivePhoto.FileName), size: a.LivePhoto.FileSize, title: a.LivePhoto.Title}) + for g := range gChan { + if err = g.Validate(); err != nil { + t.Error(err) + return + } + for _, a := range g.Assets { + results = append(results, fileResult{name: path.Base(a.File.Name()), size: a.FileSize, title: a.OriginalFileName}) } } results = sortFileResult(results) @@ -202,6 +210,7 @@ func TestAlbums(t *testing.T) { }), }, }, + { name: "namesIssue39", gen: namesIssue39, @@ -215,27 +224,46 @@ func TestAlbums(t *testing.T) { }, } + logFile := configuration.DefaultLogFile() for _, c := range tc { t.Run(c.name, func(t *testing.T) { ctx := context.Background() - fsys := c.gen() - b, err := NewTakeout(ctx, fileevent.NewRecorder(nil, false), immich.DefaultSupportedMedia, fsys...) + log := app.Log{ + File: logFile, + Level: "INFO", + } + err := log.OpenLogFile() if err != nil { t.Error(err) + return + } + log.Logger.Info("\n\n\ntest case: " + c.name) + recorder := fileevent.NewRecorder(log.Logger) + + fsys := c.gen() + flags := &ImportFlags{ + SupportedMedia: filetypes.DefaultSupportedMedia, + CreateAlbums: true, + InfoCollector: filenames.NewInfoCollector(time.Local, filetypes.DefaultSupportedMedia), } - err = b.Prepare(ctx) + log.Logger.Info("\n\n\ntest case: " + c.name) + b, err := NewTakeout(ctx, recorder, flags, fsys...) 
if err != nil { t.Error(err) + return } + gChan := b.Browse(ctx) albums := album{} - for a := range b.Browse(ctx) { - if len(a.Albums) > 0 { - for _, al := range a.Albums { - l := albums[al.Title] - l = append(l, fileResult{name: path.Base(a.FileName), size: a.FileSize, title: a.Title}) - albums[al.Title] = l + for g := range gChan { + for _, a := range g.Assets { + if len(g.Albums) > 0 { + for _, al := range g.Albums { + l := albums[al.Title] + l = append(l, fileResult{name: path.Base(a.File.Name()), size: a.FileSize, title: a.OriginalFileName}) + albums[al.Title] = l + } } } } @@ -251,136 +279,3 @@ func TestAlbums(t *testing.T) { }) } } - -func TestArchives(t *testing.T) { - type photo map[string]string - type album map[string][]string - tc := []struct { - name string - gen func() []fs.FS - acceptMissingJSON bool - wantLivePhotos photo - wantAlbum album - wantAsset photo - }{ - { - name: "checkLivePhoto", - gen: checkLivePhoto, - wantAsset: photo{}, - wantLivePhotos: photo{ - "Motion Test/PXL_20231118_035751175.MP.jpg": "Motion Test/PXL_20231118_035751175.MP", - "Motion test/20231227_152817.jpg": "Motion test/20231227_152817.MP4", - }, - wantAlbum: album{}, - }, - { - name: "checkLivePhotoPixil", - gen: checkLivePhotoPixil, - wantAsset: photo{}, - wantLivePhotos: photo{ - "Takeout/Google Photos/2022 - Germany - Private/IMG_4573.HEIC": "Takeout/Google Photos/2022 - Germany - Private/IMG_4573.MP4", - "Takeout/Google Photos/Photos from 2022/IMG_4573.HEIC": "Takeout/Google Photos/Photos from 2022/IMG_4573.MP4", - "Takeout/Google Photos/2022 - Germany/IMG_4573.HEIC": "Takeout/Google Photos/2022 - Germany/IMG_4573.MP4", - }, - wantAlbum: album{ - "2022 - Germany - Private": []string{"IMG_4573.HEIC"}, - "2022 - Germany": []string{"IMG_4573.HEIC"}, - }, - }, - { - name: "checkMissingJSON-No", - gen: checkMissingJSON, - wantAsset: photo{ - "Takeout/Google Photos/Photos from 2022/IMG_4573.HEIC": "", - }, - wantLivePhotos: photo{}, - wantAlbum: album{}, - }, - { - name: 
"checkMissingJSON-Yes", - gen: checkMissingJSON, - acceptMissingJSON: true, - wantAsset: photo{ - "Takeout/Google Photos/Photos from 2022/IMG_4573.HEIC": "", - "Takeout/Google Foto/Photos from 2016/IMG-20161201-WA0035.jpeg": "", - "Takeout/Google Photos/2022 - Germany - Private/IMG_4553.HEIC": "", - }, - wantLivePhotos: photo{ - "Takeout/Google Photos/2022 - Germany/IMG_1234.HEIC": "Takeout/Google Photos/2022 - Germany/IMG_1234.MP4", - }, - wantAlbum: album{ - "2022 - Germany": []string{"IMG_1234.HEIC"}, - }, - }, - { - name: "checkDuplicates", - gen: checkDuplicates, - wantAsset: photo{ - "Takeout/Google Foto/[E&S] 2016-01-05 - Castello De Albertis e Mostra d/20160105_121621_LLS.jpg": "", - "Takeout/Google Foto/Photos from 2016/20160105_121621_LLS.jpg": "", - "Takeout/Google Foto/2016-01-05 - _3/20160105_121621_LLS.jpg": "", - }, - wantLivePhotos: photo{}, - wantAlbum: album{}, - }, - { // #405 - name: "checkMP_405", - gen: checkMPissue405, - wantLivePhotos: photo{ - "Takeout/Google Photos/Untitled(1)/PXL_20210102_221126856.MP.jpg": "Takeout/Google Photos/Untitled(1)/PXL_20210102_221126856.MP", - "Takeout/Google Photos/Untitled(1)/PXL_20210102_221126856.MP~2.jpg": "Takeout/Google Photos/Untitled(1)/PXL_20210102_221126856.MP~2", - }, - wantAlbum: album{}, - wantAsset: photo{}, - }, - } - for _, c := range tc { - t.Run( - c.name, - func(t *testing.T) { - ctx := context.Background() - fsys := c.gen() - - b, err := NewTakeout(ctx, fileevent.NewRecorder(nil, false), immich.DefaultSupportedMedia, fsys...) 
- if err != nil { - t.Error(err) - } - b.SetAcceptMissingJSON(c.acceptMissingJSON) - err = b.Prepare(ctx) - if err != nil { - t.Error(err) - } - - livePhotos := photo{} - assets := photo{} - albums := album{} - for a := range b.Browse(ctx) { - if a.LivePhoto != nil { - photo := a.FileName - video := a.LivePhoto.FileName - livePhotos[photo] = video - } else { - assets[a.FileName] = "" - } - for _, al := range a.Albums { - l := albums[al.Title] - l = append(l, path.Base(a.FileName)) - albums[al.Title] = l - } - } - if !reflect.DeepEqual(assets, c.wantAsset) { - t.Errorf("difference assets\n") - pretty.Ldiff(t, c.wantAsset, assets) - } - if !reflect.DeepEqual(livePhotos, c.wantLivePhotos) { - t.Errorf("difference LivePhotos\n") - pretty.Ldiff(t, c.wantLivePhotos, livePhotos) - } - if !reflect.DeepEqual(albums, c.wantAlbum) { - t.Errorf("difference Album\n") - pretty.Ldiff(t, c.wantAlbum, albums) - } - }, - ) - } -} diff --git a/app/app.go b/app/app.go new file mode 100644 index 00000000..79dcd9c2 --- /dev/null +++ b/app/app.go @@ -0,0 +1,83 @@ +package app + +import ( + "context" + "time" + + "github.com/simulot/immich-go/internal/fileevent" + "github.com/spf13/cobra" +) + +type ( + RunE func(cmd *cobra.Command, args []string) error + RunEAdaptor func(ctx context.Context, cmd *cobra.Command, app *Application) error +) + +// Application holds configuration used by commands +type Application struct { + client Client + log *Log + jnl *fileevent.Recorder + tz *time.Location + + // TODO manage configuration file + // ConfigurationFile string // Path to the configuration file to use +} + +func New(ctx context.Context, cmd *cobra.Command) *Application { + // application's context + app := &Application{ + log: &Log{}, + } + // app.PersistentFlags().StringVar(&app.ConfigurationFile, "use-configuration", app.ConfigurationFile, "Specifies the configuration to use") + AddLogFlags(ctx, cmd, app) + return app +} + +func (app *Application) GetTZ() *time.Location { + if app.tz == nil 
{ + app.tz = time.Local + } + return app.tz +} + +func (app *Application) SetTZ(tz *time.Location) { + app.tz = tz +} + +func (app *Application) Client() *Client { + return &app.client +} + +func (app *Application) Jnl() *fileevent.Recorder { + return app.jnl +} + +func (app *Application) SetJnl(jnl *fileevent.Recorder) { + app.jnl = jnl +} + +func (app *Application) Log() *Log { + return app.log +} + +func (app *Application) SetLog(log *Log) { + app.log = log +} + +func ChainRunEFunctions(prev RunE, fn RunEAdaptor, ctx context.Context, cmd *cobra.Command, app *Application) RunE { + if prev == nil { + return func(cmd *cobra.Command, args []string) error { + return fn(ctx, cmd, app) + } + } + return func(cmd *cobra.Command, args []string) error { + if prev != nil { + err := prev(cmd, args) + if err != nil { + return err + } + } + return fn(ctx, cmd, app) + } +} diff --git a/app/client.go b/app/client.go new file mode 100644 index 00000000..317872a2 --- /dev/null +++ b/app/client.go @@ -0,0 +1,196 @@ +package app + +import ( + "context" + "errors" + "fmt" + "io" + "log/slog" + "os" + "path/filepath" + "strings" + "time" + + "github.com/simulot/immich-go/immich" + "github.com/simulot/immich-go/internal/configuration" + "github.com/simulot/immich-go/internal/tzone" + "github.com/spf13/cobra" +) + +// add server flags to the command cmd +func AddClientFlags(ctx context.Context, cmd *cobra.Command, app *Application) { + client := app.Client() + client.DeviceUUID, _ = os.Hostname() + + cmd.PersistentFlags().StringVarP(&client.Server, "server", "s", client.Server, "Immich server address (example http://your-ip:2283 or https://your-domain)") + cmd.PersistentFlags().StringVarP(&client.APIKey, "api-key", "k", "", "API Key") + cmd.PersistentFlags().BoolVar(&client.APITrace, "api-trace", false, "Enable trace of api calls") + cmd.PersistentFlags().BoolVar(&client.SkipSSL, "skip-verify-ssl", false, "Skip SSL verification") + 
cmd.PersistentFlags().DurationVar(&client.ClientTimeout, "client-timeout", 5*time.Minute, "Set server calls timeout") + cmd.PersistentFlags().StringVar(&client.DeviceUUID, "device-uuid", client.DeviceUUID, "Set a device UUID") + cmd.PersistentFlags().BoolVar(&client.DryRun, "dry-run", false, "Simulate all actions") + cmd.PersistentFlags().StringVar(&client.TimeZone, "time-zone", client.TimeZone, "Override the system time zone") + + cmd.PersistentPreRunE = ChainRunEFunctions(cmd.PersistentPreRunE, OpenClient, ctx, cmd, app) + cmd.PersistentPostRunE = ChainRunEFunctions(cmd.PersistentPostRunE, CloseClient, ctx, cmd, app) +} + +func OpenClient(ctx context.Context, cmd *cobra.Command, app *Application) error { + client := app.Client() + log := app.Log() + + if client.Server != "" { + client.Server = strings.TrimSuffix(client.Server, "/") + } + if client.TimeZone != "" { + _, err := tzone.SetLocal(client.TimeZone) + if err != nil { + return err + } + } + + // Plug the journal on the Log + if log.File != "" { + if log.mainWriter == nil { + err := configuration.MakeDirForFile(log.File) + if err != nil { + return err + } + f, err := os.OpenFile(log.File, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0o664) + if err != nil { + return err + } + err = log.sLevel.UnmarshalText([]byte(strings.ToUpper(log.Level))) + if err != nil { + return err + } + log.setHandlers(f, nil) + // prepare the trace file name + client.APITraceWriterName = strings.TrimSuffix(log.File, filepath.Ext(log.File)) + ".trace.log" + } + } + + err := client.Initialize(ctx, app) + if err != nil { + return err + } + + err = client.Open(ctx) + if err != nil { + return err + } + + if client.APITrace { + if client.APITraceWriter == nil { + client.APITraceWriter, err = os.OpenFile(client.APITraceWriterName, os.O_CREATE|os.O_WRONLY, 0o664) + if err != nil { + return err + } + client.Immich.EnableAppTrace(client.APITraceWriter) + } + } + return nil +} + +func CloseClient(ctx context.Context, cmd *cobra.Command, app 
*Application) error { + if app.Client() != nil { + if app.Client().APITraceWriter != nil { + app.Client().APITraceWriter.Close() + app.log.Message("Check the API-TRACE file: %s", app.Client().APITraceWriterName) + } + return app.Client().Close() + } + return nil +} + +type Client struct { + Server string // Immich server address (http://:2283/api or https:///api) + // API string // Immich api endpoint (http://container_ip:3301) + APIKey string // API Key + APITrace bool // Enable API call traces + SkipSSL bool // Skip SSL Verification + ClientTimeout time.Duration // Set the client request timeout + DeviceUUID string // Set a device UUID + DryRun bool // Protect the server from changes + TimeZone string // Override default TZ + APITraceWriter io.WriteCloser // API tracer + APITraceWriterName string // API trace log name + Immich immich.ImmichInterface // Immich client + ClientLog *slog.Logger // Logger +} + +func (client *Client) Initialize(ctx context.Context, app *Application) error { + var joinedErr error + + // If the client isn't yet initialized + if client.Immich == nil { + if client.Server == "" { + joinedErr = errors.Join(joinedErr, errors.New("missing --server, Immich server address (http://:2283 or https://)")) + if client.APIKey == "" { + joinedErr = errors.Join(joinedErr, errors.New("missing --API-key")) + } + } + + if client.APITrace { + client.APITraceWriterName = strings.TrimSuffix(app.Log().File, filepath.Ext(app.Log().File)) + ".trace.log" + } + if joinedErr != nil { + return joinedErr + } + } + client.ClientLog = app.log.Logger + return nil +} + +func (client *Client) Open(ctx context.Context) error { + var err error + + client.ClientLog.Info("Connection to the server " + client.Server) + client.Immich, err = immich.NewImmichClient( + client.Server, + client.APIKey, + immich.OptionVerifySSL(client.SkipSSL), + immich.OptionConnectionTimeout(client.ClientTimeout), + immich.OptionDryRun(client.DryRun), + ) + if err != nil { + return err + } + + if 
client.DeviceUUID != "" { + client.Immich.SetDeviceUUID(client.DeviceUUID) + } + + if client.APITrace { + if client.APITraceWriter == nil { + client.APITraceWriter, err = os.OpenFile(client.APITraceWriterName, os.O_CREATE|os.O_WRONLY, 0o664) + if err != nil { + return err + } + client.Immich.EnableAppTrace(client.APITraceWriter) + } + } + + err = client.Immich.PingServer(ctx) + if err != nil { + return err + } + client.ClientLog.Info("Server status: OK") + + user, err := client.Immich.ValidateConnection(ctx) + if err != nil { + return err + } + client.ClientLog.Info(fmt.Sprintf("Connected, user: %s", user.Email)) + if client.DryRun { + client.ClientLog.Info("Dry-run mode enabled. No changes will be made to the server.") + } + return nil +} + +func (client *Client) Close() error { + if client.DryRun { + client.ClientLog.Info("Dry-run mode enabled. No changes were made to the server.") + } + return nil +} diff --git a/cmd/album/album.go b/app/cmd/_todo/album/album.nogo similarity index 85% rename from cmd/album/album.go rename to app/cmd/_todo/album/album.nogo index 3fbe4a28..202a87dc 100644 --- a/cmd/album/album.go +++ b/app/cmd/_todo/album/album.nogo @@ -12,7 +12,7 @@ import ( "github.com/simulot/immich-go/ui" ) -func AlbumCommand(ctx context.Context, common *cmd.SharedFlags, args []string) error { +func AlbumCommand(ctx context.Context, common *cmd.RootImmichFlags, args []string) error { if len(args) > 0 { cmd := args[0] args = args[1:] @@ -25,17 +25,17 @@ func AlbumCommand(ctx context.Context, common *cmd.SharedFlags, args []string) e } type DeleteAlbumCmd struct { - *cmd.SharedFlags + *cmd.RootImmichFlags pattern *regexp.Regexp // album pattern AssumeYes bool } -func deleteAlbum(ctx context.Context, common *cmd.SharedFlags, args []string) error { +func deleteAlbum(ctx context.Context, common *cmd.RootImmichFlags, args []string) error { app := &DeleteAlbumCmd{ - SharedFlags: common, + RootImmichFlags: common, } cmd := flag.NewFlagSet("album delete", 
flag.ExitOnError) - app.SharedFlags.SetFlags(cmd) + app.RootImmichFlags.SetFlags(cmd) cmd.BoolFunc("yes", "When true, assume Yes to all actions", func(s string) error { var err error @@ -46,7 +46,7 @@ func deleteAlbum(ctx context.Context, common *cmd.SharedFlags, args []string) er if err != nil { return err } - err = app.SharedFlags.Start(ctx) + err = app.RootImmichFlags.Start(ctx) if err != nil { return err } diff --git a/cmd/duplicate/duplicate.go b/app/cmd/_todo/duplicate/duplicate.nogo similarity index 92% rename from cmd/duplicate/duplicate.go rename to app/cmd/_todo/duplicate/duplicate.nogo index e713ec95..a62e76ce 100644 --- a/cmd/duplicate/duplicate.go +++ b/app/cmd/_todo/duplicate/duplicate.nogo @@ -13,14 +13,14 @@ import ( "time" "github.com/simulot/immich-go/cmd" - "github.com/simulot/immich-go/helpers/gen" + "github.com/simulot/immich-go/internal/gen" "github.com/simulot/immich-go/helpers/myflag" "github.com/simulot/immich-go/immich" "github.com/simulot/immich-go/ui" ) type DuplicateCmd struct { - *cmd.SharedFlags + *cmd.RootImmichFlags AssumeYes bool // When true, doesn't ask to the user DateRange immich.DateRange // Set capture date range IgnoreTZErrors bool // Enable TZ error tolerance @@ -36,18 +36,18 @@ type duplicateKey struct { Type string } -func NewDuplicateCmd(ctx context.Context, common *cmd.SharedFlags, args []string) (*DuplicateCmd, error) { +func NewDuplicateCmd(ctx context.Context, common *cmd.RootImmichFlags, args []string) (*DuplicateCmd, error) { cmd := flag.NewFlagSet("duplicate", flag.ExitOnError) validRange := immich.DateRange{} _ = validRange.Set("1850-01-04,2030-01-01") app := DuplicateCmd{ - SharedFlags: common, + RootImmichFlags: common, DateRange: validRange, assetsByID: map[string]*immich.Asset{}, assetsByBaseAndDate: map[duplicateKey][]*immich.Asset{}, } - app.SharedFlags.SetFlags(cmd) + app.RootImmichFlags.SetFlags(cmd) cmd.BoolFunc("ignore-tz-errors", "Ignore timezone difference to check duplicates (default: FALSE).", 
myflag.BoolFlagFn(&app.IgnoreTZErrors, false)) cmd.BoolFunc("yes", "When true, assume Yes to all actions", myflag.BoolFlagFn(&app.AssumeYes, false)) @@ -57,14 +57,14 @@ func NewDuplicateCmd(ctx context.Context, common *cmd.SharedFlags, args []string if err != nil { return nil, err } - err = app.SharedFlags.Start(ctx) + err = app.RootImmichFlags.Start(ctx) if err != nil { return nil, err } return &app, err } -func DuplicateCommand(ctx context.Context, common *cmd.SharedFlags, args []string) error { +func DuplicateCommand(ctx context.Context, common *cmd.RootImmichFlags, args []string) error { app, err := NewDuplicateCmd(ctx, common, args) if err != nil { return err diff --git a/cmd/metadata/metadata.go b/app/cmd/_todo/metadata/metadata.nogo similarity index 89% rename from cmd/metadata/metadata.go rename to app/cmd/_todo/metadata/metadata.nogo index 851f5fbf..654ef259 100644 --- a/cmd/metadata/metadata.go +++ b/app/cmd/_todo/metadata/metadata.nogo @@ -16,20 +16,20 @@ import ( ) type MetadataCmd struct { - *cmd.SharedFlags + *cmd.RootImmichFlags DryRun bool MissingDateDespiteName bool MissingDate bool } -func NewMetadataCmd(ctx context.Context, common *cmd.SharedFlags, args []string) (*MetadataCmd, error) { +func NewMetadataCmd(ctx context.Context, common *cmd.RootImmichFlags, args []string) (*MetadataCmd, error) { var err error cmd := flag.NewFlagSet("metadata", flag.ExitOnError) app := MetadataCmd{ - SharedFlags: common, + RootImmichFlags: common, } - app.SharedFlags.SetFlags(cmd) + app.RootImmichFlags.SetFlags(cmd) cmd.BoolFunc("dry-run", "display actions, but don't touch the server assets", myflag.BoolFlagFn(&app.DryRun, false)) cmd.BoolFunc("missing-date", "select all assets where the date is missing", myflag.BoolFlagFn(&app.MissingDate, false)) cmd.BoolFunc("missing-date-with-name", "select all assets where the date is missing but the name contains a the date", myflag.BoolFlagFn(&app.MissingDateDespiteName, false)) @@ -37,18 +37,18 @@ func NewMetadataCmd(ctx 
context.Context, common *cmd.SharedFlags, args []string) if err != nil { return nil, err } - err = app.SharedFlags.Start(ctx) + err = app.RootImmichFlags.Start(ctx) return &app, err } -func MetadataCommand(ctx context.Context, common *cmd.SharedFlags, args []string) error { +func MetadataCommand(ctx context.Context, common *cmd.RootImmichFlags, args []string) error { app, err := NewMetadataCmd(ctx, common, args) if err != nil { return err } fmt.Println("Get server's assets...") - list, err := app.SharedFlags.Immich.GetAllAssets(ctx) + list, err := app.RootImmichFlags.Immich.GetAllAssets(ctx) if err != nil { return err } @@ -56,7 +56,7 @@ func MetadataCommand(ctx context.Context, common *cmd.SharedFlags, args []string type broken struct { a *immich.Asset - metadata.Metadata + assets.Metadata fixable bool reason []string } diff --git a/cmd/stack/stack.go b/app/cmd/_todo/stack/stack.gono similarity index 52% rename from cmd/stack/stack.go rename to app/cmd/_todo/stack/stack.gono index fdfc491c..b8ed1ab9 100644 --- a/cmd/stack/stack.go +++ b/app/cmd/_todo/stack/stack.gono @@ -1,53 +1,53 @@ package stack import ( - "context" - "flag" "fmt" "sort" - "strconv" + "time" - "github.com/simulot/immich-go/cmd" - "github.com/simulot/immich-go/helpers/stacking" "github.com/simulot/immich-go/immich" + cliflags "github.com/simulot/immich-go/internal/cliFlags" + "github.com/simulot/immich-go/internal/stacking" "github.com/simulot/immich-go/ui" + "github.com/spf13/cobra" ) type StackCmd struct { - *cmd.SharedFlags - AssumeYes bool - DateRange immich.DateRange // Set capture date range + Command *cobra.Command + *cmd.RootImmichFlags // global flags + *cmd.ImmichServerFlags // Immich server flags + AssumeYes bool + DateRange cliflags.DateRange // Set capture date range } -func initStack(ctx context.Context, common *cmd.SharedFlags, args []string) (*StackCmd, error) { - cmd := flag.NewFlagSet("stack", flag.ExitOnError) - validRange := immich.DateRange{} - - _ = 
validRange.Set("1850-01-04,2030-01-01") - app := StackCmd{ - SharedFlags: common, - DateRange: validRange, - } - app.SharedFlags.SetFlags(cmd) - cmd.BoolFunc("yes", "When true, assume Yes to all actions", func(s string) error { - var err error - app.AssumeYes, err = strconv.ParseBool(s) - return err - }) - cmd.Var(&app.DateRange, "date", "Process only documents having a capture date in that range.") - err := cmd.Parse(args) - if err != nil { - return nil, err +func AddCommand(root *cmd.RootImmichFlags) { + stackCmd := &cobra.Command{ + Use: "stack", + Short: "Stack photos", + Long: `Stack photos taken in the short period of time.`, } - err = app.SharedFlags.Start(ctx) - if err != nil { - return nil, err + now := time.Now().Add(24 * time.Hour) + + ImmichServerFlags := cmd.AddImmichServerFlagSet(stackCmd, root) + + flags := &StackCmd{ + ImmichServerFlags: ImmichServerFlags, + DateRange: cliflags.DateRange{Before: time.Date(1980, 1, 1, 0, 0, 0, 0, time.Local), After: now}, } - return &app, err + stackCmd.Flags().Var(&flags.DateRange, "date-range", "photos must be taken in the date range") + stackCmd.Flags().Bool("force-yes", false, "Assume YES to all questions") + root.Command.AddCommand(stackCmd) + + // TODO: call the run } -func NewStackCommand(ctx context.Context, common *cmd.SharedFlags, args []string) error { - app, err := initStack(ctx, common, args) +func (app *StackCmd) run(cmd *cobra.Command, args []string) error { + ctx := cmd.Context() + err := app.RootImmichFlags.Open(cmd) + if err != nil { + return err + } + err = app.ImmichServerFlags.Open(app.RootImmichFlags) if err != nil { return err } diff --git a/cmd/tool/tool.go b/app/cmd/_todo/tool/tool.nogo similarity index 78% rename from cmd/tool/tool.go rename to app/cmd/_todo/tool/tool.nogo index 1c55a927..7ae8db4d 100644 --- a/cmd/tool/tool.go +++ b/app/cmd/_todo/tool/tool.nogo @@ -8,7 +8,7 @@ import ( "github.com/simulot/immich-go/cmd/album" ) -func CommandTool(ctx context.Context, common *cmd.SharedFlags, 
args []string) error { +func CommandTool(ctx context.Context, common *cmd.RootImmichFlags, args []string) error { if len(args) > 0 { cmd := args[0] args = args[1:] diff --git a/app/cmd/archive/archive.go b/app/cmd/archive/archive.go new file mode 100644 index 00000000..884a072f --- /dev/null +++ b/app/cmd/archive/archive.go @@ -0,0 +1,186 @@ +package archive + +import ( + "context" + "errors" + "os" + "strings" + + "github.com/simulot/immich-go/adapters/folder" + "github.com/simulot/immich-go/adapters/fromimmich" + gp "github.com/simulot/immich-go/adapters/googlePhotos" + "github.com/simulot/immich-go/app" + "github.com/simulot/immich-go/internal/fileevent" + "github.com/simulot/immich-go/internal/filenames" + "github.com/simulot/immich-go/internal/fshelper" + "github.com/simulot/immich-go/internal/fshelper/osfs" + "github.com/spf13/cobra" +) + +type ArchiveOptions struct { + ArchivePath string +} + +func NewArchiveCommand(ctx context.Context, app *app.Application) *cobra.Command { + cmd := &cobra.Command{ + Use: "archive", + Short: "Archive various sources of photos to a file system", + } + options := &ArchiveOptions{} + + cmd.PersistentFlags().StringVarP(&options.ArchivePath, "write-to-folder", "w", "", "Path where to write the archive") + _ = cmd.MarkPersistentFlagRequired("write-to-folder") + + cmd.AddCommand(NewImportFromFolderCommand(ctx, cmd, app, options)) + cmd.AddCommand(NewFromGooglePhotosCommand(ctx, cmd, app, options)) + cmd.AddCommand(NewFromImmichCommand(ctx, cmd, app, options)) + + return cmd +} + +func NewImportFromFolderCommand(ctx context.Context, parent *cobra.Command, app *app.Application, archOptions *ArchiveOptions) *cobra.Command { + cmd := &cobra.Command{ + Use: "from-folder", + Short: "Archive photos from a folder", + } + + options := &folder.ImportFolderOptions{} + options.AddFromFolderFlags(cmd, parent) + + cmd.RunE = func(cmd *cobra.Command, args []string) error { //nolint:contextcheck + // ready to run + ctx := cmd.Context() + log := 
app.Log() + if app.Jnl() == nil { + app.SetJnl(fileevent.NewRecorder(app.Log().Logger)) + app.Jnl().SetLogger(app.Log().SetLogWriter(os.Stdout)) + } + p, err := cmd.Flags().GetString("write-to-folder") + if err != nil { + return err + } + + err = os.MkdirAll(p, 0o755) + if err != nil { + return err + } + + destFS := osfs.DirFS(p) + + // parse arguments + fsyss, err := fshelper.ParsePath(args) + if err != nil { + return err + } + if len(fsyss) == 0 { + log.Message("No file found matching the pattern: %s", strings.Join(args, ",")) + return errors.New("No file found matching the pattern: " + strings.Join(args, ",")) + } + options.InfoCollector = filenames.NewInfoCollector(app.GetTZ(), options.SupportedMedia) + source, err := folder.NewLocalFiles(ctx, app.Jnl(), options, fsyss...) + if err != nil { + return err + } + + dest, err := folder.NewLocalAssetWriter(destFS, ".") + if err != nil { + return err + } + return run(ctx, app.Jnl(), app, source, dest) + } + return cmd +} + +func NewFromGooglePhotosCommand(ctx context.Context, parent *cobra.Command, app *app.Application, archOptions *ArchiveOptions) *cobra.Command { + cmd := &cobra.Command{ + Use: "from-google-photos [flags] | ", + Short: "Archive photos either from a zipped Google Photos takeout or decompressed archive", + Args: cobra.MinimumNArgs(1), + } + cmd.SetContext(ctx) + options := &gp.ImportFlags{} + options.AddFromGooglePhotosFlags(cmd, parent) + + cmd.RunE = func(cmd *cobra.Command, args []string) error { //nolint:contextcheck + ctx := cmd.Context() + log := app.Log() + if app.Jnl() == nil { + app.SetJnl(fileevent.NewRecorder(app.Log().Logger)) + app.Jnl().SetLogger(app.Log().SetLogWriter(os.Stdout)) + } + p, err := cmd.Flags().GetString("write-to-folder") + if err != nil { + return err + } + + err = os.MkdirAll(p, 0o755) + if err != nil { + return err + } + + destFS := osfs.DirFS(p) + + fsyss, err := fshelper.ParsePath(args) + if err != nil { + return err + } + if len(fsyss) == 0 { + log.Message("No file 
found matching the pattern: %s", strings.Join(args, ",")) + return errors.New("No file found matching the pattern: " + strings.Join(args, ",")) + } + source, err := gp.NewTakeout(ctx, app.Jnl(), options, fsyss...) + if err != nil { + return err + } + dest, err := folder.NewLocalAssetWriter(destFS, ".") + if err != nil { + return err + } + return run(ctx, app.Jnl(), app, source, dest) + } + + return cmd +} + +func NewFromImmichCommand(ctx context.Context, parent *cobra.Command, app *app.Application, archOptions *ArchiveOptions) *cobra.Command { + cmd := &cobra.Command{ + Use: "from-immich [from-flags]", + Short: "Archive photos from Immich", + } + cmd.SetContext(ctx) + options := &fromimmich.FromImmichFlags{} + options.AddFromImmichFlags(cmd, parent) + + cmd.RunE = func(cmd *cobra.Command, args []string) error { //nolint:contextcheck + ctx := cmd.Context() + if app.Jnl() == nil { + app.SetJnl(fileevent.NewRecorder(app.Log().Logger)) + app.Jnl().SetLogger(app.Log().SetLogWriter(os.Stdout)) + } + + p, err := cmd.Flags().GetString("write-to-folder") + if err != nil { + return err + } + + err = os.MkdirAll(p, 0o755) + if err != nil { + return err + } + + destFS := osfs.DirFS(p) + + dest, err := folder.NewLocalAssetWriter(destFS, ".") + if err != nil { + return err + } + + source, err := fromimmich.NewFromImmich(ctx, app, app.Jnl(), options) + if err != nil { + return err + } + return run(ctx, app.Jnl(), app, source, dest) + } + + return cmd +} diff --git a/app/cmd/archive/run.go b/app/cmd/archive/run.go new file mode 100644 index 00000000..9f797673 --- /dev/null +++ b/app/cmd/archive/run.go @@ -0,0 +1,39 @@ +package archive + +import ( + "context" + "errors" + + "github.com/simulot/immich-go/adapters" + "github.com/simulot/immich-go/app" + "github.com/simulot/immich-go/internal/fileevent" +) + +func run(ctx context.Context, jnl *fileevent.Recorder, _ *app.Application, source adapters.Reader, dest adapters.AssetWriter) error { // nolint:unparam + gChan := 
source.Browse(ctx) + errCount := 0 + for { + select { + case <-ctx.Done(): + return ctx.Err() + case g, ok := <-gChan: + if !ok { + return nil + } + for _, a := range g.Assets { + err := dest.WriteAsset(ctx, a) + if err != nil { + jnl.Log().Error(err.Error()) + errCount++ + if errCount > 5 { + err := errors.New("too many errors, aborting") + jnl.Log().Error(err.Error()) + return err + } + } else { + jnl.Record(ctx, fileevent.Written, a) + } + } + } + } +} diff --git a/app/cmd/commands.go b/app/cmd/commands.go new file mode 100644 index 00000000..0753c1f1 --- /dev/null +++ b/app/cmd/commands.go @@ -0,0 +1,17 @@ +package cmd + +import ( + "context" + + "github.com/simulot/immich-go/app" + "github.com/simulot/immich-go/app/cmd/archive" + "github.com/simulot/immich-go/app/cmd/upload" + "github.com/spf13/cobra" +) + +func AddCommands(cmd *cobra.Command, ctx context.Context, app *app.Application) { + cmd.AddCommand( + upload.NewUploadCommand(ctx, app), + archive.NewArchiveCommand(ctx, app), + ) +} diff --git a/cmd/upload/TEST_DATA/folder/low/PXL_20231006_063000139.jpg b/app/cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063000139.jpg similarity index 100% rename from cmd/upload/TEST_DATA/folder/low/PXL_20231006_063000139.jpg rename to app/cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063000139.jpg diff --git a/cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063000139.jpg.json b/app/cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063000139.jpg.json similarity index 100% rename from cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063000139.jpg.json rename to app/cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063000139.jpg.json diff --git a/cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063029647.jpg b/app/cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 
6-10-23/PXL_20231006_063029647.jpg similarity index 100% rename from cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063029647.jpg rename to app/cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063029647.jpg diff --git a/cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063029647.jpg.json b/app/cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063029647.jpg.json similarity index 100% rename from cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063029647.jpg.json rename to app/cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063029647.jpg.json diff --git a/cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063108407.jpg b/app/cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063108407.jpg similarity index 100% rename from cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063108407.jpg rename to app/cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063108407.jpg diff --git a/cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063108407.jpg.json b/app/cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063108407.jpg.json similarity index 100% rename from cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063108407.jpg.json rename to app/cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063108407.jpg.json diff --git a/cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063121958.jpg b/app/cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063121958.jpg similarity index 100% rename from cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063121958.jpg rename to app/cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 
6-10-23/PXL_20231006_063121958.jpg diff --git a/cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063121958.jpg.json b/app/cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063121958.jpg.json similarity index 100% rename from cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063121958.jpg.json rename to app/cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063121958.jpg.json diff --git a/cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063357420.jpg b/app/cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063357420.jpg similarity index 100% rename from cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063357420.jpg rename to app/cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063357420.jpg diff --git a/cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063357420.jpg.json b/app/cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063357420.jpg.json similarity index 100% rename from cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063357420.jpg.json rename to app/cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063357420.jpg.json diff --git a/cmd/upload/TEST_DATA/banned/backup/PXL_20231006_063536303.jpg b/app/cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063536303.jpg similarity index 100% rename from cmd/upload/TEST_DATA/banned/backup/PXL_20231006_063536303.jpg rename to app/cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063536303.jpg diff --git a/cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063536303.jpg.json b/app/cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063536303.jpg.json similarity index 100% rename from 
cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063536303.jpg.json rename to app/cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063536303.jpg.json diff --git a/cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063851485.jpg b/app/cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063851485.jpg similarity index 100% rename from cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063851485.jpg rename to app/cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063851485.jpg diff --git a/cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063851485.jpg.json b/app/cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063851485.jpg.json similarity index 100% rename from cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063851485.jpg.json rename to app/cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063851485.jpg.json diff --git a/app/cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063909898.LS.mp4 b/app/cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063909898.LS.mp4 new file mode 100644 index 00000000..c51aa144 Binary files /dev/null and b/app/cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063909898.LS.mp4 differ diff --git a/cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063909898.LS.mp4.json b/app/cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063909898.LS.mp4.json similarity index 100% rename from cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063909898.LS.mp4.json rename to app/cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/PXL_20231006_063909898.LS.mp4.json diff --git "a/cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 
6-10-23/m\303\251tadonn\303\251es.json" "b/app/cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/m\303\251tadonn\303\251es.json" similarity index 100% rename from "cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/m\303\251tadonn\303\251es.json" rename to "app/cmd/upload/TEST_DATA/Takeout1/Google Photos/Album test 6-10-23/m\303\251tadonn\303\251es.json" diff --git a/app/cmd/upload/TEST_DATA/Takeout2/Google Photos/Photos from 2023/PXL_20231006_063000139.jpg b/app/cmd/upload/TEST_DATA/Takeout2/Google Photos/Photos from 2023/PXL_20231006_063000139.jpg new file mode 100644 index 00000000..07ec1e2e Binary files /dev/null and b/app/cmd/upload/TEST_DATA/Takeout2/Google Photos/Photos from 2023/PXL_20231006_063000139.jpg differ diff --git a/cmd/upload/TEST_DATA/Takeout2/Google Photos/Photos from 2023/PXL_20231006_063000139.jpg.json b/app/cmd/upload/TEST_DATA/Takeout2/Google Photos/Photos from 2023/PXL_20231006_063000139.jpg.json similarity index 100% rename from cmd/upload/TEST_DATA/Takeout2/Google Photos/Photos from 2023/PXL_20231006_063000139.jpg.json rename to app/cmd/upload/TEST_DATA/Takeout2/Google Photos/Photos from 2023/PXL_20231006_063000139.jpg.json diff --git a/cmd/upload/TEST_DATA/Takeout2/Google Photos/Photos from 2023/PXL_20231006_063528961.jpg b/app/cmd/upload/TEST_DATA/Takeout2/Google Photos/Photos from 2023/PXL_20231006_063528961.jpg similarity index 100% rename from cmd/upload/TEST_DATA/Takeout2/Google Photos/Photos from 2023/PXL_20231006_063528961.jpg rename to app/cmd/upload/TEST_DATA/Takeout2/Google Photos/Photos from 2023/PXL_20231006_063528961.jpg diff --git a/cmd/upload/TEST_DATA/Takeout2/Google Photos/Photos from 2023/PXL_20231006_063528961.jpg.json b/app/cmd/upload/TEST_DATA/Takeout2/Google Photos/Photos from 2023/PXL_20231006_063528961.jpg.json similarity index 100% rename from cmd/upload/TEST_DATA/Takeout2/Google Photos/Photos from 2023/PXL_20231006_063528961.jpg.json rename to app/cmd/upload/TEST_DATA/Takeout2/Google 
Photos/Photos from 2023/PXL_20231006_063528961.jpg.json diff --git a/cmd/upload/TEST_DATA/Takeout2/Google Photos/Sans titre(9)/PXL_20231006_063108407.jpg b/app/cmd/upload/TEST_DATA/Takeout2/Google Photos/Sans titre(9)/PXL_20231006_063108407.jpg similarity index 100% rename from cmd/upload/TEST_DATA/Takeout2/Google Photos/Sans titre(9)/PXL_20231006_063108407.jpg rename to app/cmd/upload/TEST_DATA/Takeout2/Google Photos/Sans titre(9)/PXL_20231006_063108407.jpg diff --git a/cmd/upload/TEST_DATA/Takeout2/Google Photos/Sans titre(9)/PXL_20231006_063108407.jpg.json b/app/cmd/upload/TEST_DATA/Takeout2/Google Photos/Sans titre(9)/PXL_20231006_063108407.jpg.json similarity index 100% rename from cmd/upload/TEST_DATA/Takeout2/Google Photos/Sans titre(9)/PXL_20231006_063108407.jpg.json rename to app/cmd/upload/TEST_DATA/Takeout2/Google Photos/Sans titre(9)/PXL_20231006_063108407.jpg.json diff --git "a/cmd/upload/TEST_DATA/Takeout2/Google Photos/Sans titre(9)/m\303\251tadonn\303\251es.json" "b/app/cmd/upload/TEST_DATA/Takeout2/Google Photos/Sans titre(9)/m\303\251tadonn\303\251es.json" similarity index 100% rename from "cmd/upload/TEST_DATA/Takeout2/Google Photos/Sans titre(9)/m\303\251tadonn\303\251es.json" rename to "app/cmd/upload/TEST_DATA/Takeout2/Google Photos/Sans titre(9)/m\303\251tadonn\303\251es.json" diff --git a/cmd/upload/TEST_DATA/Takeout3/Photos from 2023/DSC_0238(1).JPG b/app/cmd/upload/TEST_DATA/Takeout3/Photos from 2023/DSC_0238(1).JPG similarity index 100% rename from cmd/upload/TEST_DATA/Takeout3/Photos from 2023/DSC_0238(1).JPG rename to app/cmd/upload/TEST_DATA/Takeout3/Photos from 2023/DSC_0238(1).JPG diff --git a/cmd/upload/TEST_DATA/Takeout3/Photos from 2023/DSC_0238.JPG b/app/cmd/upload/TEST_DATA/Takeout3/Photos from 2023/DSC_0238.JPG similarity index 100% rename from cmd/upload/TEST_DATA/Takeout3/Photos from 2023/DSC_0238.JPG rename to app/cmd/upload/TEST_DATA/Takeout3/Photos from 2023/DSC_0238.JPG diff --git a/cmd/upload/TEST_DATA/Takeout3/Photos 
from 2023/DSC_0238.JPG(1).json b/app/cmd/upload/TEST_DATA/Takeout3/Photos from 2023/DSC_0238.JPG(1).json similarity index 100% rename from cmd/upload/TEST_DATA/Takeout3/Photos from 2023/DSC_0238.JPG(1).json rename to app/cmd/upload/TEST_DATA/Takeout3/Photos from 2023/DSC_0238.JPG(1).json diff --git a/cmd/upload/TEST_DATA/Takeout3/Photos from 2023/DSC_0238.JPG.json b/app/cmd/upload/TEST_DATA/Takeout3/Photos from 2023/DSC_0238.JPG.json similarity index 100% rename from cmd/upload/TEST_DATA/Takeout3/Photos from 2023/DSC_0238.JPG.json rename to app/cmd/upload/TEST_DATA/Takeout3/Photos from 2023/DSC_0238.JPG.json diff --git a/cmd/upload/TEST_DATA/Takeout3/Photos from 2023/DSC_0238_1.JPG b/app/cmd/upload/TEST_DATA/Takeout3/Photos from 2023/DSC_0238_1.JPG similarity index 100% rename from cmd/upload/TEST_DATA/Takeout3/Photos from 2023/DSC_0238_1.JPG rename to app/cmd/upload/TEST_DATA/Takeout3/Photos from 2023/DSC_0238_1.JPG diff --git a/cmd/upload/TEST_DATA/Takeout3/Photos from 2023/DSC_0238_1.JPG.json b/app/cmd/upload/TEST_DATA/Takeout3/Photos from 2023/DSC_0238_1.JPG.json similarity index 100% rename from cmd/upload/TEST_DATA/Takeout3/Photos from 2023/DSC_0238_1.JPG.json rename to app/cmd/upload/TEST_DATA/Takeout3/Photos from 2023/DSC_0238_1.JPG.json diff --git a/app/cmd/upload/TEST_DATA/banned/PXL_20231006_063000139.jpg b/app/cmd/upload/TEST_DATA/banned/PXL_20231006_063000139.jpg new file mode 100644 index 00000000..07ec1e2e Binary files /dev/null and b/app/cmd/upload/TEST_DATA/banned/PXL_20231006_063000139.jpg differ diff --git a/cmd/upload/TEST_DATA/banned/PXL_20231006_063029647.jpg b/app/cmd/upload/TEST_DATA/banned/PXL_20231006_063029647.jpg similarity index 100% rename from cmd/upload/TEST_DATA/banned/PXL_20231006_063029647.jpg rename to app/cmd/upload/TEST_DATA/banned/PXL_20231006_063029647.jpg diff --git a/cmd/upload/TEST_DATA/banned/PXL_20231006_063108407.jpg b/app/cmd/upload/TEST_DATA/banned/PXL_20231006_063108407.jpg similarity index 100% rename from 
cmd/upload/TEST_DATA/banned/PXL_20231006_063108407.jpg rename to app/cmd/upload/TEST_DATA/banned/PXL_20231006_063108407.jpg diff --git a/cmd/upload/TEST_DATA/banned/PXL_20231006_063121958 (another copy).jpg b/app/cmd/upload/TEST_DATA/banned/PXL_20231006_063121958 (another copy).jpg similarity index 100% rename from cmd/upload/TEST_DATA/banned/PXL_20231006_063121958 (another copy).jpg rename to app/cmd/upload/TEST_DATA/banned/PXL_20231006_063121958 (another copy).jpg diff --git a/cmd/upload/TEST_DATA/banned/PXL_20231006_063121958 (copy).jpg b/app/cmd/upload/TEST_DATA/banned/PXL_20231006_063121958 (copy).jpg similarity index 100% rename from cmd/upload/TEST_DATA/banned/PXL_20231006_063121958 (copy).jpg rename to app/cmd/upload/TEST_DATA/banned/PXL_20231006_063121958 (copy).jpg diff --git a/cmd/upload/TEST_DATA/banned/PXL_20231006_063121958.jpg b/app/cmd/upload/TEST_DATA/banned/PXL_20231006_063121958.jpg similarity index 100% rename from cmd/upload/TEST_DATA/banned/PXL_20231006_063121958.jpg rename to app/cmd/upload/TEST_DATA/banned/PXL_20231006_063121958.jpg diff --git a/cmd/upload/TEST_DATA/banned/backup/PXL_20231006_063357420.jpg b/app/cmd/upload/TEST_DATA/banned/backup/PXL_20231006_063357420.jpg similarity index 100% rename from cmd/upload/TEST_DATA/banned/backup/PXL_20231006_063357420.jpg rename to app/cmd/upload/TEST_DATA/banned/backup/PXL_20231006_063357420.jpg diff --git a/cmd/upload/TEST_DATA/banned/backup/PXL_20231006_063528961.jpg b/app/cmd/upload/TEST_DATA/banned/backup/PXL_20231006_063528961.jpg similarity index 100% rename from cmd/upload/TEST_DATA/banned/backup/PXL_20231006_063528961.jpg rename to app/cmd/upload/TEST_DATA/banned/backup/PXL_20231006_063528961.jpg diff --git a/cmd/upload/TEST_DATA/folder/low/PXL_20231006_063536303.jpg b/app/cmd/upload/TEST_DATA/banned/backup/PXL_20231006_063536303.jpg similarity index 100% rename from cmd/upload/TEST_DATA/folder/low/PXL_20231006_063536303.jpg rename to 
app/cmd/upload/TEST_DATA/banned/backup/PXL_20231006_063536303.jpg diff --git a/cmd/upload/TEST_DATA/banned/backup/PXL_20231006_063851485.jpg b/app/cmd/upload/TEST_DATA/banned/backup/PXL_20231006_063851485.jpg similarity index 100% rename from cmd/upload/TEST_DATA/banned/backup/PXL_20231006_063851485.jpg rename to app/cmd/upload/TEST_DATA/banned/backup/PXL_20231006_063851485.jpg diff --git a/cmd/upload/TEST_DATA/folder/high/AlbumA/PXL_20231006_063000139.jpg b/app/cmd/upload/TEST_DATA/folder/high/AlbumA/PXL_20231006_063000139.jpg similarity index 100% rename from cmd/upload/TEST_DATA/folder/high/AlbumA/PXL_20231006_063000139.jpg rename to app/cmd/upload/TEST_DATA/folder/high/AlbumA/PXL_20231006_063000139.jpg diff --git a/cmd/upload/TEST_DATA/folder/high/AlbumA/PXL_20231006_063029647.jpg b/app/cmd/upload/TEST_DATA/folder/high/AlbumA/PXL_20231006_063029647.jpg similarity index 100% rename from cmd/upload/TEST_DATA/folder/high/AlbumA/PXL_20231006_063029647.jpg rename to app/cmd/upload/TEST_DATA/folder/high/AlbumA/PXL_20231006_063029647.jpg diff --git a/cmd/upload/TEST_DATA/folder/high/AlbumA/PXL_20231006_063108407.jpg b/app/cmd/upload/TEST_DATA/folder/high/AlbumA/PXL_20231006_063108407.jpg similarity index 100% rename from cmd/upload/TEST_DATA/folder/high/AlbumA/PXL_20231006_063108407.jpg rename to app/cmd/upload/TEST_DATA/folder/high/AlbumA/PXL_20231006_063108407.jpg diff --git a/cmd/upload/TEST_DATA/folder/high/AlbumA/PXL_20231006_063121958.jpg b/app/cmd/upload/TEST_DATA/folder/high/AlbumA/PXL_20231006_063121958.jpg similarity index 100% rename from cmd/upload/TEST_DATA/folder/high/AlbumA/PXL_20231006_063121958.jpg rename to app/cmd/upload/TEST_DATA/folder/high/AlbumA/PXL_20231006_063121958.jpg diff --git a/cmd/upload/TEST_DATA/folder/high/AlbumA/PXL_20231006_063357420.jpg b/app/cmd/upload/TEST_DATA/folder/high/AlbumA/PXL_20231006_063357420.jpg similarity index 100% rename from cmd/upload/TEST_DATA/folder/high/AlbumA/PXL_20231006_063357420.jpg rename to 
app/cmd/upload/TEST_DATA/folder/high/AlbumA/PXL_20231006_063357420.jpg diff --git a/cmd/upload/TEST_DATA/folder/high/AlbumB/PXL_20231006_063528961.jpg b/app/cmd/upload/TEST_DATA/folder/high/AlbumB/PXL_20231006_063528961.jpg similarity index 100% rename from cmd/upload/TEST_DATA/folder/high/AlbumB/PXL_20231006_063528961.jpg rename to app/cmd/upload/TEST_DATA/folder/high/AlbumB/PXL_20231006_063528961.jpg diff --git a/cmd/upload/TEST_DATA/folder/high/AlbumB/PXL_20231006_063536303.jpg b/app/cmd/upload/TEST_DATA/folder/high/AlbumB/PXL_20231006_063536303.jpg similarity index 100% rename from cmd/upload/TEST_DATA/folder/high/AlbumB/PXL_20231006_063536303.jpg rename to app/cmd/upload/TEST_DATA/folder/high/AlbumB/PXL_20231006_063536303.jpg diff --git a/cmd/upload/TEST_DATA/folder/high/AlbumB/PXL_20231006_063851485.jpg b/app/cmd/upload/TEST_DATA/folder/high/AlbumB/PXL_20231006_063851485.jpg similarity index 100% rename from cmd/upload/TEST_DATA/folder/high/AlbumB/PXL_20231006_063851485.jpg rename to app/cmd/upload/TEST_DATA/folder/high/AlbumB/PXL_20231006_063851485.jpg diff --git a/app/cmd/upload/TEST_DATA/folder/low/PXL_20231006_063000139.jpg b/app/cmd/upload/TEST_DATA/folder/low/PXL_20231006_063000139.jpg new file mode 100644 index 00000000..07ec1e2e Binary files /dev/null and b/app/cmd/upload/TEST_DATA/folder/low/PXL_20231006_063000139.jpg differ diff --git a/cmd/upload/TEST_DATA/folder/low/PXL_20231006_063029647.jpg b/app/cmd/upload/TEST_DATA/folder/low/PXL_20231006_063029647.jpg similarity index 100% rename from cmd/upload/TEST_DATA/folder/low/PXL_20231006_063029647.jpg rename to app/cmd/upload/TEST_DATA/folder/low/PXL_20231006_063029647.jpg diff --git a/cmd/upload/TEST_DATA/folder/low/PXL_20231006_063108407.jpg b/app/cmd/upload/TEST_DATA/folder/low/PXL_20231006_063108407.jpg similarity index 100% rename from cmd/upload/TEST_DATA/folder/low/PXL_20231006_063108407.jpg rename to app/cmd/upload/TEST_DATA/folder/low/PXL_20231006_063108407.jpg diff --git 
a/cmd/upload/TEST_DATA/folder/low/PXL_20231006_063121958.jpg b/app/cmd/upload/TEST_DATA/folder/low/PXL_20231006_063121958.jpg similarity index 100% rename from cmd/upload/TEST_DATA/folder/low/PXL_20231006_063121958.jpg rename to app/cmd/upload/TEST_DATA/folder/low/PXL_20231006_063121958.jpg diff --git a/cmd/upload/TEST_DATA/folder/low/PXL_20231006_063357420.jpg b/app/cmd/upload/TEST_DATA/folder/low/PXL_20231006_063357420.jpg similarity index 100% rename from cmd/upload/TEST_DATA/folder/low/PXL_20231006_063357420.jpg rename to app/cmd/upload/TEST_DATA/folder/low/PXL_20231006_063357420.jpg diff --git a/cmd/upload/TEST_DATA/folder/low/PXL_20231006_063528961.jpg b/app/cmd/upload/TEST_DATA/folder/low/PXL_20231006_063528961.jpg similarity index 100% rename from cmd/upload/TEST_DATA/folder/low/PXL_20231006_063528961.jpg rename to app/cmd/upload/TEST_DATA/folder/low/PXL_20231006_063528961.jpg diff --git a/app/cmd/upload/TEST_DATA/folder/low/PXL_20231006_063536303.jpg b/app/cmd/upload/TEST_DATA/folder/low/PXL_20231006_063536303.jpg new file mode 100644 index 00000000..535bacea Binary files /dev/null and b/app/cmd/upload/TEST_DATA/folder/low/PXL_20231006_063536303.jpg differ diff --git a/cmd/upload/TEST_DATA/folder/low/PXL_20231006_063851485.jpg b/app/cmd/upload/TEST_DATA/folder/low/PXL_20231006_063851485.jpg similarity index 100% rename from cmd/upload/TEST_DATA/folder/low/PXL_20231006_063851485.jpg rename to app/cmd/upload/TEST_DATA/folder/low/PXL_20231006_063851485.jpg diff --git a/cmd/upload/TEST_DATA/nodate/NO_DATE.jpg b/app/cmd/upload/TEST_DATA/nodate/NO_DATE.jpg similarity index 100% rename from cmd/upload/TEST_DATA/nodate/NO_DATE.jpg rename to app/cmd/upload/TEST_DATA/nodate/NO_DATE.jpg diff --git a/app/cmd/upload/advice.go b/app/cmd/upload/advice.go new file mode 100644 index 00000000..4f3adbe5 --- /dev/null +++ b/app/cmd/upload/advice.go @@ -0,0 +1,158 @@ +package upload + +import ( + "fmt" + "math" + "path" + "path/filepath" + "time" + + 
"github.com/simulot/immich-go/immich" + "github.com/simulot/immich-go/internal/assets" +) + +// - - go:generate stringer -type=AdviceCode +type AdviceCode int + +func (a AdviceCode) String() string { + switch a { + case IDontKnow: + return "IDontKnow" + // case SameNameOnServerButNotSure: + // return "SameNameOnServerButNotSure" + case SmallerOnServer: + return "SmallerOnServer" + case BetterOnServer: + return "BetterOnServer" + case SameOnServer: + return "SameOnServer" + case NotOnServer: + return "NotOnServer" + } + return fmt.Sprintf("advice(%d)", a) +} + +const ( + IDontKnow AdviceCode = iota + SmallerOnServer + BetterOnServer + SameOnServer + NotOnServer +) + +type Advice struct { + Advice AdviceCode + Message string + ServerAsset *immich.Asset + LocalAsset *assets.Asset +} + +func formatBytes(s int64) string { + suffixes := []string{"B", "KB", "MB", "GB"} + bytes := float64(s) + base := 1024.0 + if bytes < base { + return fmt.Sprintf("%.0f %s", bytes, suffixes[0]) + } + exp := int64(0) + for bytes >= base && exp < int64(len(suffixes)-1) { + bytes /= base + exp++ + } + roundedSize := math.Round(bytes*10) / 10 + return fmt.Sprintf("%.1f %s", roundedSize, suffixes[exp]) +} + +func (ai *AssetIndex) adviceSameOnServer(sa *immich.Asset) *Advice { + return &Advice{ + Advice: SameOnServer, + Message: fmt.Sprintf("An asset with the same name:%q, date:%q and size:%s exists on the server. No need to upload.", sa.OriginalFileName, sa.ExifInfo.DateTimeOriginal.Format(time.DateTime), formatBytes(sa.ExifInfo.FileSizeInByte)), + ServerAsset: sa, + } +} + +func (ai *AssetIndex) adviceSmallerOnServer(sa *immich.Asset) *Advice { + return &Advice{ + Advice: SmallerOnServer, + Message: fmt.Sprintf("An asset with the same name:%q and date:%q but with smaller size:%s exists on the server. 
Replace it.", sa.OriginalFileName, sa.ExifInfo.DateTimeOriginal.Format(time.DateTime), formatBytes(sa.ExifInfo.FileSizeInByte)), + ServerAsset: sa, + } +} + +func (ai *AssetIndex) adviceBetterOnServer(sa *immich.Asset) *Advice { + return &Advice{ + Advice: BetterOnServer, + Message: fmt.Sprintf("An asset with the same name:%q and date:%q but with bigger size:%s exists on the server. No need to upload.", sa.OriginalFileName, sa.ExifInfo.DateTimeOriginal.Format(time.DateTime), formatBytes(sa.ExifInfo.FileSizeInByte)), + ServerAsset: sa, + } +} + +func (ai *AssetIndex) adviceNotOnServer() *Advice { + return &Advice{ + Advice: NotOnServer, + Message: "This is a new asset, upload it.", + } +} + +// ShouldUpload checks if the server has this asset +// +// The server may have different assets with the same name. This happens with photos produced by digital cameras. +// The server may have the asset, but in lower resolution. Compare the taken date and resolution + +func (ai *AssetIndex) ShouldUpload(la *assets.Asset) (*Advice, error) { + filename := la.OriginalFileName + if path.Ext(filename) == "" { + filename += path.Ext(la.File.Name()) + } + + ID := la.DeviceAssetID() + + sa := ai.byID[ID] + if sa != nil { + // the same ID exists on the server + return ai.adviceSameOnServer(sa), nil + } + + var l []*immich.Asset + + // check all files with the same name + + n := filepath.Base(filename) + l = ai.byName[n] + if len(l) == 0 { + // n = strings.TrimSuffix(n, filepath.Ext(n)) + l = ai.byName[n] + } + + if len(l) > 0 { + dateTaken := la.CaptureDate + size := la.Size() + + for _, sa = range l { + compareDate := compareDate(dateTaken, sa.ExifInfo.DateTimeOriginal.Time) + compareSize := size - sa.ExifInfo.FileSizeInByte + + switch { + case compareDate == 0 && compareSize == 0: + return ai.adviceSameOnServer(sa), nil + case compareDate == 0 && compareSize > 0: + return ai.adviceSmallerOnServer(sa), nil + case compareDate == 0 && compareSize < 0: + return ai.adviceBetterOnServer(sa), 
nil + } + } + } + return ai.adviceNotOnServer(), nil +} + +func compareDate(d1 time.Time, d2 time.Time) int { + diff := d1.Sub(d2) + + switch { + case diff < -5*time.Minute: + return -1 + case diff >= 5*time.Minute: + return +1 + } + return 0 +} diff --git a/cmd/upload/assets.go b/app/cmd/upload/assets.go similarity index 72% rename from cmd/upload/assets.go rename to app/cmd/upload/assets.go index 184eea2c..d58522e6 100644 --- a/cmd/upload/assets.go +++ b/app/cmd/upload/assets.go @@ -5,8 +5,8 @@ import ( "path" "strings" - "github.com/simulot/immich-go/browser" "github.com/simulot/immich-go/immich" + "github.com/simulot/immich-go/internal/assets" ) type AssetIndex struct { @@ -40,18 +40,17 @@ func (ai *AssetIndex) Len() int { return len(ai.assets) } -func (ai *AssetIndex) AddLocalAsset(la *browser.LocalAssetFile, immichID string) { +func (ai *AssetIndex) AddLocalAsset(la *assets.Asset, immichID string) { sa := &immich.Asset{ ID: immichID, DeviceAssetID: la.DeviceAssetID(), - OriginalFileName: strings.TrimSuffix(path.Base(la.Title), path.Ext(la.Title)), + OriginalFileName: strings.TrimSuffix(path.Base(la.OriginalFileName), path.Ext(la.OriginalFileName)), ExifInfo: immich.ExifInfo{ - FileSizeInByte: int(la.Size()), - DateTimeOriginal: immich.ImmichTime{Time: la.Metadata.DateTaken}, - Latitude: la.Metadata.Latitude, - Longitude: la.Metadata.Longitude, + FileSizeInByte: la.Size(), + DateTimeOriginal: immich.ImmichTime{Time: la.CaptureDate}, + Latitude: la.Latitude, + Longitude: la.Longitude, }, - JustUploaded: true, } ai.assets = append(ai.assets, sa) ai.byID[sa.DeviceAssetID] = sa diff --git a/cmd/upload/e2e_upload_folder_test.go b/app/cmd/upload/e2e_upload_folder_test.nogo similarity index 86% rename from cmd/upload/e2e_upload_folder_test.go rename to app/cmd/upload/e2e_upload_folder_test.nogo index acf06bb9..562fc834 100644 --- a/cmd/upload/e2e_upload_folder_test.go +++ b/app/cmd/upload/e2e_upload_folder_test.nogo @@ -14,7 +14,7 @@ import ( 
"github.com/joho/godotenv" "github.com/simulot/immich-go/cmd" - "github.com/simulot/immich-go/helpers/configuration" + "github.com/simulot/immich-go/internal/configuration" "github.com/simulot/immich-go/immich" ) @@ -30,8 +30,8 @@ func initMyEnv(t *testing.T) { t.Fatalf("cant initialize environment variables: %s", err) } myEnv = e - if myEnv["IMMICH_TESTFILES"] == "" { - t.Fatal("missing IMMICH_TESTFILES in .env file") + if myEnv["IMMICHGO_TESTFILES"] == "" { + t.Fatal("missing IMMICHGO_TESTFILES in .env file") } } @@ -106,7 +106,7 @@ func runCase(t *testing.T, tc testCase) { args = append(args, tc.args...) - app := cmd.SharedFlags{ + app := cmd.RootImmichFlags{ Immich: ic, } @@ -126,7 +126,7 @@ func TestE2eUpload(t *testing.T) { { name: "upload folder", args: []string{ - myEnv["IMMICH_TESTFILES"] + "/low_high/high", + myEnv["IMMICHGO_TESTFILES"] + "/low_high/high", }, resetImmich: true, @@ -135,7 +135,7 @@ func TestE2eUpload(t *testing.T) { { name: "upload folder", args: []string{ - myEnv["IMMICH_TESTFILES"] + "/low_high/high", + myEnv["IMMICHGO_TESTFILES"] + "/low_high/high", }, // resetImmich: true, @@ -145,7 +145,7 @@ func TestE2eUpload(t *testing.T) { name: "upload folder *.jpg", args: []string{ "-google-photos", - myEnv["IMMICH_TESTFILES"] + "/test_folder/*.jpg", + myEnv["IMMICHGO_TESTFILES"] + "/test_folder/*.jpg", }, resetImmich: true, @@ -154,7 +154,7 @@ func TestE2eUpload(t *testing.T) { { name: "upload folder *.jpg", args: []string{ - myEnv["IMMICH_TESTFILES"] + "/test_folder/*/*.jpg", + myEnv["IMMICHGO_TESTFILES"] + "/test_folder/*/*.jpg", }, // resetImmich: true, @@ -165,7 +165,7 @@ func TestE2eUpload(t *testing.T) { // name: "upload folder *.jpg - dry run", // args: []string{ // "-dry-run", - // myEnv["IMMICH_TESTFILES"] + "/full_takeout (copy)/Takeout/Google Photos/Photos from 2023", + // myEnv["IMMICHGO_TESTFILES"] + "/full_takeout (copy)/Takeout/Google Photos/Photos from 2023", // }, // // resetImmich: true, @@ -176,7 +176,7 @@ func TestE2eUpload(t 
*testing.T) { name: "upload google photos", args: []string{ "-google-photos", - myEnv["IMMICH_TESTFILES"] + "/low_high/Takeout", + myEnv["IMMICHGO_TESTFILES"] + "/low_high/Takeout", }, // resetImmich: true, expectError: false, @@ -186,7 +186,7 @@ func TestE2eUpload(t *testing.T) { args: []string{ "-stack-burst=FALSE", "-stack-jpg-raw=TRUE", - myEnv["IMMICH_TESTFILES"] + "/burst/Tel", + myEnv["IMMICHGO_TESTFILES"] + "/burst/Tel", }, resetImmich: true, expectError: false, @@ -246,7 +246,7 @@ func Test_PermissionError(t *testing.T) { tc := testCase{ name: "Test_PermissionError", args: []string{ - myEnv["IMMICH_TESTFILES"] + "/low_high/high", + myEnv["IMMICHGO_TESTFILES"] + "/low_high/high", }, resetImmich: true, expectError: false, @@ -261,7 +261,7 @@ func Test_CreateAlbumFolder(t *testing.T) { name: "Test_CreateAlbumFolder", args: []string{ "-create-album-folder", - myEnv["IMMICH_TESTFILES"] + "/albums", + myEnv["IMMICHGO_TESTFILES"] + "/albums", }, resetImmich: true, expectError: false, @@ -277,7 +277,7 @@ func Test_XMP(t *testing.T) { name: "Test_XMP", args: []string{ "-create-stacks=false", - myEnv["IMMICH_TESTFILES"] + "/xmp", + myEnv["IMMICHGO_TESTFILES"] + "/xmp", }, resetImmich: true, expectError: false, @@ -294,9 +294,9 @@ func Test_XMP2(t *testing.T) { args: []string{ "-create-stacks=false", "-create-album-folder", - // myEnv["IMMICH_TESTFILES"] + "/xmp/files", - // myEnv["IMMICH_TESTFILES"] + "/xmp/files/*.CR2", - myEnv["IMMICH_TESTFILES"] + "/xmp/files*/*.CR2", + // myEnv["IMMICHGO_TESTFILES"] + "/xmp/files", + // myEnv["IMMICHGO_TESTFILES"] + "/xmp/files/*.CR2", + myEnv["IMMICHGO_TESTFILES"] + "/xmp/files*/*.CR2", }, resetImmich: true, expectError: false, @@ -313,7 +313,7 @@ func Test_Album_Issue_119(t *testing.T) { name: "Test_Album 1", args: []string{ "-album", "The Album", - myEnv["IMMICH_TESTFILES"] + "/xmp/files", + myEnv["IMMICHGO_TESTFILES"] + "/xmp/files", }, setup: func(ctx context.Context, t *testing.T, ic *immich.ImmichClient) func(t 
*testing.T) { _, err := ic.CreateAlbum(ctx, "The Album", "Description", nil) @@ -330,7 +330,7 @@ func Test_Album_Issue_119(t *testing.T) { name: "Test_Album 2", args: []string{ "-album", "The Album", - myEnv["IMMICH_TESTFILES"] + "/albums/Album test 6-10-23", + myEnv["IMMICHGO_TESTFILES"] + "/albums/Album test 6-10-23", }, resetImmich: false, expectError: false, @@ -349,7 +349,7 @@ func Test_Issue_126A(t *testing.T) { args: []string{ "-exclude-types", ".dng,.cr2,.arw,.rw2,.tif,.tiff,.gif,.psd", - myEnv["IMMICH_TESTFILES"] + "/burst/PXL6", + myEnv["IMMICHGO_TESTFILES"] + "/burst/PXL6", }, resetImmich: true, expectError: false, @@ -366,7 +366,7 @@ func Test_Issue_126B(t *testing.T) { args: []string{ "-select-types", ".jpg", - myEnv["IMMICH_TESTFILES"] + "/burst/PXL6", + myEnv["IMMICHGO_TESTFILES"] + "/burst/PXL6", }, resetImmich: true, expectError: false, @@ -382,7 +382,7 @@ func Test_Issue_129(t *testing.T) { name: "Test_Issue_129", args: []string{ "-google-photos", - myEnv["IMMICH_TESTFILES"] + "/Weird file names #88", + myEnv["IMMICHGO_TESTFILES"] + "/Weird file names #88", }, resetImmich: true, expectError: false, @@ -400,7 +400,7 @@ func Test_Issue_128(t *testing.T) { name: "Test_Issue_128", args: []string{ "-google-photos", - myEnv["IMMICH_TESTFILES"] + "/Issue 128", + myEnv["IMMICHGO_TESTFILES"] + "/Issue 128", }, resetImmich: true, expectError: false, @@ -417,7 +417,7 @@ func Test_GP_MultiZip(t *testing.T) { name: "Test_Issue_128", args: []string{ "-google-photos", - myEnv["IMMICH_TESTFILES"] + "/google-photos/zip*.zip", + myEnv["IMMICHGO_TESTFILES"] + "/google-photos/zip*.zip", }, resetImmich: true, expectError: false, @@ -433,7 +433,7 @@ func Test_ExtensionsFromTheServer(t *testing.T) { name: "ExtensionsFromTheServer", args: []string{ // "-log-json", - myEnv["IMMICH_TESTFILES"] + "/low_high/high", + myEnv["IMMICHGO_TESTFILES"] + "/low_high/high", }, // resetImmich: true, @@ -495,7 +495,7 @@ func Test_CreateAlbumFolder_304(t *testing.T) { }, resetImmich: 
true, expectError: false, - changeCWD: myEnv["IMMICH_TESTFILES"] + "/Error Upload #304", + changeCWD: myEnv["IMMICHGO_TESTFILES"] + "/Error Upload #304", } runCase(t, tc) } @@ -511,7 +511,7 @@ func Test_CreateAlbumFolder_304_2(t *testing.T) { }, resetImmich: true, expectError: false, - changeCWD: myEnv["IMMICH_TESTFILES"] + "/Error Upload #304", + changeCWD: myEnv["IMMICHGO_TESTFILES"] + "/Error Upload #304", } runCase(t, tc) } @@ -523,7 +523,7 @@ func Test_EnrichedAlbum_297(t *testing.T) { name: "Test_EnrichedAlbum_297", args: []string{ "-google-photos", - myEnv["IMMICH_TESTFILES"] + "/#297 Album enrichis #329 #297/Album texts #287/takeout-20240613T094535Z-001.zip", + myEnv["IMMICHGO_TESTFILES"] + "/#297 Album enrichis #329 #297/Album texts #287/takeout-20240613T094535Z-001.zip", }, resetImmich: true, expectError: false, @@ -576,7 +576,7 @@ func Test_SmallTakeout_Better_p1(t *testing.T) { name: "Test_SmallTakeout_Better_p1", args: []string{ "-google-photos", - myEnv["IMMICH_TESTFILES"] + "/low_high/Takeout", + myEnv["IMMICHGO_TESTFILES"] + "/low_high/Takeout", }, resetImmich: true, expectError: false, @@ -591,7 +591,7 @@ func Test_SmallTakeout_Better_p2(t *testing.T) { tc := testCase{ name: "Test_SmallTakeout_Better_p2", args: []string{ - myEnv["IMMICH_TESTFILES"] + "/low_high/high", + myEnv["IMMICHGO_TESTFILES"] + "/low_high/high", }, resetImmich: false, expectError: false, @@ -608,7 +608,7 @@ func Test_MotionPictures_303_280(t *testing.T) { args: []string{ "-api-trace", "-google-photos", - myEnv["IMMICH_TESTFILES"] + "/Motion photo #303 #280/takeout-motion-test.zip", + myEnv["IMMICHGO_TESTFILES"] + "/Motion photo #303 #280/takeout-motion-test.zip", }, resetImmich: true, expectError: false, @@ -625,7 +625,7 @@ func Test_Duplicates_380(t *testing.T) { args: []string{ "-google-photos", "-debug-counters", - myEnv["IMMICH_TESTFILES"] + "/#380 duplicates in GP/Takeout*.zip", + myEnv["IMMICHGO_TESTFILES"] + "/#380 duplicates in GP/Takeout*.zip", }, resetImmich: true, 
expectError: false, diff --git a/app/cmd/upload/from-folder.go b/app/cmd/upload/from-folder.go new file mode 100644 index 00000000..d6359c3d --- /dev/null +++ b/app/cmd/upload/from-folder.go @@ -0,0 +1,56 @@ +package upload + +import ( + "context" + "errors" + "strings" + + "github.com/simulot/immich-go/adapters/folder" + "github.com/simulot/immich-go/app" + "github.com/simulot/immich-go/internal/filenames" + "github.com/simulot/immich-go/internal/fshelper" + "github.com/spf13/cobra" +) + +func NewFromFolderCommand(ctx context.Context, parent *cobra.Command, app *app.Application, upOptions *UploadOptions) *cobra.Command { + cmd := &cobra.Command{ + Use: "from-folder [flags] ...", + Short: "Upload photos from a folder", + Args: cobra.MinimumNArgs(1), + } + cmd.SetContext(ctx) + options := &folder.ImportFolderOptions{} + options.AddFromFolderFlags(cmd, parent) + + cmd.RunE = func(cmd *cobra.Command, args []string) error { //nolint:contextcheck + // ready to run + ctx := cmd.Context() + log := app.Log() + client := app.Client() + options.TZ = app.GetTZ() + + // parse arguments + fsyss, err := fshelper.ParsePath(args) + if err != nil { + return err + } + if len(fsyss) == 0 { + log.Message("No file found matching the pattern: %s", strings.Join(args, ",")) + return errors.New("No file found matching the pattern: " + strings.Join(args, ",")) + } + + // create the adapter for folders + options.SupportedMedia = client.Immich.SupportedMedia() + upOptions.Filters = append(upOptions.Filters, options.ManageBurst.GroupFilter(), options.ManageRawJPG.GroupFilter(), options.ManageHEICJPG.GroupFilter()) + + options.InfoCollector = filenames.NewInfoCollector(app.GetTZ(), options.SupportedMedia) + adapter, err := folder.NewLocalFiles(ctx, app.Jnl(), options, fsyss...) 
+ if err != nil { + return err + } + + return newUpload(UpModeFolder, app, upOptions).run(ctx, adapter, app) + } + + return cmd +} diff --git a/app/cmd/upload/from-immich.go b/app/cmd/upload/from-immich.go new file mode 100644 index 00000000..2f486043 --- /dev/null +++ b/app/cmd/upload/from-immich.go @@ -0,0 +1,34 @@ +package upload + +import ( + "context" + + "github.com/simulot/immich-go/adapters/fromimmich" + "github.com/simulot/immich-go/app" + "github.com/spf13/cobra" +) + +func NewFromImmichCommand(ctx context.Context, parent *cobra.Command, app *app.Application, upOptions *UploadOptions) *cobra.Command { + cmd := &cobra.Command{ + Use: "from-immich [flags]", + Short: "Upload photos from another Immich server", + Args: cobra.MaximumNArgs(0), + } + cmd.SetContext(ctx) + options := &fromimmich.FromImmichFlags{} + options.AddFromImmichFlags(cmd, parent) + + cmd.RunE = func(cmd *cobra.Command, args []string) error { //nolint:contextcheck + // ready to run + ctx := cmd.Context() + + source, err := fromimmich.NewFromImmich(ctx, app, app.Jnl(), options) + if err != nil { + return err + } + + return newUpload(UpModeFolder, app, upOptions).run(ctx, source, app) + } + + return cmd +} diff --git a/app/cmd/upload/fromGooglePhotos.go b/app/cmd/upload/fromGooglePhotos.go new file mode 100644 index 00000000..9b731f3f --- /dev/null +++ b/app/cmd/upload/fromGooglePhotos.go @@ -0,0 +1,72 @@ +package upload + +import ( + "context" + "errors" + "path/filepath" + "strings" + + gp "github.com/simulot/immich-go/adapters/googlePhotos" + "github.com/simulot/immich-go/app" + "github.com/simulot/immich-go/internal/filenames" + "github.com/simulot/immich-go/internal/fshelper" + "github.com/spf13/cobra" +) + +func NewFromGooglePhotosCommand(ctx context.Context, parent *cobra.Command, app *app.Application, upOptions *UploadOptions) *cobra.Command { + cmd := &cobra.Command{ + Use: "from-google-photos [flags] | ", + Short: "Upload photos either from a zipped Google Photos takeout or 
decompressed archive", + Args: cobra.MinimumNArgs(1), + } + cmd.SetContext(ctx) + options := &gp.ImportFlags{} + options.AddFromGooglePhotosFlags(cmd, parent) + + cmd.RunE = func(cmd *cobra.Command, args []string) error { //nolint:contextcheck + ctx := cmd.Context() + log := app.Log() + client := app.Client() + + options.TZ = app.GetTZ() + + fsyss, err := fshelper.ParsePath(args) + if err != nil { + return err + } + if len(fsyss) == 0 { + log.Message("No file found matching the pattern: %s", strings.Join(args, ",")) + return errors.New("No file found matching the pattern: " + strings.Join(args, ",")) + } + + if options.TakeoutTag { + gotIt := false + for _, a := range args { + if filepath.Ext(a) == ".zip" { + options.TakeoutName = filepath.Base(a) + if len(options.TakeoutName) > 4+4 { + options.TakeoutName = "{takeout}/" + options.TakeoutName[:len(options.TakeoutName)-4-4] + gotIt = true + break + } + } + } + if !gotIt { + log.Message("Can't set the takeout tag: no .zip file in the arguments") + options.TakeoutTag = false + } + } + + upOptions.Filters = append(upOptions.Filters, options.ManageBurst.GroupFilter(), options.ManageRawJPG.GroupFilter(), options.ManageHEICJPG.GroupFilter()) + + options.SupportedMedia = client.Immich.SupportedMedia() + options.InfoCollector = filenames.NewInfoCollector(app.GetTZ(), options.SupportedMedia) + adapter, err := gp.NewTakeout(ctx, app.Jnl(), options, fsyss...) 
+ if err != nil { + return err + } + return newUpload(UpModeGoogleTakeout, app, upOptions).setTakeoutOptions(options).run(ctx, adapter, app) + } + + return cmd +} diff --git a/cmd/upload/noui.go b/app/cmd/upload/noui.go similarity index 54% rename from cmd/upload/noui.go rename to app/cmd/upload/noui.go index 7fb81075..f081cf33 100644 --- a/cmd/upload/noui.go +++ b/app/cmd/upload/noui.go @@ -8,11 +8,13 @@ import ( "sync/atomic" "time" - "github.com/simulot/immich-go/helpers/fileevent" + "github.com/simulot/immich-go/app" + "github.com/simulot/immich-go/internal/assets" + "github.com/simulot/immich-go/internal/fileevent" "golang.org/x/sync/errgroup" ) -func (app *UpCmd) runNoUI(ctx context.Context) error { +func (upCmd *UpCmd) runNoUI(ctx context.Context, app *app.Application) error { ctx, cancel := context.WithCancelCause(ctx) defer cancel(nil) @@ -28,7 +30,7 @@ func (app *UpCmd) runNoUI(ctx context.Context) error { } progressString := func() string { - counts := app.Jnl.GetCounts() + counts := app.Jnl().GetCounts() defer func() { spinIdx++ if spinIdx == len(spinner) { @@ -42,23 +44,7 @@ func (app *UpCmd) runNoUI(ctx context.Context) error { immichPct = 100 } - if app.GooglePhotos { - gpTotal := app.Jnl.TotalAssets() - gpProcessed := app.Jnl.TotalProcessedGP() - - gpPercent := int(100 * gpProcessed / gpTotal) - upProcessed := int64(0) - if preparationDone.Load() { - upProcessed = app.Jnl.TotalProcessed(app.ForceUploadWhenNoJSON) - } - upTotal := app.Jnl.TotalAssets() - upPercent := 100 * upProcessed / upTotal - - return fmt.Sprintf("\rImmich read %d%%, Assets found: %d, Google Photos Analysis: %d%%, Upload errors: %d, Uploaded %d%% %s", - immichPct, app.Jnl.TotalAssets(), gpPercent, counts[fileevent.UploadServerError], upPercent, string(spinner[spinIdx])) - } - - return fmt.Sprintf("\rImmich read %d%%, Assets found: %d, Upload errors: %d, Uploaded %d %s", immichPct, app.Jnl.TotalAssets(), counts[fileevent.UploadServerError], counts[fileevent.Uploaded], 
string(spinner[spinIdx])) + return fmt.Sprintf("\rImmich read %d%%, Assets found: %d, Upload errors: %d, Uploaded %d %s", immichPct, app.Jnl().TotalAssets(), counts[fileevent.UploadServerError], counts[fileevent.Uploaded], string(spinner[spinIdx])) } uiGrp := errgroup.Group{} @@ -84,27 +70,26 @@ func (app *UpCmd) runNoUI(ctx context.Context) error { uiGrp.Go(func() error { processGrp := errgroup.Group{} + var groupChan chan *assets.Group + var err error processGrp.Go(func() error { // Get immich asset - err := app.getImmichAssets(ctx, immichUpdate) + err := upCmd.getImmichAssets(ctx, immichUpdate) if err != nil { cancel(err) } return err }) processGrp.Go(func() error { - return app.getImmichAlbums(ctx) + return upCmd.getImmichAlbums(ctx) }) processGrp.Go(func() error { // Run Prepare - err := app.browser.Prepare(ctx) - if err != nil { - cancel(err) - } + groupChan = upCmd.adapter.Browse(ctx) return err }) - err := processGrp.Wait() + err = processGrp.Wait() if err != nil { err := context.Cause(ctx) if err != nil { @@ -113,21 +98,17 @@ func (app *UpCmd) runNoUI(ctx context.Context) error { } } preparationDone.Store(true) - err = app.uploadLoop(ctx) + err = upCmd.uploadLoop(ctx, groupChan) if err != nil { cancel(err) } - counts := app.Jnl.GetCounts() + counts := app.Jnl().GetCounts() messages := strings.Builder{} if counts[fileevent.Error]+counts[fileevent.UploadServerError] > 0 { messages.WriteString("Some errors have occurred. 
Look at the log file for details\n") } - if app.GooglePhotos && counts[fileevent.AnalysisMissingAssociatedMetadata] > 0 && !app.ForceUploadWhenNoJSON { - messages.WriteString(fmt.Sprintf("\n%d JSON files are missing.\n", counts[fileevent.AnalysisMissingAssociatedMetadata])) - messages.WriteString("- Verify if all takeout parts have been included in the processing.\n") - messages.WriteString("- Request another takeout, either for one year at a time or in smaller increments.\n") - } + if messages.Len() > 0 { cancel(errors.New(messages.String())) } @@ -139,6 +120,6 @@ func (app *UpCmd) runNoUI(ctx context.Context) error { if err != nil { err = context.Cause(ctx) } - app.Jnl.Report() + app.Jnl().Report() return err } diff --git a/app/cmd/upload/run.go b/app/cmd/upload/run.go new file mode 100644 index 00000000..e4f82e15 --- /dev/null +++ b/app/cmd/upload/run.go @@ -0,0 +1,381 @@ +package upload + +import ( + "context" + "errors" + "fmt" + + "github.com/gdamore/tcell/v2" + "github.com/simulot/immich-go/adapters" + gp "github.com/simulot/immich-go/adapters/googlePhotos" + "github.com/simulot/immich-go/app" + "github.com/simulot/immich-go/immich" + "github.com/simulot/immich-go/internal/assets" + "github.com/simulot/immich-go/internal/fileevent" + "github.com/simulot/immich-go/internal/filters" +) + +type UpCmd struct { + Mode UpLoadMode + *UploadOptions + app *app.Application + + AssetIndex *AssetIndex // List of assets present on the server + deleteServerList []*immich.Asset // List of server assets to remove + + adapter adapters.Reader + DebugCounters bool // Enable CSV action counters per file + + Paths []string // Path to explore + albums map[string]assets.Album // Albums by title + + takeoutOptions *gp.ImportFlags +} + +func newUpload(mode UpLoadMode, app *app.Application, options *UploadOptions) *UpCmd { + upCmd := &UpCmd{ + UploadOptions: options, + app: app, + Mode: mode, + } + return upCmd +} + +func (upCmd *UpCmd) setTakeoutOptions(options *gp.ImportFlags) 
*UpCmd { + upCmd.takeoutOptions = options + return upCmd +} + +func (upCmd *UpCmd) run(ctx context.Context, adapter adapters.Reader, app *app.Application) error { + upCmd.adapter = adapter + + if upCmd.NoUI { + return upCmd.runNoUI(ctx, app) + } + _, err := tcell.NewScreen() + if err != nil { + upCmd.app.Log().Error("can't initialize the screen for the UI mode. Falling back to no-gui mode") + fmt.Println("can't initialize the screen for the UI mode. Falling back to no-gui mode") + return upCmd.runNoUI(ctx, app) + } + return upCmd.runUI(ctx, app) +} + +func (upCmd *UpCmd) getImmichAlbums(ctx context.Context) error { + serverAlbums, err := upCmd.app.Client().Immich.GetAllAlbums(ctx) + upCmd.albums = map[string]assets.Album{} + if err != nil { + return fmt.Errorf("can't get the album list from the server: %w", err) + } + for _, a := range serverAlbums { + select { + case <-ctx.Done(): + return ctx.Err() + default: + upCmd.albums[a.Title] = a + } + } + return nil +} + +func (upCmd *UpCmd) getImmichAssets(ctx context.Context, updateFn progressUpdate) error { + statistics, err := upCmd.app.Client().Immich.GetAssetStatistics(ctx) + if err != nil { + return err + } + totalOnImmich := statistics.Total + received := 0 + + var list []*immich.Asset + + err = upCmd.app.Client().Immich.GetAllAssetsWithFilter(ctx, nil, func(a *immich.Asset) error { + select { + case <-ctx.Done(): + return ctx.Err() + default: + received++ + list = append(list, a) + if updateFn != nil { + updateFn(received, totalOnImmich) + } + return nil + } + }) + if err != nil { + return err + } + if updateFn != nil { + updateFn(totalOnImmich, totalOnImmich) + } + upCmd.AssetIndex = &AssetIndex{ + assets: list, + } + upCmd.AssetIndex.ReIndex() + return nil +} + +func (upCmd *UpCmd) uploadLoop(ctx context.Context, groupChan chan *assets.Group) error { + var err error +assetLoop: + for { + select { + case <-ctx.Done(): + return ctx.Err() + + case g, ok := <-groupChan: + if !ok { + break assetLoop + } + err = 
upCmd.handleGroup(ctx, g) + if err != nil { + return err + } + } + } + + if len(upCmd.deleteServerList) > 0 { + ids := []string{} + for _, da := range upCmd.deleteServerList { + ids = append(ids, da.ID) + } + err := upCmd.DeleteServerAssets(ctx, ids) + if err != nil { + return fmt.Errorf("can't delete server's assets: %w", err) + } + } + + return err +} + +func (upCmd *UpCmd) handleGroup(ctx context.Context, g *assets.Group) error { + var errGroup error + + g = filters.ApplyFilters(g, upCmd.UploadOptions.Filters...) + + // discard rejected assets + for _, a := range g.Removed { + a.Asset.Close() + upCmd.app.Jnl().Record(ctx, fileevent.DiscoveredDiscarded, a.Asset.File, "reason", a.Reason) + } + + // Upload assets from the group + for _, a := range g.Assets { + err := upCmd.handleAsset(ctx, g, a) + errGroup = errors.Join(errGroup, err) + } + + // Manage albums + if len(g.Albums) > 0 { + upCmd.manageGroupAlbums(ctx, g) + } + + // Manage groups + // after the filtering and the upload, we can stack the assets + + if len(g.Assets) > 1 && g.Grouping != assets.GroupByNone { + client := upCmd.app.Client().Immich.(immich.ImmichStackInterface) + ids := []string{g.Assets[g.CoverIndex].ID} + for i, a := range g.Assets { + upCmd.app.Jnl().Record(ctx, fileevent.Stacked, g.Assets[i].File) + if i != g.CoverIndex { + ids = append(ids, a.ID) + } + } + _, err := client.CreateStack(ctx, ids) + if err != nil { + upCmd.app.Jnl().Log().Error("Can't create stack", "error", err) + } + } + + if errGroup != nil { + return errGroup + } + + switch g.Grouping { + case assets.GroupByNone: + } + + return nil +} + +func (upCmd *UpCmd) handleAsset(ctx context.Context, g *assets.Group, a *assets.Asset) error { + defer func() { + a.Close() // Close and clean resources linked to the local asset + }() + + advice, err := upCmd.AssetIndex.ShouldUpload(a) + if err != nil { + return err + } + + switch advice.Advice { + case NotOnServer: // Upload and manage albums + err = upCmd.uploadAsset(ctx, a) + if err != nil { 
+ return err + } + return upCmd.manageAssetTags(ctx, a) + case SmallerOnServer: // Upload, manage albums and delete the server's asset + upCmd.app.Jnl().Record(ctx, fileevent.UploadUpgraded, a, "reason", advice.Message) + + // Remember existing asset's albums, if any + for _, al := range advice.ServerAsset.Albums { + g.AddAlbum(assets.Album{ + Title: al.AlbumName, + Description: al.Description, + }) + } + + // Upload the superior asset + err = upCmd.uploadAsset(ctx, a) + if err != nil { + return err + } + err = upCmd.manageAssetTags(ctx, a) + if err != nil { + return err + } + + // delete the existing lower quality asset + err = upCmd.app.Client().Immich.DeleteAssets(ctx, []string{advice.ServerAsset.ID}, true) + if err != nil { + upCmd.app.Jnl().Record(ctx, fileevent.Error, nil, "error", err.Error()) + } + return err + + case SameOnServer: + a.ID = advice.ServerAsset.ID + for _, al := range advice.ServerAsset.Albums { + g.AddAlbum(assets.Album{ + Title: al.AlbumName, + Description: al.Description, + }) + } + upCmd.app.Jnl().Record(ctx, fileevent.UploadServerDuplicate, a.File, "reason", advice.Message) + // err = upCmd.manageAssetTags(ctx, a) + // if err != nil { + // return err + // } + + case BetterOnServer: // and manage albums + a.ID = advice.ServerAsset.ID + upCmd.app.Jnl().Record(ctx, fileevent.UploadServerBetter, a.File, "reason", advice.Message) + // err = upCmd.manageAssetTags(ctx, a) + // if err != nil { + // return err + // } + } + return nil +} + +// uploadAsset uploads the asset to the server. +// set the server's asset ID to the asset. 
+func (upCmd *UpCmd) uploadAsset(ctx context.Context, a *assets.Asset) error { + defer upCmd.app.Log().Debug("", "file", a) + ar, err := upCmd.app.Client().Immich.AssetUpload(ctx, a) + if err != nil { + upCmd.app.Jnl().Record(ctx, fileevent.UploadServerError, a.File, "error", err.Error()) + return err // Must signal the error to the caller + } + if ar.Status == immich.UploadDuplicate { + upCmd.app.Jnl().Record(ctx, fileevent.UploadServerDuplicate, a.File, "reason", "the server has this file") + } else { + upCmd.app.Jnl().Record(ctx, fileevent.Uploaded, a.File) + } + a.ID = ar.ID + + if a.Description != "" || (a.Latitude != 0 && a.Longitude != 0) || a.Rating != 0 || !a.CaptureDate.IsZero() { + _, err := upCmd.app.Client().Immich.UpdateAsset(ctx, a.ID, immich.UpdAssetField{ + Description: a.Description, + Latitude: a.Latitude, + Longitude: a.Longitude, + Rating: a.Rating, + DateTimeOriginal: a.CaptureDate, + }) + if err != nil { + upCmd.app.Jnl().Record(ctx, fileevent.UploadServerError, a.File, "error", err.Error()) + return err + } + } + return nil +} + +// manageGroupAlbums add the assets to the albums listed in the group. +// If an album does not exist, it is created. +// Errors are logged. 
+func (upCmd *UpCmd) manageGroupAlbums(ctx context.Context, g *assets.Group) { + assetIDs := []string{} + for _, a := range g.Assets { + assetIDs = append(assetIDs, a.ID) + } + + for _, album := range g.Albums { + title := album.Title + l, exist := upCmd.albums[title] + if !exist { + newAl, err := upCmd.app.Client().Immich.CreateAlbum(ctx, title, album.Description, assetIDs) + if err != nil { + upCmd.app.Jnl().Record(ctx, fileevent.Error, nil, "error", err) + } + upCmd.albums[title] = newAl + l = newAl + } else { + _, err := upCmd.app.Client().Immich.AddAssetToAlbum(ctx, l.ID, assetIDs) + if err != nil { + upCmd.app.Jnl().Record(ctx, fileevent.Error, nil, "error", err) + return + } + } + + // Log the action + for _, a := range g.Assets { + upCmd.app.Jnl().Record(ctx, fileevent.UploadAddToAlbum, a.File, "Album", title) + } + } +} + +func (upCmd *UpCmd) manageAssetTags(ctx context.Context, a *assets.Asset) error { // nolint + if len(a.Tags) > 0 { + ss := []string{} + for _, t := range a.Tags { + tags, err := upCmd.app.Client().Immich.UpsertTags(ctx, []string{t.Value}) + if err != nil { + upCmd.app.Jnl().Record(ctx, fileevent.Error, a.File, "error", err.Error()) + continue + } + for _, t := range tags { + _, err = upCmd.app.Client().Immich.TagAssets(ctx, t.ID, []string{a.ID}) + if err != nil { + upCmd.app.Jnl().Record(ctx, fileevent.Error, a.File, "error", err.Error()) + } + ss = append(ss, t.Value) + } + } + upCmd.app.Jnl().Record(ctx, fileevent.Tagged, a.File, "tags", ss) + } + return nil +} + +func (upCmd *UpCmd) DeleteServerAssets(ctx context.Context, ids []string) error { + upCmd.app.Log().Message("%d server assets to delete.", len(ids)) + return upCmd.app.Client().Immich.DeleteAssets(ctx, ids, false) +} + +/* +func (app *UpCmd) DeleteLocalAssets() error { + app.RootImmichFlags.Message(fmt.Sprintf("%d local assets to delete.", len(app.deleteLocalList))) + + for _, a := range app.deleteLocalList { + if !app.DryRun { + app.Log.Info(fmt.Sprintf("delete file %q", 
a.Title)) + err := a.Remove() + if err != nil { + return err + } + } else { + app.Log.Info(fmt.Sprintf("file %q not deleted, dry run mode.", a.Title)) + } + } + return nil +} +*/ diff --git a/cmd/upload/ui.go b/app/cmd/upload/ui.go similarity index 76% rename from cmd/upload/ui.go rename to app/cmd/upload/ui.go index 5172749f..30e95e87 100644 --- a/cmd/upload/ui.go +++ b/app/cmd/upload/ui.go @@ -4,8 +4,6 @@ import ( "context" "errors" "fmt" - "io" - "log/slog" "strings" "sync/atomic" "time" @@ -13,38 +11,49 @@ import ( "github.com/gdamore/tcell/v2" "github.com/navidys/tvxwidgets" "github.com/rivo/tview" - "github.com/simulot/immich-go/helpers/fileevent" + "github.com/simulot/immich-go/app" + "github.com/simulot/immich-go/internal/assets" + "github.com/simulot/immich-go/internal/fileevent" "golang.org/x/sync/errgroup" ) type uiPage struct { - // app *UpCmd - screen *tview.Grid - footer *tview.Grid - prepareCounts *tview.Grid - uploadCounts *tview.Grid - serverJobs *tvxwidgets.Sparkline - logView *tview.TextView - counts map[fileevent.Code]*tview.TextView - prevSlog *slog.Logger + screen *tview.Grid + footer *tview.Grid + prepareCounts *tview.Grid + uploadCounts *tview.Grid + serverJobs *tvxwidgets.Sparkline + logView *tview.TextView + counts map[fileevent.Code]*tview.TextView + + // server's activity history serverActivity []float64 - // prevLogFile io.WriteCloser + + // detect when the server is idling lastTimeServerActive atomic.Int64 + // gauges immichReading *tvxwidgets.PercentageModeGauge immichPrepare *tvxwidgets.PercentageModeGauge immichUpload *tvxwidgets.PercentageModeGauge - // page *tview.Application watchJobs bool - // quitting chan any } -func (app *UpCmd) runUI(ctx context.Context) error { +func (ui *uiPage) highJackLogger(app *app.Application) { + ui.logView.SetDynamicColors(true) + app.Jnl().SetLogger(app.Log().SetLogWriter(tview.ANSIWriter(ui.logView))) +} + +func (ui *uiPage) restoreLogger(app *app.Application) { + 
app.Jnl().SetLogger(app.Log().SetLogWriter(nil)) +} + +func (upCmd *UpCmd) runUI(ctx context.Context, app *app.Application) error { ctx, cancel := context.WithCancelCause(ctx) uiApp := tview.NewApplication() - ui := newUI(ctx, app) + ui := upCmd.newUI(ctx, app) defer cancel(nil) pages := tview.NewPages() @@ -69,7 +78,7 @@ func (app *UpCmd) runUI(ctx context.Context) error { uiApp.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey { switch event.Key() { case tcell.KeyCtrlQ, tcell.KeyCtrlC: - app.Log = ui.prevSlog + ui.restoreLogger(app) cancel(errors.New("interrupted: Ctrl+C or Ctrl+Q pressed")) case tcell.KeyEnter: if uploadDone.Load() { @@ -89,7 +98,7 @@ func (app *UpCmd) runUI(ctx context.Context) error { tick.Stop() return case <-tick.C: - jobs, err := app.Immich.GetJobs(ctx) + jobs, err := upCmd.app.Client().Immich.GetJobs(ctx) if err == nil { jobCount := 0 jobWaiting := 0 @@ -123,18 +132,18 @@ func (app *UpCmd) runUI(ctx context.Context) error { return case <-tick.C: uiApp.QueueUpdateDraw(func() { - counts := app.Jnl.GetCounts() + counts := app.Jnl().GetCounts() for c := range ui.counts { ui.getCountView(c, counts[c]) } - if app.GooglePhotos { - ui.immichPrepare.SetMaxValue(int(app.Jnl.TotalAssets())) - ui.immichPrepare.SetValue(int(app.Jnl.TotalProcessedGP())) + if upCmd.Mode == UpModeGoogleTakeout { + ui.immichPrepare.SetMaxValue(int(app.Jnl().TotalAssets())) + ui.immichPrepare.SetValue(int(app.Jnl().TotalProcessedGP())) if preparationDone.Load() { - ui.immichUpload.SetMaxValue(int(app.Jnl.TotalAssets())) + ui.immichUpload.SetMaxValue(int(app.Jnl().TotalAssets())) } - ui.immichUpload.SetValue(int(app.Jnl.TotalProcessed(app.ForceUploadWhenNoJSON))) + ui.immichUpload.SetValue(int(app.Jnl().TotalProcessed(upCmd.takeoutOptions.KeepJSONLess))) } }) } @@ -155,17 +164,19 @@ func (app *UpCmd) runUI(ctx context.Context) error { // start the processes uiGroup.Go(func() error { + var groupChan chan *assets.Group + var err error processGrp := errgroup.Group{} 
processGrp.Go(func() error { // Get immich asset - err := app.getImmichAssets(ctx, ui.updateImmichReading) + err := upCmd.getImmichAssets(ctx, ui.updateImmichReading) if err != nil { stopUI(err) } return err }) processGrp.Go(func() error { - err := app.getImmichAlbums(ctx) + err := upCmd.getImmichAlbums(ctx) if err != nil { stopUI(err) } @@ -173,35 +184,27 @@ }) processGrp.Go(func() error { // Run Prepare - err := app.browser.Prepare(ctx) - if err != nil { - stopUI(err) - } - return err + groupChan = upCmd.adapter.Browse(ctx) + return nil }) // Wait the end of the preparation: immich assets, albums and first browsing - err := processGrp.Wait() + err = processGrp.Wait() if err != nil { return context.Cause(ctx) } preparationDone.Store(true) // we can upload assets - err = app.uploadLoop(ctx) + err = upCmd.uploadLoop(ctx, groupChan) if err != nil { return context.Cause(ctx) } uploadDone.Store(true) - counts := app.Jnl.GetCounts() + counts := app.Jnl().GetCounts() if counts[fileevent.Error]+counts[fileevent.UploadServerError] > 0 { messages.WriteString("Some errors have occurred. 
Look at the log file for details\n") } - if app.GooglePhotos && counts[fileevent.AnalysisMissingAssociatedMetadata] > 0 && !app.ForceUploadWhenNoJSON { - messages.WriteString(fmt.Sprintf("\n%d JSON files are missing.\n", counts[fileevent.AnalysisMissingAssociatedMetadata])) - messages.WriteString("- Verify if all takeout parts have been included in the processing.\n") - messages.WriteString("- Request another takeout, either for one year at a time or in smaller increments.\n") - } modal := newModal(messages.String()) pages.AddPage("modal", modal, true, false) @@ -218,7 +221,7 @@ func (app *UpCmd) runUI(ctx context.Context) error { } // Time to leave - app.Jnl.Report() + app.Jnl().Report() if messages.Len() > 0 { return (errors.New(messages.String())) } @@ -247,14 +250,14 @@ func newModal(message string) tview.Primitive { return modal(text, 80, 2+lines) } -func newUI(ctx context.Context, app *UpCmd) *uiPage { +func (upCmd *UpCmd) newUI(ctx context.Context, a *app.Application) *uiPage { ui := &uiPage{ counts: map[fileevent.Code]*tview.TextView{}, } ui.screen = tview.NewGrid() - ui.screen.AddItem(tview.NewTextView().SetText(app.Banner.String()), 0, 0, 1, 1, 0, 0, false) + ui.screen.AddItem(tview.NewTextView().SetText(app.Banner()), 0, 0, 1, 1, 0, 0, false) ui.prepareCounts = tview.NewGrid() ui.prepareCounts.SetBorder(true).SetTitle("Input analysis") @@ -281,7 +284,7 @@ func newUI(ctx context.Context, app *UpCmd) *uiPage { ui.addCounter(ui.uploadCounts, 5, "Server has better quality", fileevent.UploadServerBetter) ui.uploadCounts.SetSize(6, 2, 1, 1).SetColumns(30, 10) - if _, err := app.Immich.GetJobs(ctx); err == nil { + if _, err := a.Client().Immich.GetJobs(ctx); err == nil { ui.watchJobs = true ui.serverJobs = tvxwidgets.NewSparkline() @@ -305,15 +308,8 @@ func newUI(ctx context.Context, app *UpCmd) *uiPage { // Hijack the log ui.logView = tview.NewTextView().SetMaxLines(100).ScrollToEnd() - ui.prevSlog = app.SharedFlags.Log + ui.highJackLogger(a) - if 
app.SharedFlags.LogWriterCloser != nil { - w := io.MultiWriter(app.SharedFlags.LogWriterCloser, ui.logView) - app.SetLogWriter(w) - } else { - app.SetLogWriter(ui.logView) - } - app.SharedFlags.Jnl.SetLogger(app.SharedFlags.Log) ui.logView.SetBorder(true).SetTitle("Log") ui.screen.AddItem(ui.logView, 2, 0, 1, 1, 0, 0, false) @@ -334,7 +330,8 @@ func newUI(ctx context.Context, app *UpCmd) *uiPage { ui.footer = tview.NewGrid() ui.footer.AddItem(tview.NewTextView().SetText("Immich content:").SetTextAlign(tview.AlignCenter), 0, 0, 1, 1, 0, 0, false).AddItem(ui.immichReading, 0, 1, 1, 1, 0, 0, false) - if app.GooglePhotos { + + if upCmd.Mode == UpModeGoogleTakeout { ui.footer.AddItem(tview.NewTextView().SetText("Google Photo puzzle:").SetTextAlign(tview.AlignCenter), 0, 2, 1, 1, 0, 0, false).AddItem(ui.immichPrepare, 0, 3, 1, 1, 0, 0, false) ui.footer.AddItem(tview.NewTextView().SetText("Uploading:").SetTextAlign(tview.AlignCenter), 0, 4, 1, 1, 0, 0, false).AddItem(ui.immichUpload, 0, 5, 1, 1, 0, 0, false) ui.footer.SetColumns(25, 0, 25, 0, 25, 0) @@ -351,25 +348,25 @@ func newUI(ctx context.Context, app *UpCmd) *uiPage { type progressUpdate func(value, maxValue int) // call back to get the progression -func (p *uiPage) updateImmichReading(value, total int) { +func (ui *uiPage) updateImmichReading(value, total int) { if value == 0 && total == 0 { total, value = 100, 100 } - p.immichReading.SetMaxValue(total) - p.immichReading.SetValue(value) + ui.immichReading.SetMaxValue(total) + ui.immichReading.SetValue(value) } -func (p *uiPage) getCountView(c fileevent.Code, count int64) *tview.TextView { - v, ok := p.counts[c] +func (ui *uiPage) getCountView(c fileevent.Code, count int64) *tview.TextView { + v, ok := ui.counts[c] if !ok { v = tview.NewTextView() - p.counts[c] = v + ui.counts[c] = v } v.SetText(fmt.Sprintf("%6d", count)) return v } -func (p *uiPage) addCounter(g *tview.Grid, row int, label string, counter fileevent.Code) { +func (ui *uiPage) addCounter(g 
*tview.Grid, row int, label string, counter fileevent.Code) { g.AddItem(tview.NewTextView().SetText(label), row, 0, 1, 1, 0, 0, false) - g.AddItem(p.getCountView(counter, 0), row, 1, 1, 1, 0, 0, false) + g.AddItem(ui.getCountView(counter, 0), row, 1, 1, 1, 0, 0, false) } diff --git a/app/cmd/upload/upload.go b/app/cmd/upload/upload.go new file mode 100644 index 00000000..feef4895 --- /dev/null +++ b/app/cmd/upload/upload.go @@ -0,0 +1,69 @@ +package upload + +import ( + "context" + "time" + + "github.com/simulot/immich-go/app" + "github.com/simulot/immich-go/internal/fileevent" + "github.com/simulot/immich-go/internal/filters" + "github.com/spf13/cobra" +) + +type UpLoadMode int + +const ( + UpModeGoogleTakeout UpLoadMode = iota + UpModeFolder +) + +func (m UpLoadMode) String() string { + switch m { + case UpModeGoogleTakeout: + return "Google Takeout" + case UpModeFolder: + return "Folder" + default: + return "Unknown" + } +} + +// UploadOptions represents a set of common flags used for filtering assets. 
+type UploadOptions struct { + // TODO place this option at the top + NoUI bool // Disable UI + + Filters []filters.Filter +} + +// NewUploadCommand adds the Upload command +func NewUploadCommand(ctx context.Context, a *app.Application) *cobra.Command { + options := &UploadOptions{} + cmd := &cobra.Command{ + Use: "upload", + Short: "Upload photos to an Immich server from various sources", + } + app.AddClientFlags(ctx, cmd, a) + cmd.TraverseChildren = true + cmd.PersistentFlags().BoolVar(&options.NoUI, "no-ui", false, "Disable the user interface") + cmd.PersistentPreRunE = app.ChainRunEFunctions(cmd.PersistentPreRunE, options.Open, ctx, cmd, a) + + cmd.AddCommand(NewFromFolderCommand(ctx, cmd, a, options)) + cmd.AddCommand(NewFromGooglePhotosCommand(ctx, cmd, a, options)) + cmd.AddCommand(NewFromImmichCommand(ctx, cmd, a, options)) + return cmd +} + +func (options *UploadOptions) Open(ctx context.Context, cmd *cobra.Command, app *app.Application) error { + // Initialize the Journal + if app.Jnl() == nil { + app.SetJnl(fileevent.NewRecorder(app.Log().Logger)) + } + app.SetTZ(time.Local) + if tz, err := cmd.Flags().GetString("time-zone"); err == nil { + if loc, err := time.LoadLocation(tz); err == nil { + app.SetTZ(loc) + } + } + return nil +} diff --git a/app/cmd/upload/upload.gono b/app/cmd/upload/upload.gono new file mode 100644 index 00000000..d9def501 --- /dev/null +++ b/app/cmd/upload/upload.gono @@ -0,0 +1,218 @@ +// Command Upload + +package upload + +import ( + "errors" + + "github.com/simulot/immich-go/adapters" + "github.com/simulot/immich-go/adapters/folder" + gp "github.com/simulot/immich-go/adapters/googlePhotos" + "github.com/simulot/immich-go/cmd" + "github.com/simulot/immich-go/immich" + "github.com/simulot/immich-go/internal/fileevent" + "github.com/spf13/cobra" +) + +type UpCmd struct { + UploadCmd *cobra.Command // The import command + Jnl *fileevent.Recorder // File event recorder + Root *cmd.RootImmichFlags // global flags + Server 
*cmd.ImmichServerFlags // server flags attached to the import command + *CommonFlags // Common flags between import sub-commands + UploadFolderFlags *folder.ImportFlags // Folder import flags + GooglePhotosFlags *gp.ImportFlags // Google Photos import flags + + AssetIndex *AssetIndex // List of assets present on the server + deleteServerList []*immich.Asset // List of server assets to remove + // deleteLocalList []*adapters.LocalAssetFile // List of local assets to remove + // stacks *stacking.StackBuilder + browser adapters.Adapter + DebugCounters bool // Enable CSV action counters per file + + // fsyss []fs.FS // pseudo file system to browse + Paths []string // Path to explore + albums map[string]immich.AlbumSimplified // Albums by title +} + +func AddCommand(root *cmd.RootImmichFlags) { + upCommand := &cobra.Command{ + Use: "upload", + Short: "upload photos and videos on the immich sever", + } + + upCommand.RunE = func(cmd *cobra.Command, args []string) error { + return errors.New("the upload command need a valid sub command") + } + root.Command.AddCommand(upCommand) + addFromFolderCommand(upCommand, root) + addFromGooglePhotosCommand(upCommand, root) +} + +/* + +func UploadCommand(ctx context.Context, common *cmd.RootImmichFlags, args []string) error { + app, err := newCommand(ctx, common, args, nil) + if err != nil { + return err + } + if len(app.fsyss) == 0 { + return nil + } + return app.run(ctx) +} + +type fsOpener func() ([]fs.FS, error) + +func newCommand(ctx context.Context, common *cmd.RootImmichFlags, args []string, fsOpener fsOpener) (*UpCmd, error) { + var err error + cmd := flag.NewFlagSet("upload", flag.ExitOnError) + + app := UpCmd{ + RootImmichFlags: common, + } + app.BannedFiles, err = namematcher.New( + `@eaDir/`, + `@__thumb/`, // QNAP + `SYNOFILE_THUMB_*.*`, // SYNOLOGY + `Lightroom Catalog/`, // LR + `thumbnails/`, // Android photo + `.DS_Store/`, // Mac OS custom attributes + ) + if err != nil { + return nil, err + } + + // 
app.RootImmichFlags.SetFlags(cmd) + cmd.BoolFunc( + "dry-run", + "display actions but don't touch source or destination", + myflag.BoolFlagFn(&app.DryRun, false)) + cmd.Var(&app.DateRange, + "date", + "Date of capture range.") + cmd.StringVar(&app.ImportIntoAlbum, + "album", + "", + "All assets will be added to this album.") + cmd.BoolFunc( + "create-album-folder", + " folder import only: Create albums for assets based on the parent folder", + myflag.BoolFlagFn(&app.CreateAlbumAfterFolder, false)) + cmd.BoolFunc( + "use-full-path-album-name", + " folder import only: Use the full path towards the asset for determining the Album name", + myflag.BoolFlagFn(&app.UseFullPathAsAlbumName, false)) + cmd.StringVar(&app.AlbumNamePathSeparator, + "album-name-path-separator", + " ", + " when use-full-path-album-name = true, determines how multiple (sub) folders, if any, will be joined") + cmd.BoolFunc( + "google-photos", + "Import GooglePhotos takeout zip files", + myflag.BoolFlagFn(&app.GooglePhotos, false)) + cmd.BoolFunc( + "create-albums", + " google-photos only: Create albums like there were in the source (default: TRUE)", + myflag.BoolFlagFn(&app.CreateAlbums, true)) + cmd.StringVar(&app.PartnerAlbum, + "partner-album", + "", + " google-photos only: Assets from partner will be added to this album. 
(ImportIntoAlbum, must already exist)") + cmd.BoolFunc( + "keep-partner", + " google-photos only: Import also partner's items (default: TRUE)", myflag.BoolFlagFn(&app.KeepPartner, true)) + cmd.StringVar(&app.ImportFromAlbum, + "from-album", + "", + " google-photos only: Import only from this album") + + cmd.BoolFunc( + "keep-untitled-albums", + " google-photos only: Keep Untitled albums and imports their contain (default: FALSE)", myflag.BoolFlagFn(&app.KeepUntitled, false)) + + cmd.BoolFunc( + "use-album-folder-as-name", + " google-photos only: Use folder name and ignore albums' title (default:FALSE)", myflag.BoolFlagFn(&app.UseFolderAsAlbumName, false)) + + cmd.BoolFunc( + "discard-archived", + " google-photos only: Do not import archived photos (default FALSE)", myflag.BoolFlagFn(&app.DiscardArchived, false)) + + cmd.BoolFunc( + "auto-archive", + " google-photos only: Automatically archive photos that are also archived in google photos (default TRUE)", myflag.BoolFlagFn(&app.AutoArchive, true)) + + cmd.BoolFunc( + "create-stacks", + "Stack jpg/raw or bursts (default FALSE)", myflag.BoolFlagFn(&app.CreateStacks, false)) + + cmd.BoolFunc( + "stack-jpg-raw", + "Control the stacking of jpg/raw photos (default TRUE)", myflag.BoolFlagFn(&app.StackJpgRaws, false)) + cmd.BoolFunc( + "stack-burst", + "Control the stacking bursts (default TRUE)", myflag.BoolFlagFn(&app.StackBurst, false)) + + // cmd.BoolVar(&app.Delete, "delete", false, "Delete local assets after upload") + + cmd.Var(&app.BrowserConfig.SelectExtensions, "select-types", "list of selected extensions separated by a comma") + cmd.Var(&app.BrowserConfig.ExcludeExtensions, "exclude-types", "list of excluded extensions separated by a comma") + + cmd.StringVar(&app.WhenNoDate, + "when-no-date", + "FILE", + " When the date of take can't be determined, use the FILE's date or the current time NOW. (default: FILE)") + + cmd.Var(&app.BannedFiles, "exclude-files", "Ignore files based on a pattern. Case insensitive. 
Add one option for each pattern do you need.") + + cmd.BoolVar(&app.ForceUploadWhenNoJSON, "upload-when-missing-JSON", app.ForceUploadWhenNoJSON, "when true, photos are upload even without associated JSON file.") + cmd.BoolVar(&app.DebugFileList, "debug-file-list", app.DebugFileList, "Check how the your file list would be processed") + + err = cmd.Parse(args) + if err != nil { + return nil, err + } + + if app.DebugFileList { + if len(cmd.Args()) < 2 { + return nil, fmt.Errorf("the option -debug-file-list requires a file name and a date format") + } + app.LogFile = strings.TrimSuffix(cmd.Arg(0), filepath.Ext(cmd.Arg(0))) + ".log" + _ = os.Remove(app.LogFile) + + fsOpener = func() ([]fs.FS, error) { + return fakefs.ScanFileList(cmd.Arg(0), cmd.Arg(1)) + } + } else { + } + + app.WhenNoDate = strings.ToUpper(app.WhenNoDate) + switch app.WhenNoDate { + case "FILE", "NOW": + default: + return nil, fmt.Errorf("the -when-no-date accepts FILE or NOW") + } + + app.BrowserConfig.Validate() + err = app.RootImmichFlags.Start(ctx) + if err != nil { + return nil, err + } + + if fsOpener == nil { + fsOpener = func() ([]fs.FS, error) { + return fshelper.ParsePath(cmd.Args()) + } + } + app.fsyss, err = fsOpener() + if err != nil { + return nil, err + } + if len(app.fsyss) == 0 { + fmt.Println("No file found matching the pattern: ", strings.Join(cmd.Args(), ",")) + app.Log.Info("No file found matching the pattern: " + strings.Join(cmd.Args(), ",")) + } + return &app, nil +} +*/ diff --git a/cmd/upload/upload_test.go b/app/cmd/upload/upload_test.nogo similarity index 99% rename from cmd/upload/upload_test.go rename to app/cmd/upload/upload_test.nogo index 9e548008..26204108 100644 --- a/cmd/upload/upload_test.go +++ b/app/cmd/upload/upload_test.nogo @@ -13,7 +13,7 @@ import ( "github.com/simulot/immich-go/browser" "github.com/simulot/immich-go/cmd" "github.com/simulot/immich-go/helpers/fileevent" - "github.com/simulot/immich-go/helpers/gen" + 
"github.com/simulot/immich-go/internal/gen" "github.com/simulot/immich-go/immich" ) @@ -534,7 +534,7 @@ func TestUpload(t *testing.T) { ctx := context.Background() log := slog.New(slog.NewTextHandler(io.Discard, nil)) - serv := cmd.SharedFlags{ + serv := cmd.RootImmichFlags{ Immich: ic, Jnl: fileevent.NewRecorder(log, false), Log: log, diff --git a/app/log.go b/app/log.go new file mode 100644 index 00000000..3eb60f7a --- /dev/null +++ b/app/log.go @@ -0,0 +1,179 @@ +package app + +import ( + "context" + "fmt" + "io" + "log/slog" + "os" + "strings" + "time" + + "github.com/phsym/console-slog" + slogmulti "github.com/samber/slog-multi" + "github.com/simulot/immich-go/internal/configuration" + "github.com/spf13/cobra" + "github.com/spf13/pflag" + "github.com/spf13/viper" +) + +type Log struct { + *slog.Logger // Logger + + Type string // Log format : text|json + File string // Log file name + Level string // Indicate the log level (string) + + sLevel slog.Level // the log level value + + mainWriter io.Writer // the log writer to file + consoleWriter io.Writer +} + +func AddLogFlags(ctx context.Context, cmd *cobra.Command, app *Application) { + log := app.Log() + cmd.PersistentFlags().StringVar(&log.Level, "log-level", "INFO", "Log level (DEBUG|INFO|WARN|ERROR), default INFO") + cmd.PersistentFlags().StringVarP(&log.File, "log-file", "l", "", "Write log messages into the file") + cmd.PersistentFlags().StringVar(&log.Type, "log-type", "text", "Log formatted as text of JSON file") + + cmd.PersistentPreRunE = ChainRunEFunctions(cmd.PersistentPreRunE, log.Open, ctx, cmd, app) + cmd.PersistentPostRunE = ChainRunEFunctions(cmd.PersistentPostRunE, log.Close, ctx, cmd, app) +} + +func (log *Log) OpenLogFile() error { + var w io.WriteCloser + + if log.File == "" { + log.File = configuration.DefaultLogFile() + } + if log.File != "" { + if log.mainWriter == nil { + err := configuration.MakeDirForFile(log.File) + if err != nil { + return err + } + w, err = os.OpenFile(log.File, 
os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0o664) + if err != nil { + return err + } + err = log.sLevel.UnmarshalText([]byte(strings.ToUpper(log.Level))) + if err != nil { + return err + } + log.Message("Log file: %s", log.File) + } + } else { + w = os.Stdout + } + log.setHandlers(w, nil) + return nil +} + +func (log *Log) Open(ctx context.Context, cmd *cobra.Command, app *Application) error { + if cmd.Name() == "version" { + // No log for version command + return nil + } + fmt.Println(Banner()) + err := log.OpenLogFile() + if err != nil { + return err + } + // List flags + log.Info(GetVersion()) + + log.Info(fmt.Sprintf("Command: %s", cmd.Use)) + log.Info("Flags:") + cmd.Flags().VisitAll(func(flag *pflag.Flag) { + val := flag.Value.String() + if val == "" { + if v := viper.GetString(flag.Name); v != "" { + val = v + } + } + if flag.Name == "api-key" && len(val) > 4 { + val = strings.Repeat("*", len(val)-4) + val[len(val)-4:] + } + log.Info("", "--"+flag.Name, val) + }) + + // List arguments + log.Info("Arguments:") + for _, arg := range cmd.Flags().Args() { + log.Info(fmt.Sprintf(" %q", arg)) + } + return nil +} + +/* +func replaceAttr(groups []string, a slog.Attr) slog.Attr { + if a.Key == slog.LevelKey { + level := a.Value.Any().(slog.Level) + a.Value = slog.StringValue(fmt.Sprintf("%-7s", level.String())) + } + return a +} +*/ + +func (log *Log) setHandlers(file, con io.Writer) { + handlers := []slog.Handler{} + + log.mainWriter = file + if log.Type == "JSON" { + handlers = append(handlers, slog.NewJSONHandler(log.mainWriter, &slog.HandlerOptions{ + Level: log.sLevel, + })) + } else { + handlers = append(handlers, console.NewHandler(log.mainWriter, &console.HandlerOptions{ + // ReplaceAttr: replaceAttr, + Level: log.sLevel, + TimeFormat: time.DateTime, + NoColor: true, + Theme: console.NewDefaultTheme(), + })) + } + + log.consoleWriter = con + if log.consoleWriter != nil { + handlers = append(handlers, console.NewHandler(log.consoleWriter, &console.HandlerOptions{ + 
// ReplaceAttr: replaceAttr, + Level: log.sLevel, + TimeFormat: time.DateTime, + NoColor: false, + Theme: console.NewDefaultTheme(), + })) + } + + log.Logger = slog.New(slogmulti.Fanout(handlers...)) +} + +func (log *Log) SetLogWriter(w io.Writer) *slog.Logger { + log.setHandlers(log.mainWriter, w) + return log.Logger +} + +func (log *Log) Message(msg string, values ...any) { + s := fmt.Sprintf(msg, values...) + fmt.Println(s) + if log.Logger != nil { + log.Info(s) + } +} + +func (log *Log) Close(ctx context.Context, cmd *cobra.Command, app *Application) error { + if cmd.Name() == "version" { + // No log for version command + return nil + } + if log.File != "" { + log.Message("Check the log file: %s", log.File) + } + if closer, ok := log.mainWriter.(io.Closer); ok { + return closer.Close() + } + return nil +} + +func (log *Log) GetSLog() *slog.Logger { + return log.Logger +} diff --git a/app/version.go b/app/version.go new file mode 100644 index 00000000..2bf5c21e --- /dev/null +++ b/app/version.go @@ -0,0 +1,90 @@ +package app + +import ( + "context" + "fmt" + "runtime/debug" + "strings" + + "github.com/spf13/cobra" +) + +var ( + Version = "dev" + Commit = "none" + Date = "unknown" +) + +// initialize version and commit at the runtime +func init() { + dirty := false + buildvcs := false + + buildinfo, _ := debug.ReadBuildInfo() + for _, s := range buildinfo.Settings { + switch s.Key { + case "vcs.revision": + buildvcs = true + Commit = s.Value + case "vcs.modified": + if s.Value == "true" { + dirty = true + } + case "vcs.time": + Date = s.Value + } + } + if buildvcs && dirty { + Commit += "-dirty" + } +} + +// Banner Ascii art +// Generator : http://patorjk.com/software/taag-v1/ +// Font: Three point + +var _banner = []string{ + ". _ _ _ _ . 
_|_ _ _ ", + "|| | || | ||(_| | ─ (_|(_)", + " _) ", +} + +// String generate a string with new lines and place the given text on the latest line +func Banner() string { + const lenVersion = 20 + var text string + if Version != "" { + text = fmt.Sprintf("v %s", Version) + } + sb := strings.Builder{} + for i := range _banner { + if i == len(_banner)-1 && text != "" { + if len(text) >= lenVersion { + text = text[:lenVersion] + } + sb.WriteString(_banner[i][:lenVersion-len(text)] + text + _banner[i][lenVersion:]) + } else { + sb.WriteString(_banner[i]) + } + sb.WriteRune('\n') + } + return sb.String() +} + +func GetVersion() string { + return fmt.Sprintf("immich-go version:%s, commit:%s, date:%s", Version, Commit, Date) +} + +// NewUploadCommand adds the Upload command +func NewVersionCommand(ctx context.Context, app *Application) *cobra.Command { + cmd := &cobra.Command{ + Use: "version", + Short: "Give immich-go version", + } + + cmd.RunE = func(cmd *cobra.Command, args []string) error { + fmt.Println(GetVersion()) + return nil + } + return cmd +} diff --git a/browser/album.go b/browser/album.go deleted file mode 100644 index da2bbfde..00000000 --- a/browser/album.go +++ /dev/null @@ -1,8 +0,0 @@ -package browser - -type LocalAlbum struct { - Path string // As found in the files - Title string // either the directory base name, or metadata - Description string // As found in the metadata - Latitude, Longitude float64 // As found in the metadata -} diff --git a/browser/browser.go b/browser/browser.go deleted file mode 100644 index 299c8ffe..00000000 --- a/browser/browser.go +++ /dev/null @@ -1,10 +0,0 @@ -package browser - -import ( - "context" -) - -type Browser interface { - Prepare(cxt context.Context) error - Browse(cxt context.Context) chan *LocalAssetFile -} diff --git a/browser/files/localassets.go b/browser/files/localassets.go deleted file mode 100644 index 24df289e..00000000 --- a/browser/files/localassets.go +++ /dev/null @@ -1,317 +0,0 @@ -package files - 
-import ( - "context" - "io/fs" - "path" - "path/filepath" - "sort" - "strings" - "time" - - "github.com/simulot/immich-go/browser" - "github.com/simulot/immich-go/helpers/fileevent" - "github.com/simulot/immich-go/helpers/fshelper" - "github.com/simulot/immich-go/helpers/gen" - "github.com/simulot/immich-go/helpers/namematcher" - "github.com/simulot/immich-go/immich" - "github.com/simulot/immich-go/immich/metadata" -) - -type fileLinks struct { - image string - video string - sidecar string -} - -type LocalAssetBrowser struct { - fsyss []fs.FS - albums map[string]string - catalogs map[fs.FS]map[string][]string - log *fileevent.Recorder - sm immich.SupportedMedia - bannedFiles namematcher.List // list of file pattern to be exclude - whenNoDate string -} - -func NewLocalFiles(ctx context.Context, l *fileevent.Recorder, fsyss ...fs.FS) (*LocalAssetBrowser, error) { - return &LocalAssetBrowser{ - fsyss: fsyss, - albums: map[string]string{}, - catalogs: map[fs.FS]map[string][]string{}, - log: l, - whenNoDate: "FILE", - sm: immich.DefaultSupportedMedia, - }, nil -} - -func (la *LocalAssetBrowser) SetSupportedMedia(sm immich.SupportedMedia) *LocalAssetBrowser { - la.sm = sm - return la -} - -func (la *LocalAssetBrowser) SetBannedFiles(banned namematcher.List) *LocalAssetBrowser { - la.bannedFiles = banned - return la -} - -func (la *LocalAssetBrowser) SetWhenNoDate(opt string) *LocalAssetBrowser { - la.whenNoDate = opt - return la -} - -func (la *LocalAssetBrowser) Prepare(ctx context.Context) error { - for _, fsys := range la.fsyss { - err := la.passOneFsWalk(ctx, fsys) - if err != nil { - return err - } - } - - return nil -} - -func (la *LocalAssetBrowser) passOneFsWalk(ctx context.Context, fsys fs.FS) error { - la.catalogs[fsys] = map[string][]string{} - err := fs.WalkDir(fsys, ".", - func(name string, d fs.DirEntry, err error) error { - if err != nil { - return err - } - - if d.IsDir() { - la.catalogs[fsys][name] = []string{} - return nil - } - select { - case 
<-ctx.Done(): - // If the context has been cancelled, return immediately - return ctx.Err() - default: - dir, base := filepath.Split(name) - dir = strings.TrimSuffix(dir, "/") - if dir == "" { - dir = "." - } - ext := filepath.Ext(base) - mediaType := la.sm.TypeFromExt(ext) - - if mediaType == immich.TypeUnknown { - la.log.Record(ctx, fileevent.DiscoveredUnsupported, nil, name, "reason", "unsupported file type") - return nil - } - - cat := la.catalogs[fsys][dir] - - switch mediaType { - case immich.TypeImage: - la.log.Record(ctx, fileevent.DiscoveredImage, nil, name) - case immich.TypeVideo: - la.log.Record(ctx, fileevent.DiscoveredVideo, nil, name) - case immich.TypeSidecar: - la.log.Record(ctx, fileevent.DiscoveredSidecar, nil, name) - } - - if la.bannedFiles.Match(name) { - la.log.Record(ctx, fileevent.DiscoveredDiscarded, nil, name, "reason", "banned file") - return nil - } - la.catalogs[fsys][dir] = append(cat, name) - } - return nil - }) - return err -} - -func (la *LocalAssetBrowser) Browse(ctx context.Context) chan *browser.LocalAssetFile { - fileChan := make(chan *browser.LocalAssetFile) - // Browse all given FS to collect the list of files - go func(ctx context.Context) { - defer close(fileChan) - var err error - - errFn := func(name string, err error) { - if err != nil { - la.log.Record(ctx, fileevent.Error, nil, name, "error", err.Error()) - } - } - for _, fsys := range la.fsyss { - dirs := gen.MapKeys(la.catalogs[fsys]) - sort.Strings(dirs) - for _, dir := range dirs { - links := map[string]fileLinks{} - files := la.catalogs[fsys][dir] - - if len(files) == 0 { - continue - } - - // Scan images first - for _, file := range files { - ext := path.Ext(file) - if la.sm.TypeFromExt(ext) == immich.TypeImage { - linked := links[file] - linked.image = file - links[file] = linked - } - } - - next: - for _, file := range files { - ext := path.Ext(file) - t := la.sm.TypeFromExt(ext) - if t == immich.TypeImage { - continue next - } - - base := 
strings.TrimSuffix(file, ext) - switch t { - case immich.TypeSidecar: - if image, ok := links[base]; ok { - // file.ext.XMP -> file.ext - image.sidecar = file - links[base] = image - continue next - } - for f := range links { - if strings.TrimSuffix(f, path.Ext(f)) == base { - if image, ok := links[f]; ok { - // base.XMP -> base.ext - image.sidecar = file - links[f] = image - continue next - } - } - } - case immich.TypeVideo: - if image, ok := links[base]; ok { - // file.MP.ext -> file.ext - image.sidecar = file - links[base] = image - continue next - } - for f := range links { - if strings.TrimSuffix(f, path.Ext(f)) == base { - if image, ok := links[f]; ok { - // base.MP4 -> base.ext - image.video = file - links[f] = image - continue next - } - } - if strings.TrimSuffix(f, path.Ext(f)) == file { - if image, ok := links[f]; ok { - // base.MP4 -> base.ext - image.video = file - links[f] = image - continue next - } - } - } - // Unlinked video - links[file] = fileLinks{video: file} - } - } - - files = gen.MapKeys(links) - sort.Strings(files) - for _, file := range files { - var a *browser.LocalAssetFile - linked := links[file] - - if linked.image != "" { - a, err = la.assetFromFile(fsys, linked.image) - if err != nil { - errFn(linked.image, err) - return - } - if linked.video != "" { - a.LivePhoto, err = la.assetFromFile(fsys, linked.video) - if err != nil { - errFn(linked.video, err) - return - } - } - } else if linked.video != "" { - a, err = la.assetFromFile(fsys, linked.video) - if err != nil { - errFn(linked.video, err) - return - } - } - - if a != nil && linked.sidecar != "" { - a.SideCar = metadata.SideCarFile{ - FSys: fsys, - FileName: linked.sidecar, - } - la.log.Record(ctx, fileevent.AnalysisAssociatedMetadata, nil, linked.sidecar, "main", a.FileName) - } - select { - case <-ctx.Done(): - return - default: - if a != nil { - fileChan <- a - } - } - } - } - } - }(ctx) - - return fileChan -} - -var toOldDate = time.Date(1980, 1, 1, 0, 0, 0, 0, time.UTC) - -func 
(la *LocalAssetBrowser) assetFromFile(fsys fs.FS, name string) (*browser.LocalAssetFile, error) { - a := &browser.LocalAssetFile{ - FileName: name, - Title: filepath.Base(name), - FSys: fsys, - } - - fullPath := name - if fsys, ok := fsys.(fshelper.NameFS); ok { - fullPath = filepath.Join(fsys.Name(), name) - } - - a.Metadata.DateTaken = metadata.TakeTimeFromPath(fullPath) - - i, err := fs.Stat(fsys, name) - if err != nil { - return nil, err - } - a.FileSize = int(i.Size()) - if a.Metadata.DateTaken.IsZero() { - err = la.ReadMetadataFromFile(a) - if err != nil { - return nil, err - } - if a.Metadata.DateTaken.Before(toOldDate) { - switch la.whenNoDate { - case "FILE": - a.Metadata.DateTaken = i.ModTime() - case "NOW": - a.Metadata.DateTaken = time.Now() - } - } - } - return a, nil -} - -func (la *LocalAssetBrowser) ReadMetadataFromFile(a *browser.LocalAssetFile) error { - ext := strings.ToLower(path.Ext(a.FileName)) - - // Open the file - r, err := a.PartialSourceReader() - if err != nil { - return err - } - m, err := metadata.GetFromReader(r, ext) - if err == nil { - a.Metadata.DateTaken = m.DateTaken - } - return nil -} diff --git a/browser/files/localassets_test.go b/browser/files/localassets_test.go deleted file mode 100644 index 1313b4ec..00000000 --- a/browser/files/localassets_test.go +++ /dev/null @@ -1,150 +0,0 @@ -package files - -import ( - "context" - "errors" - "io/fs" - "path" - "reflect" - "testing" - - "github.com/kr/pretty" - "github.com/psanford/memfs" - "github.com/simulot/immich-go/helpers/fileevent" - "github.com/simulot/immich-go/helpers/namematcher" - "github.com/simulot/immich-go/immich" -) - -type inMemFS struct { - *memfs.FS - err error -} - -func newInMemFS() *inMemFS { - return &inMemFS{ - FS: memfs.New(), - } -} - -func (mfs *inMemFS) addFile(name string) *inMemFS { - if mfs.err != nil { - return mfs - } - dir := path.Dir(name) - mfs.err = errors.Join(mfs.err, mfs.MkdirAll(dir, 0o777)) - mfs.err = errors.Join(mfs.err, 
mfs.WriteFile(name, []byte(name), 0o777)) - return mfs -} - -func TestLocalAssets(t *testing.T) { - tc := []struct { - name string - fsys fs.FS - expected map[string]fileLinks - }{ - { - name: "simple", - fsys: newInMemFS(). - addFile("root_01.jpg"). - addFile("photos/photo_01.jpg"). - addFile("photos/photo_02.cr3"). - addFile("photos/photo_03.jpg"). - addFile("photos/summer 2023/20230801-001.jpg"). - addFile("photos/summer 2023/20230801-002.jpg"). - addFile("photos/summer 2023/20230801-003.cr3"). - addFile("@eaDir/thb1.jpg"). - addFile("photos/SYNOFILE_THUMB_0001.jpg"). - addFile("photos/summer 2023/.@__thumb/thb2.jpg"), - expected: map[string]fileLinks{ - "root_01.jpg": {image: "root_01.jpg"}, - "photos/photo_01.jpg": {image: "photos/photo_01.jpg"}, - "photos/photo_02.cr3": {image: "photos/photo_02.cr3"}, - "photos/photo_03.jpg": {image: "photos/photo_03.jpg"}, - "photos/summer 2023/20230801-001.jpg": {image: "photos/summer 2023/20230801-001.jpg"}, - "photos/summer 2023/20230801-002.jpg": {image: "photos/summer 2023/20230801-002.jpg"}, - "photos/summer 2023/20230801-003.cr3": {image: "photos/summer 2023/20230801-003.cr3"}, - }, - }, - { - name: "motion picture", - fsys: newInMemFS(). - addFile("motion/PXL_20210102_221126856.MP~2"). - addFile("motion/PXL_20210102_221126856.MP~2.jpg"). - addFile("motion/PXL_20210102_221126856.MP.jpg"). - addFile("motion/PXL_20210102_221126856.MP"). - addFile("motion/20231227_152817.jpg"). - addFile("motion/20231227_152817.MP4"), - expected: map[string]fileLinks{ - "motion/PXL_20210102_221126856.MP.jpg": {image: "motion/PXL_20210102_221126856.MP.jpg", video: "motion/PXL_20210102_221126856.MP"}, - "motion/PXL_20210102_221126856.MP~2.jpg": {image: "motion/PXL_20210102_221126856.MP~2.jpg", video: "motion/PXL_20210102_221126856.MP~2"}, - "motion/20231227_152817.jpg": {image: "motion/20231227_152817.jpg", video: "motion/20231227_152817.MP4"}, - }, - }, - { - name: "sidecar", - fsys: newInMemFS(). - addFile("root_01.jpg"). 
- addFile("root_01.XMP"). - addFile("root_02.jpg"). - addFile("root_02.jpg.XMP"). - addFile("video_01.mp4"). - addFile("video_01.mp4.XMP"). - addFile("root_03.MP.jpg"). - addFile("root_03.MP.jpg.XMP"). - addFile("root_03.MP"), - expected: map[string]fileLinks{ - "root_01.jpg": {image: "root_01.jpg", sidecar: "root_01.XMP"}, - "root_02.jpg": {image: "root_02.jpg", sidecar: "root_02.jpg.XMP"}, - "root_03.MP.jpg": {image: "root_03.MP.jpg", sidecar: "root_03.MP.jpg.XMP", video: "root_03.MP"}, - "video_01.mp4": {video: "video_01.mp4", sidecar: "video_01.mp4.XMP"}, - }, - }, - } - - for _, c := range tc { - t.Run(c.name, func(t *testing.T) { - fsys := c.fsys - ctx := context.Background() - - b, err := NewLocalFiles(ctx, fileevent.NewRecorder(nil, false), fsys) - if err != nil { - t.Error(err) - } - l, err := namematcher.New(`@eaDir/`, `.@__thumb`, `SYNOFILE_THUMB_*.*`) - if err != nil { - t.Error(err) - } - b.SetBannedFiles(l) - b.SetSupportedMedia(immich.DefaultSupportedMedia) - b.SetWhenNoDate("FILE") - - err = b.Prepare(ctx) - if err != nil { - t.Error(err) - } - - results := map[string]fileLinks{} - for a := range b.Browse(ctx) { - links := fileLinks{} - ext := path.Ext(a.FileName) - if b.sm.TypeFromExt(ext) == immich.TypeImage { - links.image = a.FileName - if a.LivePhoto != nil { - links.video = a.LivePhoto.FileName - } - } else { - links.video = a.FileName - } - if a.SideCar.FileName != "" { - links.sidecar = a.SideCar.FileName - } - results[a.FileName] = links - } - - if !reflect.DeepEqual(results, c.expected) { - t.Errorf("difference\n") - pretty.Ldiff(t, c.expected, results) - } - }) - } -} diff --git a/browser/gp/googlephotos.go b/browser/gp/googlephotos.go deleted file mode 100644 index 14705a9c..00000000 --- a/browser/gp/googlephotos.go +++ /dev/null @@ -1,639 +0,0 @@ -package gp - -import ( - "context" - "io/fs" - "path" - "path/filepath" - "sort" - "strings" - "unicode/utf8" - - "github.com/simulot/immich-go/browser" - 
"github.com/simulot/immich-go/helpers/fileevent" - "github.com/simulot/immich-go/helpers/fshelper" - "github.com/simulot/immich-go/helpers/gen" - "github.com/simulot/immich-go/helpers/namematcher" - "github.com/simulot/immich-go/immich" - "github.com/simulot/immich-go/immich/metadata" -) - -type Takeout struct { - fsyss []fs.FS - catalogs map[string]directoryCatalog // file catalogs by directory in the set of the all takeout parts - albums map[string]browser.LocalAlbum // track album names by folder - log *fileevent.Recorder - sm immich.SupportedMedia - - banned namematcher.List // Banned files - acceptMissingJSON bool -} - -// directoryCatalog captures all files in a given directory -type directoryCatalog struct { - jsons map[string]*GoogleMetaData // JSONs in the catalog by base name - unMatchedFiles map[string]*assetFile // files to be matched map by base name - matchedFiles map[string]*assetFile // files matched by base name -} - -// assetFile keep information collected during pass one -type assetFile struct { - fsys fs.FS // Remember in which part of the archive the the file - base string // Remember the original file name - length int // file length in bytes - md *GoogleMetaData // will point to the associated metadata -} - -func NewTakeout(ctx context.Context, l *fileevent.Recorder, sm immich.SupportedMedia, fsyss ...fs.FS) (*Takeout, error) { - to := Takeout{ - fsyss: fsyss, - catalogs: map[string]directoryCatalog{}, - albums: map[string]browser.LocalAlbum{}, - log: l, - sm: sm, - } - - return &to, nil -} - -func (to *Takeout) SetBannedFiles(banned namematcher.List) *Takeout { - to.banned = banned - return to -} - -func (to *Takeout) SetAcceptMissingJSON(flag bool) *Takeout { - to.acceptMissingJSON = flag - return to -} - -// Prepare scans all files in all walker to build the file catalog of the archive -// metadata files content is read and kept - -func (to *Takeout) Prepare(ctx context.Context) error { - for _, w := range to.fsyss { - err := 
to.passOneFsWalk(ctx, w) - if err != nil { - return err - } - } - err := to.solvePuzzle(ctx) - return err -} - -func (to *Takeout) passOneFsWalk(ctx context.Context, w fs.FS) error { - err := fs.WalkDir(w, ".", func(name string, d fs.DirEntry, err error) error { - if err != nil { - return err - } - - select { - case <-ctx.Done(): - return ctx.Err() - default: - - if d.IsDir() { - return nil - } - - dir, base := path.Split(name) - dir = strings.TrimSuffix(dir, "/") - ext := strings.ToLower(path.Ext(base)) - - dirCatalog, ok := to.catalogs[dir] - if !ok { - dirCatalog.jsons = map[string]*GoogleMetaData{} - dirCatalog.unMatchedFiles = map[string]*assetFile{} - dirCatalog.matchedFiles = map[string]*assetFile{} - } - if _, ok := dirCatalog.unMatchedFiles[base]; ok { - to.log.Record(ctx, fileevent.AnalysisLocalDuplicate, nil, name) - return nil - } - - finfo, err := d.Info() - if err != nil { - to.log.Record(ctx, fileevent.Error, nil, name, "error", err.Error()) - return err - } - switch ext { - case ".json": - md, err := fshelper.ReadJSON[GoogleMetaData](w, name) - if err == nil { - switch { - case md.isAsset(): - md.foundInPaths = append(md.foundInPaths, dir) - dirCatalog.jsons[base] = md - to.log.Record(ctx, fileevent.DiscoveredSidecar, nil, name, "type", "asset metadata", "title", md.Title) - case md.isAlbum(): - a := to.albums[dir] - a.Title = md.Title - a.Path = filepath.Base(dir) - if e := md.Enrichments; e != nil { - a.Description = e.Text - a.Latitude = e.Latitude - a.Longitude = e.Longitude - } - to.albums[dir] = a - to.log.Record(ctx, fileevent.DiscoveredSidecar, nil, name, "type", "album metadata", "title", md.Title) - default: - to.log.Record(ctx, fileevent.DiscoveredUnsupported, nil, name, "reason", "unknown JSONfile") - return nil - } - } else { - to.log.Record(ctx, fileevent.DiscoveredUnsupported, nil, name, "reason", "unknown JSONfile") - return nil - } - default: - t := to.sm.TypeFromExt(ext) - switch t { - case immich.TypeUnknown: - to.log.Record(ctx, 
fileevent.DiscoveredUnsupported, nil, name, "reason", "unsupported file type") - return nil - case immich.TypeVideo: - to.log.Record(ctx, fileevent.DiscoveredVideo, nil, name) - if strings.Contains(name, "Failed Videos") { - to.log.Record(ctx, fileevent.DiscoveredDiscarded, nil, name, "reason", "can't upload failed videos") - return nil - } - case immich.TypeImage: - to.log.Record(ctx, fileevent.DiscoveredImage, nil, name) - } - - if to.banned.Match(name) { - to.log.Record(ctx, fileevent.DiscoveredDiscarded, nil, name, "reason", "banned file") - return nil - } - - dirCatalog.unMatchedFiles[base] = &assetFile{ - fsys: w, - base: base, - length: int(finfo.Size()), - } - } - to.catalogs[dir] = dirCatalog - return nil - } - }) - return err -} - -// solvePuzzle prepares metadata with information collected during pass one for each accepted files -// -// JSON files give important information about the relative photos / movies: -// - The original name (useful when it as been truncated) -// - The date of capture (useful when the files doesn't have this date) -// - The GPS coordinates (will be useful in a future release) -// -// Each JSON is checked. JSON is duplicated in albums folder. -// --Associated files with the JSON can be found in the JSON's folder, or in the Year photos.-- -// ++JSON and files are located in the same folder -/// -// Once associated and sent to the main program, files are tagged for not been associated with an other one JSON. -// Association is done with the help of a set of matcher functions. Each one implement a rule -// -// 1 JSON can be associated with 1+ files that have a part of their name in common. -// - the file is named after the JSON name -// - the file name can be 1 UTF-16 char shorter (🤯) than the JSON name -// - the file name is longer than 46 UTF-16 chars (🤯) is truncated. But the truncation can creates duplicates, then a number is added. 
-// - if there are several files with same original name, the first instance kept as it is, the next has a sequence number. -// File is renamed as IMG_1234(1).JPG and the JSON is renamed as IMG_1234.JPG(1).JSON -// - of course those rules are likely to collide. They have to be applied from the most common to the least one. -// - sometimes the file isn't in the same folder than the json... It can be found in Year's photos folder -// -// --The duplicates files (same name, same length in bytes) found in the local source are discarded before been presented to the immich server. -// ++ Duplicates are presented to the next layer to allow the album handling -// -// To solve the puzzle, each directory is checked with all matchers in the order of the most common to the least. - -type matcherFn func(jsonName string, fileName string, sm immich.SupportedMedia) bool - -// matchers is a list of matcherFn from the most likely to be used to the least one -var matchers = []struct { - name string - fn matcherFn -}{ - {name: "normalMatch", fn: normalMatch}, - {name: "livePhotoMatch", fn: livePhotoMatch}, - {name: "matchWithOneCharOmitted", fn: matchWithOneCharOmitted}, - {name: "matchVeryLongNameWithNumber", fn: matchVeryLongNameWithNumber}, - {name: "matchDuplicateInYear", fn: matchDuplicateInYear}, - {name: "matchEditedName", fn: matchEditedName}, - {name: "matchForgottenDuplicates", fn: matchForgottenDuplicates}, -} - -func (to *Takeout) solvePuzzle(ctx context.Context) error { - dirs := gen.MapKeys(to.catalogs) - sort.Strings(dirs) - for _, dir := range dirs { - cat := to.catalogs[dir] - jsons := gen.MapKeys(cat.jsons) - sort.Strings(jsons) - for _, matcher := range matchers { - for _, json := range jsons { - md := cat.jsons[json] - for f := range cat.unMatchedFiles { - select { - case <-ctx.Done(): - return ctx.Err() - default: - if matcher.fn(json, f, to.sm) { - i := cat.unMatchedFiles[f] - i.md = md - cat.matchedFiles[f] = i - to.log.Record(ctx, 
fileevent.AnalysisAssociatedMetadata, cat.unMatchedFiles[f], filepath.Join(dir, f), "json", json, "size", i.length, "matcher", matcher.name) - delete(cat.unMatchedFiles, f) - } - } - } - } - } - to.catalogs[dir] = cat - files := gen.MapKeys(cat.unMatchedFiles) - sort.Strings(files) - for _, f := range files { - to.log.Record(ctx, fileevent.AnalysisMissingAssociatedMetadata, f, filepath.Join(dir, f)) - if to.acceptMissingJSON { - cat.matchedFiles[f] = cat.unMatchedFiles[f] - delete(cat.unMatchedFiles, f) - } else { - } - } - } - return nil -} - -// normalMatch -// -// PXL_20230922_144936660.jpg.json -// PXL_20230922_144936660.jpg -func normalMatch(jsonName string, fileName string, sm immich.SupportedMedia) bool { - base := strings.TrimSuffix(jsonName, path.Ext(jsonName)) - return base == fileName -} - -// livePhotoMatch -// 20231227_152817.jpg.json -// 20231227_152817.MP4 -// -// PXL_20231118_035751175.MP.jpg.json -// PXL_20231118_035751175.MP.jpg -// PXL_20231118_035751175.MP -func livePhotoMatch(jsonName string, fileName string, sm immich.SupportedMedia) bool { - fileExt := path.Ext(fileName) - fileName = strings.TrimSuffix(fileName, fileExt) - base := strings.TrimSuffix(jsonName, path.Ext(jsonName)) - base = strings.TrimSuffix(base, path.Ext(base)) - if base == fileName { - return true - } - base = strings.TrimSuffix(base, path.Ext(base)) - return base == fileName -} - -// matchWithOneCharOmitted -// -// PXL_20230809_203449253.LONG_EXPOSURE-02.ORIGIN.json -// PXL_20230809_203449253.LONG_EXPOSURE-02.ORIGINA.jpg -// -// 05yqt21kruxwwlhhgrwrdyb6chhwszi9bqmzu16w0 2.jp.json <-- match also with LivePhoto matcher -// 05yqt21kruxwwlhhgrwrdyb6chhwszi9bqmzu16w0 2.jpg -// -// 😀😃😄😁😆😅😂🤣🥲☺️😊😇🙂🙃😉😌😍🥰😘😗😙😚😋.json -// 😀😃😄😁😆😅😂🤣🥲☺️😊😇🙂🙃😉😌😍🥰😘😗😙😚😋😛.jpg - -func matchWithOneCharOmitted(jsonName string, fileName string, sm immich.SupportedMedia) bool { - baseJSON := strings.TrimSuffix(jsonName, path.Ext(jsonName)) - ext := path.Ext(baseJSON) - if sm.IsExtensionPrefix(ext) { - baseJSON = 
strings.TrimSuffix(baseJSON, ext) - } - fileName = strings.TrimSuffix(fileName, path.Ext(fileName)) - if fileName == baseJSON { - return true - } - if strings.HasPrefix(fileName, baseJSON) { - a, b := utf8.RuneCountInString(fileName), utf8.RuneCountInString(baseJSON) - if a-b <= 1 { - return true - } - } - return false -} - -// matchVeryLongNameWithNumber -// -// Backyard_ceremony_wedding_photography_xxxxxxx_(494).json -// Backyard_ceremony_wedding_photography_xxxxxxx_m(494).jpg -func matchVeryLongNameWithNumber(jsonName string, fileName string, sm immich.SupportedMedia) bool { - jsonName = strings.TrimSuffix(jsonName, path.Ext(jsonName)) - - p1JSON := strings.Index(jsonName, "(") - if p1JSON < 0 { - return false - } - p2JSON := strings.Index(jsonName, ")") - if p2JSON < 0 || p2JSON != len(jsonName)-1 { - return false - } - p1File := strings.Index(fileName, "(") - if p1File < 0 || p1File != p1JSON+1 { - return false - } - if jsonName[:p1JSON] != fileName[:p1JSON] { - return false - } - p2File := strings.Index(fileName, ")") - return jsonName[p1JSON+1:p2JSON] == fileName[p1File+1:p2File] -} - -// matchDuplicateInYear -// -// IMG_3479.JPG(2).json -// IMG_3479(2).JPG -// - -// Fast implementation, but does't work with live photos -func matchDuplicateInYear(jsonName string, fileName string, sm immich.SupportedMedia) bool { - jsonName = strings.TrimSuffix(jsonName, path.Ext(jsonName)) - p1JSON := strings.Index(jsonName, "(") - if p1JSON < 1 { - return false - } - p1File := strings.Index(fileName, "(") - if p1File < 0 { - return false - } - jsonExt := path.Ext(jsonName[:p1JSON]) - - p2JSON := strings.Index(jsonName, ")") - if p2JSON < 0 || p2JSON != len(jsonName)-1 { - return false - } - - p2File := strings.Index(fileName, ")") - if p2File < 0 || p2File < p1File { - return false - } - - fileExt := path.Ext(fileName) - - if fileExt != jsonExt { - return false - } - - jsonBase := strings.TrimSuffix(jsonName[:p1JSON], path.Ext(jsonName[:p1JSON])) - - if jsonBase != 
fileName[:p1File] { - return false - } - - if fileName[p1File+1:p2File] != jsonName[p1JSON+1:p2JSON] { - return false - } - - return true -} - -/* -// Regexp implementation, work with live photos, 10 times slower -var ( - reDupInYearJSON = regexp.MustCompile(`(.*)\.(.{2,4})\((\d+)\)\..{2,4}$`) - reDupInYearFile = regexp.MustCompile(`(.*)\((\d+)\)\..{2,4}$`) -) - -func matchDuplicateInYear(jsonName string, fileName string, sm immich.SupportedMedia) bool { - mFile := reDupInYearFile.FindStringSubmatch(fileName) - if len(mFile) < 3 { - return false - } - mJSON := reDupInYearJSON.FindStringSubmatch(jsonName) - if len(mJSON) < 4 { - return false - } - if mFile[1] == mJSON[1] && mFile[2] == mJSON[3] { - return true - } - return false -} -*/ - -// matchEditedName -// PXL_20220405_090123740.PORTRAIT.jpg.json -// PXL_20220405_090123740.PORTRAIT.jpg -// PXL_20220405_090123740.PORTRAIT-modifié.jpg - -func matchEditedName(jsonName string, fileName string, sm immich.SupportedMedia) bool { - base := strings.TrimSuffix(jsonName, path.Ext(jsonName)) - ext := path.Ext(base) - if ext != "" { - if sm.IsMedia(ext) { - base := strings.TrimSuffix(base, ext) - fname := strings.TrimSuffix(fileName, path.Ext(fileName)) - return strings.HasPrefix(fname, base) - } - } - return false -} - -// TODO: This one interferes with matchVeryLongNameWithNumber - -// matchForgottenDuplicates -// "original_1d4caa6f-16c6-4c3d-901b-9387de10e528_.json" -// original_1d4caa6f-16c6-4c3d-901b-9387de10e528_P.jpg -// original_1d4caa6f-16c6-4c3d-901b-9387de10e528_P(1).jpg - -func matchForgottenDuplicates(jsonName string, fileName string, sm immich.SupportedMedia) bool { - jsonName = strings.TrimSuffix(jsonName, path.Ext(jsonName)) - fileName = strings.TrimSuffix(fileName, path.Ext(fileName)) - if strings.HasPrefix(fileName, jsonName) { - a, b := utf8.RuneCountInString(jsonName), utf8.RuneCountInString(fileName) - if b-a < 10 { - return true - } - } - return false -} - -// Browse return a channel of assets -// -// 
Walkers are rewind, and scanned again -// each file net yet sent to immich is sent with associated metadata - -func (to *Takeout) Browse(ctx context.Context) chan *browser.LocalAssetFile { - assetChan := make(chan *browser.LocalAssetFile) - - go func() { - defer close(assetChan) - dirs := gen.MapKeys(to.catalogs) - sort.Strings(dirs) - for _, dir := range dirs { - if len(to.catalogs[dir].matchedFiles) > 0 { - err := to.passTwo(ctx, dir, assetChan) - if err != nil { - assetChan <- &browser.LocalAssetFile{Err: err} - } - } - } - }() - return assetChan -} - -// detect livephotos and motion pictures -// 1. get all pictures -// 2. scan vidoes, if a picture matches, this is a live photo -func (to *Takeout) passTwo(ctx context.Context, dir string, assetChan chan *browser.LocalAssetFile) error { - catalog := to.catalogs[dir] - - linkedFiles := map[string]struct { - video *assetFile - image *assetFile - }{} - - // Scan pictures - for _, f := range gen.MapKeys(catalog.matchedFiles) { - ext := path.Ext(f) - if to.sm.TypeFromExt(ext) == immich.TypeImage { - linked := linkedFiles[f] - linked.image = catalog.matchedFiles[f] - linkedFiles[f] = linked - } - } - - // Scan videos -nextVideo: - for _, f := range gen.MapKeys(catalog.matchedFiles) { - fExt := path.Ext(f) - if to.sm.TypeFromExt(fExt) == immich.TypeVideo { - name := strings.TrimSuffix(f, fExt) - for i, linked := range linkedFiles { - if linked.image == nil { - continue - } - if linked.image != nil && linked.video != nil { - continue - } - p := linked.image.base - ext := path.Ext(p) - p = strings.TrimSuffix(p, ext) - ext = path.Ext(p) - if strings.ToUpper(ext) == ".MP" || strings.HasPrefix(strings.ToUpper(ext), ".MP~") { - if fExt != ext { - continue - } - p = strings.TrimSuffix(p, ext) - } - if p == name { - linked.video = catalog.matchedFiles[f] - linkedFiles[i] = linked - continue nextVideo - } - } - linked := linkedFiles[f] - linked.video = catalog.matchedFiles[f] - linkedFiles[f] = linked - } - } - - for _, base := 
range gen.MapKeys(linkedFiles) { - var a *browser.LocalAssetFile - var err error - - linked := linkedFiles[base] - - if linked.image != nil { - a, err = to.makeAsset(linked.image.md, linked.image.fsys, path.Join(dir, linked.image.base)) - if err != nil { - to.log.Record(ctx, fileevent.Error, nil, path.Join(dir, linked.image.base), "error", err.Error()) - continue - } - if linked.video != nil { - i, err := to.makeAsset(linked.video.md, linked.video.fsys, path.Join(dir, linked.video.base)) - if err != nil { - to.log.Record(ctx, fileevent.Error, nil, path.Join(dir, linked.video.base), "error", err.Error()) - } else { - a.LivePhoto = i - } - } - } else { - a, err = to.makeAsset(linked.video.md, linked.video.fsys, path.Join(dir, linked.video.base)) - if err != nil { - to.log.Record(ctx, fileevent.Error, nil, path.Join(dir, linked.video.base), "error", err.Error()) - continue - } - } - select { - case <-ctx.Done(): - return ctx.Err() - default: - assetChan <- a - } - } - return nil -} - -// makeAsset makes a localAssetFile based on the google metadata -func (to *Takeout) makeAsset(md *GoogleMetaData, fsys fs.FS, name string) (*browser.LocalAssetFile, error) { - i, err := fs.Stat(fsys, name) - if err != nil { - return nil, err - } - - a := browser.LocalAssetFile{ - FileName: name, - FileSize: int(i.Size()), - Title: path.Base(name), - FSys: fsys, - } - - if album, ok := to.albums[path.Dir(name)]; ok { - a.Albums = append(a.Albums, album) - } - - if md != nil { - // Change file's title with the asset's title and the actual file's extension - title := md.Title - titleExt := path.Ext(title) - fileExt := path.Ext(name) - - if titleExt != fileExt { - title = strings.TrimSuffix(title, titleExt) - titleExt = path.Ext(title) - if titleExt != fileExt { - title = strings.TrimSuffix(title, titleExt) + fileExt - } - } - a.Title = title - a.Archived = md.Archived - a.FromPartner = md.isPartner() - a.Trashed = md.Trashed - a.Favorite = md.Favorited - - // Prepare sidecar data to force 
Immich with Google metadata - - sidecar := metadata.Metadata{ - Description: md.Description, - DateTaken: md.PhotoTakenTime.Time(), - } - - if md.GeoDataExif.Latitude != 0 || md.GeoDataExif.Longitude != 0 { - sidecar.Latitude = md.GeoDataExif.Latitude - sidecar.Longitude = md.GeoDataExif.Longitude - } - - if md.GeoData.Latitude != 0 || md.GeoData.Longitude != 0 { - sidecar.Latitude = md.GeoData.Latitude - sidecar.Longitude = md.GeoData.Longitude - } - for _, p := range md.foundInPaths { - if album, exists := to.albums[p]; exists { - if (album.Latitude != 0 || album.Longitude != 0) && (sidecar.Latitude == 0 && sidecar.Longitude == 0) { - sidecar.Latitude = album.Latitude - sidecar.Longitude = album.Longitude - } - } - } - a.Metadata = sidecar - } - - return &a, nil -} diff --git a/browser/gp/json.go b/browser/gp/json.go deleted file mode 100644 index 342b7556..00000000 --- a/browser/gp/json.go +++ /dev/null @@ -1,176 +0,0 @@ -package gp - -import ( - "encoding/json" - "fmt" - "strconv" - "time" - - "github.com/simulot/immich-go/helpers/tzone" -) - -type Metablock struct { - Title string `json:"title"` - Description string `json:"description"` - Category string `json:"category"` - Date *googTimeObject `json:"date,omitempty"` - PhotoTakenTime googTimeObject `json:"photoTakenTime"` - GeoDataExif googGeoData `json:"geoDataExif"` - GeoData googGeoData `json:"geoData"` - Trashed bool `json:"trashed,omitempty"` - Archived bool `json:"archived,omitempty"` - URLPresent googIsPresent `json:"url,omitempty"` // true when the file is an asset metadata - Favorited bool `json:"favorited,omitempty"` // true when starred in GP - Enrichments *googleEnrichments `json:"enrichments,omitempty"` // Album enrichments -} - -type GoogleMetaData struct { - Metablock - GooglePhotosOrigin struct { - FromPartnerSharing googIsPresent `json:"fromPartnerSharing,omitempty"` // true when this is a partner's asset - } `json:"googlePhotosOrigin"` - AlbumData *Metablock `json:"albumdata"` - // Not in 
the JSON, for local treatment - foundInPaths []string // keep track of paths where the json has been found -} - -func (gmd *GoogleMetaData) UnmarshalJSON(data []byte) error { - type gmetadata GoogleMetaData - var gg gmetadata - - err := json.Unmarshal(data, &gg) - if err != nil { - return err - } - - // compensate metadata version - if gg.AlbumData != nil { - gg.Metablock = *gg.AlbumData - gg.AlbumData = nil - } - - *gmd = GoogleMetaData(gg) - return nil -} - -func (gmd GoogleMetaData) isAlbum() bool { - return gmd.Date != nil -} - -func (gmd GoogleMetaData) isAsset() bool { - return gmd.PhotoTakenTime.Timestamp != "" -} - -func (gmd GoogleMetaData) isPartner() bool { - return bool(gmd.GooglePhotosOrigin.FromPartnerSharing) -} - -// Key return an expected unique key for the asset -// based on the title and the timestamp -func (gmd GoogleMetaData) Key() string { - return fmt.Sprintf("%s,%s", gmd.Title, gmd.PhotoTakenTime.Timestamp) -} - -// googIsPresent is set when the field is present. 
The content of the field is not relevant -type googIsPresent bool - -func (p *googIsPresent) UnmarshalJSON(b []byte) error { - var bl bool - err := json.Unmarshal(b, &bl) - if err == nil { - return nil - } - - *p = len(b) > 0 - return nil -} - -func (p googIsPresent) MarshalJSON() ([]byte, error) { - if p { - return json.Marshal("present") - } - return json.Marshal(struct{}{}) -} - -// googGeoData contains GPS coordinates -type googGeoData struct { - Latitude float64 `json:"latitude"` - Longitude float64 `json:"longitude"` - Altitude float64 `json:"altitude"` -} - -// googTimeObject to handle the epoch timestamp -type googTimeObject struct { - Timestamp string `json:"timestamp"` - // Formatted string `json:"formatted"` -} - -// Time return the time.Time of the epoch -func (gt *googTimeObject) Time() time.Time { - if gt == nil { - return time.Time{} - } - ts, _ := strconv.ParseInt(gt.Timestamp, 10, 64) - if ts == 0 { - return time.Time{} - } - t := time.Unix(ts, 0) - local, _ := tzone.Local() - // t = time.Date(t.Year(), t.Month(), t.Day(), t.Hour(), t.Minute(), t.Second(), t.Nanosecond(), time.UTC) - return t.In(local) -} - -type googleEnrichments struct { - Text string - Latitude float64 - Longitude float64 -} - -func (ge *googleEnrichments) UnmarshalJSON(b []byte) error { - type googleEnrichment struct { - NarrativeEnrichment struct { - Text string `json:"text"` - } `json:"narrativeEnrichment,omitempty"` - LocationEnrichment struct { - Location []struct { - Name string `json:"name"` - Description string `json:"description"` - LatitudeE7 int `json:"latitudeE7"` - LongitudeE7 int `json:"longitudeE7"` - } `json:"location"` - } `json:"locationEnrichment,omitempty"` - } - - var enrichments []googleEnrichment - - err := json.Unmarshal(b, &enrichments) - if err != nil { - return err - } - - for _, e := range enrichments { - if e.NarrativeEnrichment.Text != "" { - ge.Text = addString(ge.Text, "\n", e.NarrativeEnrichment.Text) - } - if e.LocationEnrichment.Location != nil 
{ - for _, l := range e.LocationEnrichment.Location { - if l.Name != "" { - ge.Text = addString(ge.Text, "\n", l.Name) - } - if l.Description != "" { - ge.Text = addString(ge.Text, " - ", l.Description) - } - ge.Latitude = float64(l.LatitudeE7) / 10e6 - ge.Longitude = float64(l.LongitudeE7) / 10e6 - } - } - } - return err -} - -func addString(s string, sep string, t string) string { - if s != "" { - return s + sep + t - } - return t -} diff --git a/browser/localfile.go b/browser/localfile.go deleted file mode 100644 index 15c1ecb9..00000000 --- a/browser/localfile.go +++ /dev/null @@ -1,178 +0,0 @@ -package browser - -import ( - "errors" - "fmt" - "io" - "io/fs" - "os" - "time" - - "github.com/simulot/immich-go/helpers/fshelper" - "github.com/simulot/immich-go/immich/metadata" -) - -/* - localFile structure hold information on assets used for building immich assets. - - The asset is taken into a fs.FS system which doesn't implement anything else than a strait - reader. - fsys can be a zip file, a DirFS, or anything else. - - It implements a way to read a minimal quantity of data to be able to take a decision - about chose a file or discard it. 
- - implements fs.File and fs.FileInfo, Stat - -*/ - -type LocalAssetFile struct { - // Common fields - FileName string // The asset's path in the fsys - Title string // Google Photos may a have title longer than the filename - Albums []LocalAlbum // The asset's album, if any - Err error // keep errors encountered - SideCar metadata.SideCarFile // sidecar file if found - Metadata metadata.Metadata // Metadata fields - - // Google Photos flags - Trashed bool // The asset is trashed - Archived bool // The asset is archived - FromPartner bool // the asset comes from a partner - Favorite bool - - // Live Photos - LivePhoto *LocalAssetFile // Local asset of the movie part - LivePhotoID string // ID of the movie part, just uploaded - - FSys fs.FS // Asset's file system - FileSize int // File size in bytes - - // buffer management - sourceFile fs.File // the opened source file - tempFile *os.File // buffer that keep partial reads available for the full file reading - teeReader io.Reader // write each read from it into the tempWriter - reader io.Reader // the reader that combines the partial read and original file for full file reading -} - -func (l LocalAssetFile) DebugObject() any { - l.FSys = nil - return l -} - -func (l *LocalAssetFile) AddAlbum(album LocalAlbum) { - for _, al := range l.Albums { - if al == album { - return - } - } - l.Albums = append(l.Albums, album) -} - -// Remove the temporary file -func (l *LocalAssetFile) Remove() error { - if fsys, ok := l.FSys.(fshelper.Remover); ok { - return fsys.Remove(l.FileName) - } - return nil -} - -func (l *LocalAssetFile) DeviceAssetID() string { - return fmt.Sprintf("%s-%d", l.Title, l.FileSize) -} - -// PartialSourceReader open a reader on the current asset. -// each byte read from it is saved into a temporary file. -// -// It returns a TeeReader that writes each read byte from the source into the temporary file. 
-// The temporary file is discarded when the LocalAssetFile is closed - -func (l *LocalAssetFile) PartialSourceReader() (reader io.Reader, err error) { - if l.sourceFile == nil { - l.sourceFile, err = l.FSys.Open(l.FileName) - if err != nil { - return nil, err - } - } - if l.tempFile == nil { - l.tempFile, err = os.CreateTemp("", "immich-go_*.tmp") - if err != nil { - return nil, err - } - if l.teeReader == nil { - l.teeReader = io.TeeReader(l.sourceFile, l.tempFile) - } - } - _, err = l.tempFile.Seek(0, 0) - if err != nil { - return nil, err - } - return io.MultiReader(l.tempFile, l.teeReader), nil -} - -// Open return fs.File that reads previously read bytes followed by the actual file content. -func (l *LocalAssetFile) Open() (fs.File, error) { - var err error - if l.sourceFile == nil { - l.sourceFile, err = l.FSys.Open(l.FileName) - if err != nil { - return nil, err - } - } - if l.tempFile != nil { - _, err = l.tempFile.Seek(0, 0) - if err != nil { - return nil, err - } - l.reader = io.MultiReader(l.tempFile, l.sourceFile) - } else { - l.reader = l.sourceFile - } - return l, nil -} - -// Read -func (l *LocalAssetFile) Read(b []byte) (int, error) { - return l.reader.Read(b) -} - -// Close close the temporary file and close the source -func (l *LocalAssetFile) Close() error { - var err error - if l.sourceFile != nil { - err = errors.Join(err, l.sourceFile.Close()) - l.sourceFile = nil - } - if l.tempFile != nil { - f := l.tempFile.Name() - err = errors.Join(err, l.tempFile.Close()) - err = errors.Join(err, os.Remove(f)) - l.tempFile = nil - } - return err -} - -// Stat implements the fs.FILE interface -func (l *LocalAssetFile) Stat() (fs.FileInfo, error) { - return l, nil -} -func (l *LocalAssetFile) IsDir() bool { return false } - -func (l *LocalAssetFile) Name() string { - return l.FileName -} - -func (l *LocalAssetFile) Size() int64 { - return int64(l.FileSize) -} - -// Mode Implements the fs.FILE interface -func (l *LocalAssetFile) Mode() fs.FileMode { return 
0 } - -// ModTime implements the fs.FILE interface -func (l *LocalAssetFile) ModTime() time.Time { - return l.Metadata.DateTaken -} - -// Sys implements the fs.FILE interface -func (l *LocalAssetFile) Sys() any { return nil } diff --git a/browser/readersearch.go b/browser/readersearch.go deleted file mode 100644 index 8217f6a0..00000000 --- a/browser/readersearch.go +++ /dev/null @@ -1,47 +0,0 @@ -package browser - -import ( - "bytes" - "io" -) - -const searchBufferSize = 32 * 1024 - -func searchPattern(r io.Reader, pattern []byte, maxDataLen int) ([]byte, error) { - var err error - pos := 0 - // Create a buffer to hold the chunk of dataZ - buffer := make([]byte, searchBufferSize) - ofs := 0 - - var bytesRead int - for { - // Read a chunk of data into the buffer - bytesRead, err = r.Read(buffer[bytesRead-ofs:]) - if err != nil && err != io.EOF { - return nil, err - } - - // Search for the pattern within the buffer - index := bytes.Index(buffer, pattern) - if index >= 0 { - if index < searchBufferSize-maxDataLen { - return buffer[index : index+maxDataLen], nil - } - ofs = index - } else { - ofs = bytesRead - maxDataLen - 1 - } - - // Check if end of file is reached - if err == io.EOF { - break - } - - // Move the remaining bytes of the current buffer to the beginning - copy(buffer, buffer[ofs:bytesRead]) - pos += bytesRead - } - - return nil, io.EOF -} diff --git a/browser/readersearch_test.go b/browser/readersearch_test.go deleted file mode 100644 index 55b437da..00000000 --- a/browser/readersearch_test.go +++ /dev/null @@ -1,92 +0,0 @@ -package browser - -import ( - "bytes" - "crypto/rand" - "io" - "reflect" - "testing" -) - -func GenRandomBytes(size int) (blk []byte) { - blk = make([]byte, size) - _, _ = rand.Read(blk) - return -} - -func Test_searchPattern(t *testing.T) { - type args struct { - r io.Reader - pattern []byte - maxDataLen int - } - tests := []struct { - name string - args args - want []byte - wantErr bool - }{ - { - name: "notin", - args: args{ - 
r: bytes.NewReader(append(GenRandomBytes(searchBufferSize/3), "this is the date:2023-08-01T20:20:00 in the middle of the buffer"...)), - pattern: []byte("nothere"), - maxDataLen: 24, - }, - want: nil, - wantErr: true, - }, - { - name: "middle", - args: args{ - r: bytes.NewReader(append(GenRandomBytes(searchBufferSize/3), "this is the date:2023-08-01T20:20:00 in the middle of the buffer"...)), - pattern: []byte("date:"), - maxDataLen: 24, - }, - want: []byte("date:2023-08-01T20:20:00"), - wantErr: false, - }, - { - name: "beginning", - args: args{ - r: bytes.NewReader([]byte("date:2023-08-01T20:20:00 in the middle of the buffer")), - pattern: []byte("date:"), - maxDataLen: 24, - }, - want: []byte("date:2023-08-01T20:20:00"), - wantErr: false, - }, - { - name: "2ndbuffer", - args: args{ - r: bytes.NewReader(append(GenRandomBytes(3*searchBufferSize), "this is the date:2023-08-01T20:20:00 in the middle of the buffer"...)), - pattern: []byte("date:"), - maxDataLen: 24, - }, - want: []byte("date:2023-08-01T20:20:00"), - wantErr: false, - }, - { - name: "crossing buffer boundaries", - args: args{ - r: bytes.NewReader(append(append(GenRandomBytes(2*searchBufferSize-10), "date:2023-08-01T20:20:00 in the middle of the buffer"...), GenRandomBytes(searchBufferSize-10)...)), - pattern: []byte("date:"), - maxDataLen: 24, - }, - want: []byte("date:2023-08-01T20:20:00"), - wantErr: false, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - got, err := searchPattern(tt.args.r, tt.args.pattern, tt.args.maxDataLen) - if (err != nil) != tt.wantErr { - t.Errorf("searchPattern() error = %v, wantErr %v", err, tt.wantErr) - return - } - if !reflect.DeepEqual(got, tt.want) { - t.Errorf("searchPattern() = %v, want %v", got, tt.want) - } - }) - } -} diff --git a/cmd/shared.go b/cmd/shared.go deleted file mode 100644 index 14b8fa20..00000000 --- a/cmd/shared.go +++ /dev/null @@ -1,214 +0,0 @@ -package cmd - -import ( - "context" - "errors" - "flag" - "fmt" - "io" - 
"log/slog" - "os" - "path/filepath" - "strings" - "time" - - "github.com/simulot/immich-go/helpers/configuration" - "github.com/simulot/immich-go/helpers/fileevent" - "github.com/simulot/immich-go/helpers/myflag" - "github.com/simulot/immich-go/helpers/tzone" - "github.com/simulot/immich-go/immich" - fakeimmich "github.com/simulot/immich-go/internal/fakeImmich" - "github.com/simulot/immich-go/ui" - "github.com/telemachus/humane" -) - -// SharedFlags collect all parameters that are common to all commands -type SharedFlags struct { - ConfigurationFile string // Path to the configuration file to use - Server string // Immich server address (http://:2283/api or https:///api) - API string // Immich api endpoint (http://container_ip:3301) - Key string // API Key - DeviceUUID string // Set a device UUID - APITrace bool // Enable API call traces - LogLevel string // Indicate the log level (string) - Level slog.Level // Set the log level - Debug bool // Enable the debug mode - TimeZone string // Override default TZ - SkipSSL bool // Skip SSL Verification - ClientTimeout time.Duration // Set the client request timeout - NoUI bool // Disable user interface - JSONLog bool // Enable JSON structured log - DebugCounters bool // Enable CSV action counters per file - DebugFileList bool // When true, the file argument is a file wile the list of Takeout files - - Immich immich.ImmichInterface // Immich client - Log *slog.Logger // Logger - Jnl *fileevent.Recorder // Program's logger - LogFile string // Log file name - LogWriterCloser io.WriteCloser // the log writer - APITraceWriter io.WriteCloser // API tracer - APITraceWriterName string - Banner ui.Banner -} - -func (app *SharedFlags) InitSharedFlags() { - app.ConfigurationFile = configuration.DefaultConfigFile() - app.LogFile = configuration.DefaultLogFile() - app.APITrace = false - app.Debug = false - app.SkipSSL = false - app.LogLevel = "INFO" - app.NoUI = false - app.JSONLog = false - app.ClientTimeout = 5 * time.Minute -} - 
-// SetFlag add common flags to a flagset -func (app *SharedFlags) SetFlags(fs *flag.FlagSet) { - fs.StringVar(&app.ConfigurationFile, "use-configuration", app.ConfigurationFile, "Specifies the configuration to use") - fs.StringVar(&app.Server, "server", app.Server, "Immich server address (http://:2283 or https://)") - fs.StringVar(&app.API, "api", app.API, "Immich api endpoint (http://container_ip:3301)") - fs.StringVar(&app.Key, "key", app.Key, "API Key") - fs.StringVar(&app.DeviceUUID, "device-uuid", app.DeviceUUID, "Set a device UUID") - fs.StringVar(&app.LogLevel, "log-level", app.LogLevel, "Log level (DEBUG|INFO|WARN|ERROR), default INFO") - fs.StringVar(&app.LogFile, "log-file", app.LogFile, "Write log messages into the file") - fs.BoolFunc("log-json", "Output line-delimited JSON file, default FALSE", myflag.BoolFlagFn(&app.JSONLog, app.JSONLog)) - fs.BoolFunc("api-trace", "enable trace of api calls", myflag.BoolFlagFn(&app.APITrace, app.APITrace)) - fs.BoolFunc("debug", "enable debug messages", myflag.BoolFlagFn(&app.Debug, app.Debug)) - fs.StringVar(&app.TimeZone, "time-zone", app.TimeZone, "Override the system time zone") - fs.BoolFunc("skip-verify-ssl", "Skip SSL verification", myflag.BoolFlagFn(&app.SkipSSL, app.SkipSSL)) - fs.BoolFunc("no-ui", "Disable the user interface", myflag.BoolFlagFn(&app.NoUI, app.NoUI)) - fs.Func("client-timeout", "Set server calls timeout, default 1m", myflag.DurationFlagFn(&app.ClientTimeout, app.ClientTimeout)) - fs.BoolFunc("debug-counters", "generate a CSV file with actions per handled files", myflag.BoolFlagFn(&app.DebugCounters, false)) -} - -func (app *SharedFlags) Start(ctx context.Context) error { - var joinedErr error - if app.Server != "" { - app.Server = strings.TrimSuffix(app.Server, "/") - } - if app.TimeZone != "" { - _, err := tzone.SetLocal(app.TimeZone) - joinedErr = errors.Join(joinedErr, err) - } - - if app.Jnl == nil { - app.Jnl = fileevent.NewRecorder(nil, app.DebugCounters) - } - - if app.DebugFileList 
{ - app.Immich = &fakeimmich.MockedCLient{} - _ = os.Remove(app.LogFile) - } - - if app.LogFile != "" { - if app.LogWriterCloser == nil { - err := configuration.MakeDirForFile(app.LogFile) - if err != nil { - return err - } - f, err := os.OpenFile(app.LogFile, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0o664) - if err != nil { - return err - } - err = app.Level.UnmarshalText([]byte(strings.ToUpper(app.LogLevel))) - if err != nil { - return err - } - app.SetLogWriter(f) - app.LogWriterCloser = f - } - } - - // If the client isn't yet initialized - if app.Immich == nil { - if app.Server == "" && app.API == "" && app.Key == "" { - conf, err := configuration.ConfigRead(app.ConfigurationFile) - confExist := err == nil - if confExist && app.Server == "" && app.Key == "" && app.API == "" { - app.Server = conf.ServerURL - app.Key = conf.APIKey - app.API = conf.APIURL - } - } - - switch { - case app.Server == "" && app.API == "": - joinedErr = errors.Join(joinedErr, errors.New("missing -server, Immich server address (http://:2283 or https://)")) - case app.Server != "" && app.API != "": - joinedErr = errors.Join(joinedErr, errors.New("give either the -server or the -api option")) - } - if app.Key == "" { - joinedErr = errors.Join(joinedErr, errors.New("missing -key")) - } - - if joinedErr != nil { - return joinedErr - } - - // Connection details are saved into the configuration file - conf := configuration.Configuration{ - ServerURL: app.Server, - APIKey: app.Key, - APIURL: app.API, - } - err := configuration.MakeDirForFile(app.ConfigurationFile) - if err != nil { - return err - } - err = conf.Write(app.ConfigurationFile) - if err != nil { - return fmt.Errorf("can't write into the configuration file: %w", err) - } - app.Log.Info("Connection to the server " + app.Server) - - app.Immich, err = immich.NewImmichClient(app.Server, app.Key, immich.OptionVerifySSL(app.SkipSSL), immich.OptionConnectionTimeout(app.ClientTimeout)) - if err != nil { - return err - } - if app.API != "" { - 
app.Immich.SetEndPoint(app.API) - } - if app.DeviceUUID != "" { - app.Immich.SetDeviceUUID(app.DeviceUUID) - } - - if app.APITrace { - if app.APITraceWriter == nil { - err := configuration.MakeDirForFile(app.LogFile) - if err != nil { - return err - } - app.APITraceWriterName = strings.TrimSuffix(app.LogFile, filepath.Ext(app.LogFile)) + ".trace.log" - app.APITraceWriter, err = os.OpenFile(app.APITraceWriterName, os.O_CREATE|os.O_WRONLY, 0o664) - if err != nil { - return err - } - app.Immich.EnableAppTrace(app.APITraceWriter) - } - } - - err = app.Immich.PingServer(ctx) - if err != nil { - return err - } - app.Log.Info("Server status: OK") - - user, err := app.Immich.ValidateConnection(ctx) - if err != nil { - return err - } - app.Log.Info(fmt.Sprintf("Connected, user: %s", user.Email)) - } - - return nil -} - -func (app *SharedFlags) SetLogWriter(w io.Writer) { - if app.JSONLog { - app.Log = slog.New(slog.NewJSONHandler(w, &slog.HandlerOptions{})) - } else { - app.Log = slog.New(humane.NewHandler(w, &humane.Options{Level: app.Level})) - } - app.Jnl.SetLogger(app.Log) -} diff --git a/cmd/upload/configuration.go b/cmd/upload/configuration.go deleted file mode 100644 index 9ceb0fe2..00000000 --- a/cmd/upload/configuration.go +++ /dev/null @@ -1,56 +0,0 @@ -package upload - -import ( - "slices" - "strings" -) - -type Configuration struct { - SelectExtensions StringList - ExcludeExtensions StringList - Recursive bool -} - -func (c *Configuration) Validate() { - c.SelectExtensions = checkExtensions(c.SelectExtensions) - c.ExcludeExtensions = checkExtensions(c.ExcludeExtensions) -} - -func checkExtensions(l StringList) StringList { - var r StringList - - for _, e := range l { - if !strings.HasPrefix(e, ".") { - e = "." + e - } - e = strings.ToLower(e) - r = append(r, e) - } - return r -} - -type StringList []string - -func (sl *StringList) Set(s string) error { - l := strings.Split(s, ",") - (*sl) = append((*sl), l...) 
- return nil -} - -func (sl StringList) String() string { - return strings.Join(sl, ", ") -} - -func (sl StringList) Include(s string) bool { - if len(sl) == 0 { - return true - } - return slices.Contains(sl, strings.ToLower(s)) -} - -func (sl StringList) Exclude(s string) bool { - if len(sl) == 0 { - return false - } - return slices.Contains(sl, strings.ToLower(s)) -} diff --git a/cmd/upload/e2e_takeout_test.go b/cmd/upload/e2e_takeout_test.go deleted file mode 100644 index 3c570f97..00000000 --- a/cmd/upload/e2e_takeout_test.go +++ /dev/null @@ -1,85 +0,0 @@ -//go:build e2e -// +build e2e - -package upload - -import ( - "context" - "io/fs" - "os" - "path/filepath" - "testing" - - "github.com/simulot/immich-go/cmd" - "github.com/simulot/immich-go/internal/fakefs" -) - -// Simulate a takeout archive with the list of zipped files -func simulate_upload(t *testing.T, zipList string, dateFormat string, forceMissingJSON bool) { - ic := &icCatchUploadsAssets{ - albums: map[string][]string{}, - } - ctx := context.Background() - - // log := slog.New(slog.NewTextHandler(io.Discard, nil)) - serv := cmd.SharedFlags{ - Immich: ic, - LogLevel: "INFO", - // Jnl: fileevent.NewRecorder(log, false), - // Log: log, - } - - fsOpener := func() ([]fs.FS, error) { - return fakefs.ScanFileList(zipList, dateFormat) - } - os.Remove(filepath.Dir(zipList) + "/debug.log") - args := []string{"-google-photos", "-no-ui", "-debug-counters", "-log-file=" + filepath.Dir(zipList) + "/debug.log"} - - app, err := newCommand(ctx, &serv, args, fsOpener) - if err != nil { - t.Errorf("can't instantiate the UploadCmd: %s", err) - return - } - app.ForceUploadWhenNoJSON = forceMissingJSON - err = app.run(ctx) - if err != nil { - t.Errorf("can't run the UploadCmd: %s", err) - return - } -} - -func TestPixilTakeOut(t *testing.T) { - initMyEnv(t) - - simulate_upload(t, myEnv["IMMICH_TESTFILES"]+"/User Files/pixil/list.lst", "01-02-2006 15:04", false) -} - -func TestPhyl404TakeOut(t *testing.T) { - initMyEnv(t) 
- - simulate_upload(t, myEnv["IMMICH_TESTFILES"]+"/User Files/Phyl404/list.lst", "2006-01-02 15:04", false) -} - -func TestPhyl404_2TakeOut(t *testing.T) { - initMyEnv(t) - - simulate_upload(t, myEnv["IMMICH_TESTFILES"]+"/User Files/Phy404#2/list.lst", "2006-01-02 15:04", false) -} - -func TestSteve81TakeOut(t *testing.T) { - initMyEnv(t) - - simulate_upload(t, myEnv["IMMICH_TESTFILES"]+"/User Files/Steve81/list.list", "2006-01-02 15:04", false) -} - -func TestMuetyTakeOut(t *testing.T) { - initMyEnv(t) - - simulate_upload(t, myEnv["IMMICH_TESTFILES"]+"/User Files/muety/list.lst", "01-02-2006 15:04", false) -} - -func TestMissingJSONTakeOut(t *testing.T) { - initMyEnv(t) - - simulate_upload(t, myEnv["IMMICH_TESTFILES"]+"/User Files/MissingJSON/list.lst", "01-02-2006 15:04", true) -} diff --git a/cmd/upload/upload.go b/cmd/upload/upload.go deleted file mode 100644 index f1a5bf53..00000000 --- a/cmd/upload/upload.go +++ /dev/null @@ -1,944 +0,0 @@ -// Command Upload - -package upload - -import ( - "context" - "flag" - "fmt" - "io/fs" - "math" - "os" - "path" - "path/filepath" - "strings" - "time" - - "github.com/gdamore/tcell/v2" - "github.com/google/uuid" - "github.com/simulot/immich-go/browser" - "github.com/simulot/immich-go/browser/files" - "github.com/simulot/immich-go/browser/gp" - "github.com/simulot/immich-go/cmd" - "github.com/simulot/immich-go/helpers/fileevent" - "github.com/simulot/immich-go/helpers/fshelper" - "github.com/simulot/immich-go/helpers/gen" - "github.com/simulot/immich-go/helpers/myflag" - "github.com/simulot/immich-go/helpers/namematcher" - "github.com/simulot/immich-go/helpers/stacking" - "github.com/simulot/immich-go/immich" - "github.com/simulot/immich-go/internal/fakefs" -) - -type UpCmd struct { - *cmd.SharedFlags // shared flags and immich client - - fsyss []fs.FS // pseudo file system to browse - - GooglePhotos bool // For reading Google Photos takeout files - Delete bool // Delete original file after import - CreateAlbumAfterFolder 
bool // Create albums for assets based on the parent folder or a given name - UseFullPathAsAlbumName bool // Create albums for assets based on the full path to the asset - AlbumNamePathSeparator string // Determines how multiple (sub) folders, if any, will be joined - ImportIntoAlbum string // All assets will be added to this album - PartnerAlbum string // Partner's assets will be added to this album - Import bool // Import instead of upload - DeviceUUID string // Set a device UUID - Paths []string // Path to explore - DateRange immich.DateRange // Set capture date range - ImportFromAlbum string // Import assets from this albums - CreateAlbums bool // Create albums when exists in the source - KeepTrashed bool // Import trashed assets - KeepPartner bool // Import partner's assets - KeepUntitled bool // Keep untitled albums - UseFolderAsAlbumName bool // Use folder's name instead of metadata's title as Album name - DryRun bool // Display actions but don't change anything - CreateStacks bool // Stack jpg/raw/burst (Default: TRUE) - StackJpgRaws bool // Stack jpg/raw (Default: TRUE) - StackBurst bool // Stack burst (Default: TRUE) - DiscardArchived bool // Don't import archived assets (Default: FALSE) - AutoArchive bool // Automatically archive photos that are also archived in google photos (Default: TRUE) - WhenNoDate string // When the date can't be determined use the FILE's date or NOW (default: FILE) - ForceUploadWhenNoJSON bool // Some takeout don't supplies all JSON. 
When true, files are uploaded without any additional metadata - BannedFiles namematcher.List // List of banned file name patterns - - BrowserConfig Configuration - - albums map[string]immich.AlbumSimplified // Albums by title - - AssetIndex *AssetIndex // List of assets present on the server - deleteServerList []*immich.Asset // List of server assets to remove - deleteLocalList []*browser.LocalAssetFile // List of local assets to remove - // updateAlbums map[string]map[string]any // track immich albums changes - stacks *stacking.StackBuilder - browser browser.Browser -} - -func UploadCommand(ctx context.Context, common *cmd.SharedFlags, args []string) error { - app, err := newCommand(ctx, common, args, nil) - if err != nil { - return err - } - if len(app.fsyss) == 0 { - return nil - } - return app.run(ctx) -} - -type fsOpener func() ([]fs.FS, error) - -func newCommand(ctx context.Context, common *cmd.SharedFlags, args []string, fsOpener fsOpener) (*UpCmd, error) { - var err error - cmd := flag.NewFlagSet("upload", flag.ExitOnError) - - app := UpCmd{ - SharedFlags: common, - } - app.BannedFiles, err = namematcher.New( - `@eaDir/`, - `@__thumb/`, // QNAP - `SYNOFILE_THUMB_*.*`, // SYNOLOGY - `Lightroom Catalog/`, // LR - `thumbnails/`, // Android photo - `.DS_Store/`, // Mac OS custom attributes - ) - if err != nil { - return nil, err - } - - app.SharedFlags.SetFlags(cmd) - cmd.BoolFunc( - "dry-run", - "display actions but don't touch source or destination", - myflag.BoolFlagFn(&app.DryRun, false)) - cmd.Var(&app.DateRange, - "date", - "Date of capture range.") - cmd.StringVar(&app.ImportIntoAlbum, - "album", - "", - "All assets will be added to this album.") - cmd.BoolFunc( - "create-album-folder", - " folder import only: Create albums for assets based on the parent folder", - myflag.BoolFlagFn(&app.CreateAlbumAfterFolder, false)) - cmd.BoolFunc( - "use-full-path-album-name", - " folder import only: Use the full path towards the asset for determining the Album 
name", - myflag.BoolFlagFn(&app.UseFullPathAsAlbumName, false)) - cmd.StringVar(&app.AlbumNamePathSeparator, - "album-name-path-separator", - " ", - " when use-full-path-album-name = true, determines how multiple (sub) folders, if any, will be joined") - cmd.BoolFunc( - "google-photos", - "Import GooglePhotos takeout zip files", - myflag.BoolFlagFn(&app.GooglePhotos, false)) - cmd.BoolFunc( - "create-albums", - " google-photos only: Create albums like there were in the source (default: TRUE)", - myflag.BoolFlagFn(&app.CreateAlbums, true)) - cmd.StringVar(&app.PartnerAlbum, - "partner-album", - "", - " google-photos only: Assets from partner will be added to this album. (ImportIntoAlbum, must already exist)") - cmd.BoolFunc( - "keep-partner", - " google-photos only: Import also partner's items (default: TRUE)", myflag.BoolFlagFn(&app.KeepPartner, true)) - cmd.StringVar(&app.ImportFromAlbum, - "from-album", - "", - " google-photos only: Import only from this album") - - cmd.BoolFunc( - "keep-untitled-albums", - " google-photos only: Keep Untitled albums and imports their contain (default: FALSE)", myflag.BoolFlagFn(&app.KeepUntitled, false)) - - cmd.BoolFunc( - "use-album-folder-as-name", - " google-photos only: Use folder name and ignore albums' title (default:FALSE)", myflag.BoolFlagFn(&app.UseFolderAsAlbumName, false)) - - cmd.BoolFunc( - "discard-archived", - " google-photos only: Do not import archived photos (default FALSE)", myflag.BoolFlagFn(&app.DiscardArchived, false)) - - cmd.BoolFunc( - "auto-archive", - " google-photos only: Automatically archive photos that are also archived in google photos (default TRUE)", myflag.BoolFlagFn(&app.AutoArchive, true)) - - cmd.BoolFunc( - "create-stacks", - "Stack jpg/raw or bursts (default FALSE)", myflag.BoolFlagFn(&app.CreateStacks, false)) - - cmd.BoolFunc( - "stack-jpg-raw", - "Control the stacking of jpg/raw photos (default TRUE)", myflag.BoolFlagFn(&app.StackJpgRaws, false)) - cmd.BoolFunc( - "stack-burst", - 
"Control the stacking bursts (default TRUE)", myflag.BoolFlagFn(&app.StackBurst, false)) - - // cmd.BoolVar(&app.Delete, "delete", false, "Delete local assets after upload") - - cmd.Var(&app.BrowserConfig.SelectExtensions, "select-types", "list of selected extensions separated by a comma") - cmd.Var(&app.BrowserConfig.ExcludeExtensions, "exclude-types", "list of excluded extensions separated by a comma") - - cmd.StringVar(&app.WhenNoDate, - "when-no-date", - "FILE", - " When the date of take can't be determined, use the FILE's date or the current time NOW. (default: FILE)") - - cmd.Var(&app.BannedFiles, "exclude-files", "Ignore files based on a pattern. Case insensitive. Add one option for each pattern do you need.") - - cmd.BoolVar(&app.ForceUploadWhenNoJSON, "upload-when-missing-JSON", app.ForceUploadWhenNoJSON, "when true, photos are upload even without associated JSON file.") - cmd.BoolVar(&app.DebugFileList, "debug-file-list", app.DebugFileList, "Check how the your file list would be processed") - - err = cmd.Parse(args) - if err != nil { - return nil, err - } - - if app.DebugFileList { - if len(cmd.Args()) < 2 { - return nil, fmt.Errorf("the option -debug-file-list requires a file name and a date format") - } - app.LogFile = strings.TrimSuffix(cmd.Arg(0), filepath.Ext(cmd.Arg(0))) + ".log" - _ = os.Remove(app.LogFile) - - fsOpener = func() ([]fs.FS, error) { - return fakefs.ScanFileList(cmd.Arg(0), cmd.Arg(1)) - } - } else { - } - - app.WhenNoDate = strings.ToUpper(app.WhenNoDate) - switch app.WhenNoDate { - case "FILE", "NOW": - default: - return nil, fmt.Errorf("the -when-no-date accepts FILE or NOW") - } - - app.BrowserConfig.Validate() - err = app.SharedFlags.Start(ctx) - if err != nil { - return nil, err - } - - if fsOpener == nil { - fsOpener = func() ([]fs.FS, error) { - return fshelper.ParsePath(cmd.Args()) - } - } - app.fsyss, err = fsOpener() - if err != nil { - return nil, err - } - if len(app.fsyss) == 0 { - fmt.Println("No file found matching the 
pattern: ", strings.Join(cmd.Args(), ",")) - app.Log.Info("No file found matching the pattern: " + strings.Join(cmd.Args(), ",")) - } - return &app, nil -} - -func (app *UpCmd) run(ctx context.Context) error { - defer func() { - _ = fshelper.CloseFSs(app.fsyss) - }() - - if app.CreateStacks || app.StackBurst || app.StackJpgRaws { - app.stacks = stacking.NewStackBuilder(app.Immich.SupportedMedia()) - } - - var err error - switch { - case app.GooglePhotos: - app.Log.Info("Browsing google take out archive...") - app.browser, err = app.ReadGoogleTakeOut(ctx, app.fsyss) - default: - app.Log.Info("Browsing folder(s)...") - app.browser, err = app.ExploreLocalFolder(ctx, app.fsyss) - } - - if err != nil { - return err - } - - defer func() { - if app.DebugCounters { - fn := strings.TrimSuffix(app.LogFile, filepath.Ext(app.LogFile)) + ".csv" - f, err := os.Create(fn) - if err == nil { - _ = app.Jnl.WriteFileCounts(f) - fmt.Println("\nCheck the counters file: ", f.Name()) - f.Close() - } - } - }() - - if app.NoUI { - return app.runNoUI(ctx) - } - - _, err = tcell.NewScreen() - if err != nil { - app.Log.Error("can't initialize the screen for the UI mode. Falling back to no-gui mode") - fmt.Println("can't initialize the screen for the UI mode. 
Falling back to no-gui mode") - return app.runNoUI(ctx) - } - return app.runUI(ctx) -} - -func (app *UpCmd) getImmichAlbums(ctx context.Context) error { - serverAlbums, err := app.Immich.GetAllAlbums(ctx) - app.albums = map[string]immich.AlbumSimplified{} - if err != nil { - return fmt.Errorf("can't get the album list from the server: %w", err) - } - for _, a := range serverAlbums { - select { - case <-ctx.Done(): - return ctx.Err() - default: - app.albums[a.AlbumName] = a - } - } - return nil -} - -func (app *UpCmd) getImmichAssets(ctx context.Context, updateFn progressUpdate) error { - statistics, err := app.Immich.GetAssetStatistics(ctx) - if err != nil { - return err - } - totalOnImmich := statistics.Total - received := 0 - - var list []*immich.Asset - - err = app.Immich.GetAllAssetsWithFilter(ctx, func(a *immich.Asset) error { - select { - case <-ctx.Done(): - return ctx.Err() - default: - received++ - list = append(list, a) - if updateFn != nil { - updateFn(received, totalOnImmich) - } - return nil - } - }) - if err != nil { - return err - } - if updateFn != nil { - updateFn(totalOnImmich, totalOnImmich) - } - app.AssetIndex = &AssetIndex{ - assets: list, - } - app.AssetIndex.ReIndex() - return nil -} - -func (app *UpCmd) uploadLoop(ctx context.Context) error { - var err error - assetChan := app.browser.Browse(ctx) -assetLoop: - for { - select { - case <-ctx.Done(): - return ctx.Err() - - case a, ok := <-assetChan: - if !ok { - break assetLoop - } - if a.Err != nil { - app.Jnl.Record(ctx, fileevent.Error, a, a.FileName, a.Err.Error()) - } else { - err = app.handleAsset(ctx, a) - if err != nil { - app.Jnl.Record(ctx, fileevent.Error, a, a.FileName, a.Err.Error()) - } - } - } - } - - if app.CreateStacks { - stacks := app.stacks.Stacks() - if len(stacks) > 0 { - app.Log.Info("Creating stacks") - nextStack: - for _, s := range stacks { - switch { - case !app.StackBurst && s.StackType == stacking.StackBurst: - continue nextStack - case !app.StackJpgRaws && 
s.StackType == stacking.StackRawJpg: - continue nextStack - } - app.Log.Info(fmt.Sprintf("Stacking %s...", strings.Join(s.Names, ", "))) - if !app.DryRun { - err = app.Immich.StackAssets(ctx, s.CoverID, s.IDs) - if err != nil { - app.Log.Error(fmt.Sprintf("Can't stack images: %s", err)) - } - } - } - } - } - - // if app.CreateAlbums || app.CreateAlbumAfterFolder || (app.KeepPartner && app.PartnerAlbum != "") || app.ImportIntoAlbum != "" { - // app.Log.Info("Managing albums") - // err = app.ManageAlbums(ctx) - // if err != nil { - // app.Log.Error(err.Error()) - // err = nil - // } - // } - - if len(app.deleteServerList) > 0 { - ids := []string{} - for _, da := range app.deleteServerList { - ids = append(ids, da.ID) - } - err := app.DeleteServerAssets(ctx, ids) - if err != nil { - return fmt.Errorf("can't delete server's assets: %w", err) - } - } - - if len(app.deleteLocalList) > 0 { - err = app.DeleteLocalAssets() - } - - return err -} - -func (app *UpCmd) handleAsset(ctx context.Context, a *browser.LocalAssetFile) error { - defer func() { - a.Close() - }() - ext := path.Ext(a.FileName) - if app.BrowserConfig.ExcludeExtensions.Exclude(ext) { - app.Jnl.Record(ctx, fileevent.UploadNotSelected, a, a.FileName, "reason", "extension in rejection list") - return nil - } - if !app.BrowserConfig.SelectExtensions.Include(ext) { - app.Jnl.Record(ctx, fileevent.UploadNotSelected, a, a.FileName, "reason", "extension not in selection list") - return nil - } - - if !app.KeepPartner && a.FromPartner { - app.Jnl.Record(ctx, fileevent.UploadNotSelected, a, a.FileName, "reason", "partners asset excluded") - return nil - } - - if !app.KeepTrashed && a.Trashed { - app.Jnl.Record(ctx, fileevent.UploadNotSelected, a, a.FileName, "reason", "trashed asset excluded") - return nil - } - - if app.ImportFromAlbum != "" && !app.isInAlbum(a, app.ImportFromAlbum) { - app.Jnl.Record(ctx, fileevent.UploadNotSelected, a.FileName, "reason", "doesn't belong to required album") - return nil - } - - if 
app.DiscardArchived && a.Archived { - app.Jnl.Record(ctx, fileevent.UploadNotSelected, a, a.FileName, "reason", "archived asset are discarded") - return nil - } - - if app.DateRange.IsSet() { - d := a.Metadata.DateTaken - if d.IsZero() { - app.Jnl.Record(ctx, fileevent.UploadNotSelected, a, a.FileName, "reason", "date of capture is unknown") - return nil - } - if !app.DateRange.InRange(d) { - app.Jnl.Record(ctx, fileevent.UploadNotSelected, a, a.FileName, "reason", "date of capture is out of the given range") - return nil - } - } - - if !app.KeepUntitled { - a.Albums = gen.Filter(a.Albums, func(i browser.LocalAlbum) bool { - return i.Title != "" - }) - } - - advice, err := app.AssetIndex.ShouldUpload(a) - if err != nil { - return err - } - - switch advice.Advice { - case NotOnServer: // Upload and manage albums - ID, err := app.UploadAsset(ctx, a) - if err != nil { - return nil - } - app.manageAssetAlbum(ctx, ID, a, advice) - - case SmallerOnServer: // Upload, manage albums and delete the server's asset - app.Jnl.Record(ctx, fileevent.UploadUpgraded, a, a.FileName, "reason", advice.Message) - // add the superior asset into albums of the original asset. 
- ID, err := app.UploadAsset(ctx, a) - if err != nil { - return nil - } - app.manageAssetAlbum(ctx, ID, a, advice) - // delete the existing lower quality asset - err = app.deleteAsset(ctx, advice.ServerAsset.ID) - if err != nil { - app.Jnl.Record(ctx, fileevent.Error, a, a.FileName, "error", err.Error()) - } - - case SameOnServer: // manage albums - // Set add the server asset into albums determined locally - if !advice.ServerAsset.JustUploaded { - app.Jnl.Record(ctx, fileevent.UploadServerDuplicate, a, a.FileName, "reason", advice.Message) - } else { - app.Jnl.Record(ctx, fileevent.AnalysisLocalDuplicate, a, a.FileName) - } - app.manageAssetAlbum(ctx, advice.ServerAsset.ID, a, advice) - - case BetterOnServer: // and manage albums - app.Jnl.Record(ctx, fileevent.UploadServerBetter, a, a.FileName, "reason", advice.Message) - app.manageAssetAlbum(ctx, advice.ServerAsset.ID, a, advice) - } - - return nil -} - -func (app *UpCmd) deleteAsset(ctx context.Context, id string) error { - return app.Immich.DeleteAssets(ctx, []string{id}, true) -} - -// manageAssetAlbum keep the albums updated -// errors are logged, but not returned -func (app *UpCmd) manageAssetAlbum(ctx context.Context, assetID string, a *browser.LocalAssetFile, advice *Advice) { - addedTo := map[string]any{} - if advice.ServerAsset != nil { - for _, al := range advice.ServerAsset.Albums { - app.Jnl.Record(ctx, fileevent.UploadAddToAlbum, a, a.FileName, "album", al.AlbumName, "reason", "lower quality asset's album") - if !app.DryRun { - err := app.AddToAlbum(ctx, assetID, browser.LocalAlbum{Title: al.AlbumName, Description: al.Description}) - if err != nil { - app.Jnl.Record(ctx, fileevent.Error, a, a.FileName, "error", err.Error()) - } - } - addedTo[al.AlbumName] = nil - } - } - - if app.CreateAlbums { - for _, al := range a.Albums { - album := al.Title - if app.GooglePhotos && (app.CreateAlbumAfterFolder || app.UseFolderAsAlbumName || album == "") { - album = filepath.Base(al.Path) - } - if _, exist := 
addedTo[album]; !exist { - app.Jnl.Record(ctx, fileevent.UploadAddToAlbum, a, a.FileName, "album", album) - if !app.DryRun { - err := app.AddToAlbum(ctx, assetID, browser.LocalAlbum{Title: album}) - if err != nil { - app.Jnl.Record(ctx, fileevent.Error, a, a.FileName, "error", err.Error()) - } - } - } - } - } - if app.ImportIntoAlbum != "" { - app.Jnl.Record(ctx, fileevent.UploadAddToAlbum, a, a.FileName, "album", app.ImportIntoAlbum, "reason", "option -album") - if !app.DryRun { - err := app.AddToAlbum(ctx, assetID, browser.LocalAlbum{Title: app.ImportIntoAlbum}) - if err != nil { - app.Jnl.Record(ctx, fileevent.Error, a, a.FileName, "error", err.Error()) - } - } - } - - if app.GooglePhotos { - if app.PartnerAlbum != "" && a.FromPartner { - app.Jnl.Record(ctx, fileevent.UploadAddToAlbum, a, a.FileName, "album", app.PartnerAlbum, "reason", "option -partner-album") - if !app.DryRun { - err := app.AddToAlbum(ctx, assetID, browser.LocalAlbum{Title: app.PartnerAlbum}) - if err != nil { - app.Jnl.Record(ctx, fileevent.Error, a, a.FileName, "error", err.Error()) - } - } - } - } else { - if app.CreateAlbumAfterFolder { - album := path.Base(path.Dir(a.FileName)) - if !app.GooglePhotos && app.UseFullPathAsAlbumName { - // full path - album = strings.Replace(filepath.Dir(a.FileName), string(os.PathSeparator), app.AlbumNamePathSeparator, -1) - } - if album == "" || album == "." 
{ - if fsys, ok := a.FSys.(fshelper.NameFS); ok { - album = fsys.Name() - } else { - album = "no-folder-name" - } - } - app.Jnl.Record(ctx, fileevent.UploadAddToAlbum, a, a.FileName, "album", album, "reason", "option -create-album-folder") - if !app.DryRun { - err := app.AddToAlbum(ctx, assetID, browser.LocalAlbum{Title: album}) - if err != nil { - app.Jnl.Record(ctx, fileevent.Error, a, a.FileName, "error", err.Error()) - } - } - } - } -} - -func (app *UpCmd) isInAlbum(a *browser.LocalAssetFile, album string) bool { - for _, al := range a.Albums { - if app.albumName(al) == album { - return true - } - } - return false -} - -func (app *UpCmd) ReadGoogleTakeOut(ctx context.Context, fsyss []fs.FS) (browser.Browser, error) { - app.Delete = false - b, err := gp.NewTakeout(ctx, app.Jnl, app.Immich.SupportedMedia(), fsyss...) - if err != nil { - return nil, err - } - b.SetBannedFiles(app.BannedFiles) - b.SetAcceptMissingJSON(app.ForceUploadWhenNoJSON) - return b, err -} - -func (app *UpCmd) ExploreLocalFolder(ctx context.Context, fsyss []fs.FS) (browser.Browser, error) { - b, err := files.NewLocalFiles(ctx, app.Jnl, fsyss...) 
- if err != nil { - return nil, err - } - b.SetSupportedMedia(app.Immich.SupportedMedia()) - b.SetWhenNoDate(app.WhenNoDate) - b.SetBannedFiles(app.BannedFiles) - return b, nil -} - -// UploadAsset upload the asset on the server -// Add the assets into listed albums -// return ID of the asset -func (app *UpCmd) UploadAsset(ctx context.Context, a *browser.LocalAssetFile) (string, error) { - var resp, liveResp immich.AssetResponse - var err error - if !app.AutoArchive && a.Archived { - a.Archived = false - } - if !app.DryRun { - if a.LivePhoto != nil { - liveResp, err = app.Immich.AssetUpload(ctx, a.LivePhoto) - if err == nil { - if liveResp.Status == immich.UploadDuplicate { - app.Jnl.Record(ctx, fileevent.UploadServerDuplicate, a.LivePhoto, a.LivePhoto.FileName, "info", "the server has this file") - } else { - app.Jnl.Record(ctx, fileevent.Uploaded, a.LivePhoto, a.LivePhoto.FileName) - } - a.LivePhotoID = liveResp.ID - } else { - app.Jnl.Record(ctx, fileevent.UploadServerError, a.LivePhoto, a.LivePhoto.FileName, "error", err.Error()) - } - } - b := *a // Keep a copy of the asset to log errors specifically on the image - resp, err = app.Immich.AssetUpload(ctx, a) - if err == nil { - if resp.Status == immich.UploadDuplicate { - app.Jnl.Record(ctx, fileevent.UploadServerDuplicate, a, a.FileName, "info", "the server has this file") - } else { - b.LivePhoto = nil - app.Jnl.Record(ctx, fileevent.Uploaded, &b, b.FileName, "capture date", b.Metadata.DateTaken.String()) - } - } else { - app.Jnl.Record(ctx, fileevent.UploadServerError, a, a.FileName, "error", err.Error()) - return "", err - } - } else { - // dry-run mode - if a.LivePhoto != nil { - liveResp.ID = uuid.NewString() - } - resp.ID = uuid.NewString() - app.Jnl.Record(ctx, fileevent.Uploaded, a, a.FileName, "capture date", a.Metadata.DateTaken.String()) - } - if resp.Status != immich.UploadDuplicate { - if a.LivePhoto != nil && liveResp.ID != "" { - app.AssetIndex.AddLocalAsset(a, liveResp.ID) - } - 
app.AssetIndex.AddLocalAsset(a, resp.ID) - if app.CreateStacks { - app.stacks.ProcessAsset(resp.ID, a.FileName, a.Metadata.DateTaken) - } - } - - return resp.ID, nil -} - -func (app *UpCmd) albumName(al browser.LocalAlbum) string { - Name := al.Title - if app.GooglePhotos { - switch { - case app.UseFolderAsAlbumName: - Name = path.Base(al.Path) - case app.KeepUntitled && Name == "": - Name = path.Base(al.Path) - } - } - return Name -} - -// AddToAlbum add the ID to the immich album having the same name as the local album -func (app *UpCmd) AddToAlbum(ctx context.Context, id string, album browser.LocalAlbum) error { - title := album.Title - - l, exist := app.albums[title] - if !exist { - a, err := app.Immich.CreateAlbum(ctx, title, album.Description, []string{id}) - if err != nil { - return err - } - app.albums[title] = immich.AlbumSimplified{ID: a.ID, AlbumName: a.AlbumName, Description: a.Description} - } else { - _, err := app.Immich.AddAssetToAlbum(ctx, l.ID, []string{id}) - if err != nil { - return err - } - } - return nil -} - -func (app *UpCmd) DeleteLocalAssets() error { - app.Log.Info(fmt.Sprintf("%d local assets to delete.", len(app.deleteLocalList))) - - for _, a := range app.deleteLocalList { - if !app.DryRun { - app.Log.Info(fmt.Sprintf("delete file %q", a.Title)) - err := a.Remove() - if err != nil { - return err - } - } else { - app.Log.Info(fmt.Sprintf("file %q not deleted, dry run mode.", a.Title)) - } - } - return nil -} - -func (app *UpCmd) DeleteServerAssets(ctx context.Context, ids []string) error { - app.Log.Info(fmt.Sprintf("%d server assets to delete.", len(ids))) - - if !app.DryRun { - err := app.Immich.DeleteAssets(ctx, ids, false) - return err - } - app.Log.Info(fmt.Sprintf("%d server assets to delete. 
skipped dry-run mode", len(ids))) - return nil -} - -/* -func (app *UpCmd) ManageAlbums(ctx context.Context) error { - if len(app.updateAlbums) > 0 { - serverAlbums, err := app.Immich.GetAllAlbums(ctx) - if err != nil { - return fmt.Errorf("can't get the album list from the server: %w", err) - } - for album, list := range app.updateAlbums { - found := false - for _, sal := range serverAlbums { - if sal.AlbumName == album { - found = true - if !app.DryRun { - app.Log.Info(fmt.Sprintf("Update the album %s", album)) - rr, err := app.Immich.AddAssetToAlbum(ctx, sal.ID, gen.MapKeys(list)) - if err != nil { - return fmt.Errorf("can't update the album list from the server: %w", err) - } - added := 0 - for _, r := range rr { - if r.Success { - added++ - } - if !r.Success && r.Error != "duplicate" { - app.Log.Info(fmt.Sprintf("%s: %s", r.ID, r.Error)) - } - } - if added > 0 { - app.Log.Info(fmt.Sprintf("%d asset(s) added to the album %q", added, album)) - } - } else { - app.Log.Info(fmt.Sprintf("Update album %s skipped - dry run mode", album)) - } - } - } - if found { - continue - } - if list != nil { - if !app.DryRun { - app.Log.Info(fmt.Sprintf("Create the album %s", album)) - - _, err := app.Immich.CreateAlbum(ctx, album, gen.MapKeys(list)) - if err != nil { - return fmt.Errorf("can't create the album list from the server: %w", err) - } - } else { - app.Log.Info(fmt.Sprintf("Create the album %s skipped - dry run mode", album)) - } - } - } - } - return nil -} -*/ -// - - go:generate stringer -type=AdviceCode -type AdviceCode int - -func (a AdviceCode) String() string { - switch a { - case IDontKnow: - return "IDontKnow" - // case SameNameOnServerButNotSure: - // return "SameNameOnServerButNotSure" - case SmallerOnServer: - return "SmallerOnServer" - case BetterOnServer: - return "BetterOnServer" - case SameOnServer: - return "SameOnServer" - case NotOnServer: - return "NotOnServer" - } - return fmt.Sprintf("advice(%d)", a) -} - -const ( - IDontKnow AdviceCode = iota - 
SmallerOnServer - BetterOnServer - SameOnServer - NotOnServer -) - -type Advice struct { - Advice AdviceCode - Message string - ServerAsset *immich.Asset - LocalAsset *browser.LocalAssetFile -} - -func formatBytes(s int) string { - suffixes := []string{"B", "KB", "MB", "GB"} - bytes := float64(s) - base := 1024.0 - if bytes < base { - return fmt.Sprintf("%.0f %s", bytes, suffixes[0]) - } - exp := int64(0) - for bytes >= base && exp < int64(len(suffixes)-1) { - bytes /= base - exp++ - } - roundedSize := math.Round(bytes*10) / 10 - return fmt.Sprintf("%.1f %s", roundedSize, suffixes[exp]) -} - -func (ai *AssetIndex) adviceSameOnServer(sa *immich.Asset) *Advice { - return &Advice{ - Advice: SameOnServer, - Message: fmt.Sprintf("An asset with the same name:%q, date:%q and size:%s exists on the server. No need to upload.", sa.OriginalFileName, sa.ExifInfo.DateTimeOriginal.Format(time.DateTime), formatBytes(sa.ExifInfo.FileSizeInByte)), - ServerAsset: sa, - } -} - -func (ai *AssetIndex) adviceSmallerOnServer(sa *immich.Asset) *Advice { - return &Advice{ - Advice: SmallerOnServer, - Message: fmt.Sprintf("An asset with the same name:%q and date:%q but with smaller size:%s exists on the server. Replace it.", sa.OriginalFileName, sa.ExifInfo.DateTimeOriginal.Format(time.DateTime), formatBytes(sa.ExifInfo.FileSizeInByte)), - ServerAsset: sa, - } -} - -func (ai *AssetIndex) adviceBetterOnServer(sa *immich.Asset) *Advice { - return &Advice{ - Advice: BetterOnServer, - Message: fmt.Sprintf("An asset with the same name:%q and date:%q but with bigger size:%s exists on the server. 
No need to upload.", sa.OriginalFileName, sa.ExifInfo.DateTimeOriginal.Format(time.DateTime), formatBytes(sa.ExifInfo.FileSizeInByte)), - ServerAsset: sa, - } -} - -func (ai *AssetIndex) adviceNotOnServer() *Advice { - return &Advice{ - Advice: NotOnServer, - Message: "This a new asset, upload it.", - } -} - -// ShouldUpload check if the server has this asset -// -// The server may have different assets with the same name. This happens with photos produced by digital cameras. -// The server may have the asset, but in lower resolution. Compare the taken date and resolution - -func (ai *AssetIndex) ShouldUpload(la *browser.LocalAssetFile) (*Advice, error) { - filename := la.Title - if path.Ext(filename) == "" { - filename += path.Ext(la.FileName) - } - - ID := la.DeviceAssetID() - - sa := ai.byID[ID] - if sa != nil { - // the same ID exist on the server - return ai.adviceSameOnServer(sa), nil - } - - var l []*immich.Asset - - // check all files with the same name - - n := filepath.Base(filename) - l = ai.byName[n] - if len(l) == 0 { - // n = strings.TrimSuffix(n, filepath.Ext(n)) - l = ai.byName[n] - } - - if len(l) > 0 { - dateTaken := la.Metadata.DateTaken - size := int(la.Size()) - - for _, sa = range l { - compareDate := compareDate(dateTaken, sa.ExifInfo.DateTimeOriginal.Time) - compareSize := size - sa.ExifInfo.FileSizeInByte - - switch { - case compareDate == 0 && compareSize == 0: - return ai.adviceSameOnServer(sa), nil - case compareDate == 0 && compareSize > 0: - return ai.adviceSmallerOnServer(sa), nil - case compareDate == 0 && compareSize < 0: - return ai.adviceBetterOnServer(sa), nil - } - } - } - return ai.adviceNotOnServer(), nil -} - -func compareDate(d1 time.Time, d2 time.Time) int { - diff := d1.Sub(d2) - - switch { - case diff < -5*time.Minute: - return -1 - case diff >= 5*time.Minute: - return +1 - } - return 0 -} diff --git a/cpu.prof b/cpu.prof deleted file mode 100644 index 829e26ee..00000000 Binary files a/cpu.prof and /dev/null differ diff 
--git a/docs/environment.md b/docs/environment.md new file mode 100644 index 00000000..52d4f5a6 --- /dev/null +++ b/docs/environment.md @@ -0,0 +1,9 @@ +# Immich-go environment variables + +The following environment variables can be set to configure the behavior of Immich-go: + + +| Environment Variable | Description | +| -------------------- | --------------------------------------------------------------------------------------------------------------------------------- | +| `IMMICHGO_SERVER` | Immich server URL with the format `http://:`. Example: `https://mynas:2283` | +| `IMMICHGO_APIKEY` | Immich API key. Check the [documentation](https://immich.app/docs/features/command-line-interface#obtain-the-api-key) to get one. | diff --git a/docs/generate.go b/docs/generate.go new file mode 100644 index 00000000..93f80485 --- /dev/null +++ b/docs/generate.go @@ -0,0 +1,30 @@ +package main + +import ( + "context" + "fmt" + + "github.com/simulot/immich-go/app" + "github.com/simulot/immich-go/app/cmd" + "github.com/spf13/cobra" + "github.com/spf13/cobra/doc" +) + +/* Generate documentation for the command */ + +func main() { + ctx := context.Background() + c := &cobra.Command{ + Use: "immich-go", + Short: "Immich-go is a command line application to interact with the Immich application using its API", + Long: `An alternative to the immich-CLI command that doesn't depend on nodejs installation. 
It tries its best for importing google photos takeout archives.`, + Version: app.Version, + } + cobra.EnableTraverseRunHooks = true // doc: cobra/site/content/user_guide.md + a := app.New(ctx, c) + + // add immich-go commands + c.AddCommand(app.NewVersionCommand(ctx, a)) + cmd.AddCommands(c, ctx, a) + fmt.Println(doc.GenMarkdownTree(c, "../docs")) +} diff --git a/docs/google-takeout.md b/docs/google-takeout.md index c7ceabcf..ace36edf 100644 --- a/docs/google-takeout.md +++ b/docs/google-takeout.md @@ -84,6 +84,32 @@ takeout-20240712T112341Z-010.zip: | Russian | Google Фото | метаданные.json | | + + +## The "-edited" suffix in different languages +TODO: verify the translations + +Here is the list of translations for the "-edited" suffix in the requested languages presented in a table: + +| Language | Translation | Confirmed | +| ---------- | ---------------- | --------- | +| Spanish | -editado | [ ] | +| French | -modifié | [X] | +| German | -bearbeitet | [X] | +| Italian | -modificato | [X] | +| Portuguese | -editado | [ ] | +| Russian | -отредактировано | [ ] | +| Chinese | -编辑 | [ ] | +| Japanese | -編集済み | [ ] | +| Korean | -편집됨 | [ ] | +| Dutch | -bewerkt | [ ] | +| Finnish | -muokattu | [ ] | +| Ukrainian | -відредаговано | [ ] | +| Polish | -edytowane | [ ] | +| Danish | -redigeret | [ ] | +| Slovak | -upravené | [ ] | + + # What if I have problems with a takeout archive? Please open an issue with details. You can share your files using Discord DM `@simulot`. I'll check if I can improve the program. 
diff --git a/docs/refactoring.md b/docs/refactoring.md new file mode 100644 index 00000000..68a16d3f --- /dev/null +++ b/docs/refactoring.md @@ -0,0 +1,34 @@ +# Refactoring + +One year later, the necessity to refactor the code is obvious: +- spaghetti code +- poor adherence to single responsibility rule +- new requests from the users + + +## For better architecture + +- adapters: adapters for reading and writing photo collection +- cmd: immich-go commands +- immich: immich client +- internal: better not look inside + + +## Refactoring of the command line + +### Use of linux traditional command-line options with a double-dash `--option` + + + +### Reorganization of the commands + +immich-go [global flags] command sub-command [flags] arguments + +ex: +immich-go --log-file=file import from-folder --server=xxxx --key=qqqqq --folder-as-album-name=PATH path/to/photos + + +## Better logging + + + diff --git a/docs/releases.md b/docs/releases.md index 71a29a33..540054b4 100644 --- a/docs/releases.md +++ b/docs/releases.md @@ -5,6 +5,304 @@ - [Github Sponsor page](https://github.com/sponsors/simulot) - [paypal donor page](https://www.paypal.com/donate/?hosted_button_id=VGU2SQE88T2T4) +## Release v0.23.0-alpha6 🏗️ Work in progress 🏗️ + +### New features + +**Folder import tags** +It's now possible to assign tags to photos and videos: +```sh +--folder-as-tags Use the folder structure as tags, (ex: the file "holidays/summer 2024/file.jpg" get the tag holidays/summer 2024) +--session-tag Tag uploaded photos with a tag "{immich-go}/YYYY-MM-DD HH-MM-SS" +--tag strings Add tags to the imported assets. Can be specified multiple times. Hierarchy is supported using a / separator (e.g. 'tag1/subtag1') +``` + +The session tag is useful to identify all photos imported at the same time.
It's easy to remove them from the tag screen + +**Google photos import tags** + +```sh +--takeout-tag Tag uploaded photos with the takeout file name: "{takeout}/takeout-YYYYMMDDTHHMMSSZ" +--session-tag Tag uploaded photos with a tag "{immich-go}/YYYY-MM-DD HH-MM-SS" +--tag strings Add tags to the imported assets. Can be specified multiple times. Hierarchy is supported using a / separator (e.g. 'tag1/subtag1') +``` + + +#### Breaking change since v0.23.0-alpha5 +A metadata file is created with the same name as the main file, but with the extension `.json`. The XMP file is left untouched. + + +### Fixes +* [#533](https://github.com/simulot/immich-go/issues/533) RAW file metadata +The efforts for determining the capture date from the file name are useless. +Now the file date is provided to Immich as if the file was dropped on the immich's page. +The `--capture-date-method` is now set to `NONE` by default. +* [[#534](https://github.com/simulot/immich-go/issues/534)] Errors on windows + + +## Release 0.23.0-alpha5 🏗️ Work in progress 🏗️ + +### New features + +##### The command `archive --from-immich` archives the user content from an Immich into a folder structure + +```sh +Archive photos from Immich + +Usage: + immich-go archive from-immich [from-flags] [flags] + +Flags: + --from-album strings Get assets only from those albums, can be used multiple times + --from-api-key string API Key + --from-api-trace Enable trace of api calls + --from-client-timeout duration Set server calls timeout (default 5m0s) + --from-date-range date-range Get assets only within this date range (format: YYYY[-MM[-DD[,YYYY-MM-DD]]]) (default unset) + --from-server string Immich server address (example http://your-ip:2283 or https://your-domain) + --from-skip-verify-ssl Skip SSL verification + -h, --help help for from-immich + +Global Flags: + -l, --log-file string Write log messages into the file + --log-level string Log level (DEBUG|INFO|WARN|ERROR), default INFO (default "INFO") + --log-type
string Log formatted as text of JSON file (default "text") + -w, --write-to-folder string Path where to write the archive +``` +Comming soon: +--minimal-rating +--from-favorite +--from-trashed +--from-archived + +##### The command `upload --from-immich` uploads the user's content from another Immich +This command accepts the same flags as the `archive --from-immich` command. +It preserves albums and tags from the source Immich. + + + +## Release 0.23.0-alpha4 🏗️ Work in progress 🏗️ + +### New features + +#### New command `archive` +This command aims is to store photos and videos into a plain folder structure. The folder structure is YYYY/YYYY-MM/files, as following: + +```sh +tree . +. +├── 2011 +│   └── 2011-04 +│   ├── 20110430.CR2 +│   ├── 20110430.CR2.xmp +│   ├── 20110430.jpg +│   ├── 20110430.jpg.xmp +│   ├── IMG_2477.CR2 +│   ├── IMG_2477.CR2.xmp +│   ├── IMG_2478.CR2 +│   ├── IMG_2478.CR2.xmp +│   ├── IMG_2479.CR2 +│   └── IMG_2479.CR2.xmp +└── 2023 + ├── 2023-06 + │   ├── PXL_20230607_063000139.jpg + │   └── PXL_20230607_063000139.jpg.xmp + └── 2023-10 + ├── PXL_20231006_063029647.jpg + ├── PXL_20231006_063029647.jpg.xmp + ├── PXL_20231006_063851485.jpg + └── PXL_20231006_063851485.jpg.xmp +``` + +XMP files present in the source folder are copied in the destination folder. +Google Photos takeout JSON files are translated into customized XMP files and copied in the destination folder. +Those XMP files use a custom schema to store the Google Photos metadata: +```xml + + + + + + This is a title + 2023-10-10T01:11:00.000-04:00 + False + False + False + True + 3 + + + + + Vacation 2024 + Vacation 2024 hawaii and more + 19,49.23661N + 155,28.39525W + + + + + + + + + +``` + + + +The general syntax is: +```sh +.\immich-go archive from-xxx [from-xxx flags...] 
--write-to-folder +``` + +##### The command `archive --from-google-photos` archives a Google Photos takeout into a folder structure + +This command creates a folder structure in `/path/to/destination` with the result of the takeout analysis. +The resulting folder structure can be re-imported into immich-go with the command `upload from-google-photo path/to/archived-folder`. + +##### The command `archive --from-` archives a Google Photos takeout into a folder structure + +Example: +```sh +.\immich-go archive from-google-photos --include-partner --write-to-folder /path/to/destination /path/to/takeout*.zip +``` + +Coming soon: +- archiving an immich server into a folder. + + +#### Handling of scanned photos by Epson FastFoto +- `--manage-epson-fastfoto` Manage Epson FastFoto file (default: false) +
Group scanned photos in stacks + - Scan_0001.jpg Original photo + - Scan_0001_a.jpg Enhanced photo, the cover of the stack + - Scan_0001_b.jpg Back of the photo + +## Release 0.23.0-alpha3 🏗️ Work in progress 🏗️ + +### New features + +- `--manage-burst=BurstFlag` Manage burst photos. Possible values are: + - `StackKeepRaw` Discard JPEG files, and stack the RAW files (default) + - `StackKeepJPEG` Discard RAW files, and stack the JPEG files + - `Stack` Stack all photos, RAW and JPEG photos are imported in the same stack + +- `--manage-heic-jpeg=HeicJpgFlag` Manage coupled HEIC and JPEG files. Possible values: + - `KeepHeic` Keep only the HEIC files (default) + - `KeepJPG` Keep only the JPEG files + - `StackCoverHeic` Stack both, the HEIC file is the cover + - `StackCoverJPG` Stack both, the JPEG file is the cover + +- `--manage-raw-jpeg=RawJPGFlag` Manage coupled RAW and JPEG files. Possible values: + - `KeepRaw` Keep only the RAW files (default) + - `KeepJPG` Keep only the JPEG files + - `StackCoverRaw` Stack both, the RAW file is the cover + - `StackCoverJPG` Stack both, the JPEG file is the cover + + +## Release 0.23.0-alpha2 🏗️ Work in progress 🏗️ +This is an early version of immich-go version v0.23.0-alpha2 +Yes, v0.23.0-alpha2, and not v1.0.0-alpha2. Let's stick to the semantic versioning. + +- [x] better logging + - log levels are effective + - adoption of the structured log package + - the level DEBUG gives file details and metadata + - colored log on screen +- [x] clear separation between folder import and google import +- [x] adoption of the linux convention of double dashes flags +- [x] priority of EXIF data over file name for date capture +- code restructuring to enable further possibilities + - [ ] Upload from Picasa + - [ ] Exporting of google photos archive as a folder + +### Big changes for the best + +The toy project has grown up. The code has been refactored to be more modular and to allow further development.
The code is now more readable and maintainable. This opens the door to new features and new import possibilities. The down side is that the code is not backward compatible. The command line options have changed. + + +### Upload from folder options +``` +Upload photos from a folder + +Usage: + immich-go upload from-folder [flags] ... + +Flags: + --album-path-joiner string Specify a string to use when joining multiple folder names to create an album name (e.g. ' ',' - ') (default " / ") + --ban-file FileList Exclude a file based on a pattern (case-insensitive). Can be specified multiple times. (default '@eaDir/', '@__thumb/', 'SYNOFILE_THUMB_*.*', 'Lightroom Catalog/', 'thumbnails/', '.DS_Store/') + --capture-date-method DateMethod Specify the method to determine the capture date when not provided in a sidecar file. Options: NONE (do not attempt to determine), FILENAME (extract from filename), EXIF (extract from EXIF metadata), FILENAME-EXIF (try filename first, then EXIF), EXIF-FILENAME (try EXIF first, then filename) (default EXIF-FILENAME) + --date-range date-range Only import photos taken within the specified date range (default unset) + --exclude-extensions ExtensionList Comma-separated list of extension to exclude. (e.g. .gif,.PM) (default: none) + --exiftool-enabled Enable the use of the external 'exiftool' program (if installed and available in the system path) to extract EXIF metadata + --exiftool-path string Path to the ExifTool executable (default: search in system's PATH) + --exiftool-timezone timezone Timezone to use when parsing exif timestamps without timezone Options: LOCAL (use the system's local timezone), UTC (use UTC timezone), or a valid timezone name (e.g. America/New_York) (default Local) + --filename-timezone timezone Specify the timezone to use when detecting the date from the filename. Options: Local (use the system's local timezone), UTC (use UTC timezone), or a valid timezone name (e.g. 
America/New_York) (default Local) + --folder-as-album folderMode Import all files in albums defined by the folder structure. Can be set to 'FOLDER' to use the folder name as the album name, or 'PATH' to use the full path as the album name (default NONE) + -h, --help help for from-folder + --ignore-sidecar-files Don't upload sidecar with the photo. + --include-extensions ExtensionList Comma-separated list of extension to include. (e.g. .jpg,.heic) (default: all) + --into-album string Specify an album to import all files into + --recursive Explore the folder and all its sub-folders (default true) + +Global Flags: + --api string Immich api endpoint (example http://container_ip:3301) + -k, --api-key string API Key + --api-trace Enable trace of api calls + --client-timeout duration Set server calls timeout (default 5m0s) + --device-uuid string Set a device UUID (default "gl65") + --dry-run Simulate all actions + -l, --log-file string Write log messages into the file + --log-level string Log level (DEBUG|INFO|WARN|ERROR), default INFO (default "INFO") + --log-type string Log formatted as text of JSON file (default "text") + --no-ui Disable the user interface + -s, --server string Immich server address (example http://your-ip:2283 or https://your-domain) + --skip-verify-ssl Skip SSL verification + --time-zone string Override the system time zone +``` + +### Upload from a google-photos +``` +Upload photos either from a zipped Google Photos takeout or decompressed archive + +Usage: + immich-go upload from-google-photos [flags] | + +Flags: + --ban-file FileList Exclude a file based on a pattern (case-insensitive). Can be specified multiple times. + --date-range date-range Only import photos taken within the specified date range (default unset) + --exclude-extensions ExtensionList Comma-separated list of extension to exclude. (e.g. 
.gif,.PM) (default: none) + --from-album-name string Only import photos from the specified Google Photos album + -h, --help help for from-google-photos + -a, --include-archived Import archived Google Photos (default true) + --include-extensions ExtensionList Comma-separated list of extension to include. (e.g. .jpg,.heic) (default: all) + -p, --include-partner Import photos from your partner's Google Photos account (default true) + -t, --include-trashed Import photos that are marked as trashed in Google Photos + -u, --include-unmatched Import photos that do not have a matching JSON file in the takeout + --include-untitled-albums Include photos from albums without a title in the import process + --partner-shared-album string Add partner's photo to the specified album name + --sync-albums Automatically create albums in Immich that match the albums in your Google Photos takeout (default true) + +Global Flags: + --api string Immich api endpoint (example http://container_ip:3301) + -k, --api-key string API Key + --api-trace Enable trace of api calls + --client-timeout duration Set server calls timeout (default 5m0s) + --device-uuid string Set a device UUID (default "gl65") + --dry-run Simulate all actions + -l, --log-file string Write log messages into the file + --log-level string Log level (DEBUG|INFO|WARN|ERROR), default INFO (default "INFO") + --log-type string Log formatted as text of JSON file (default "text") + --no-ui Disable the user interface + -s, --server string Immich server address (example http://your-ip:2283 or https://your-domain) + --skip-verify-ssl Skip SSL verification + --time-zone string Override the system time zone +``` + + +## Release 0.23.0-alpha1 🏗️ Work in progress 🏗️ + + ## Release 0.22.1 ### Fixes: diff --git a/go.mod b/go.mod index f7424d00..a7529777 100644 --- a/go.mod +++ b/go.mod @@ -1,8 +1,6 @@ module github.com/simulot/immich-go -go 1.22 - -toolchain go1.22.5 +go 1.23 require ( github.com/gdamore/tcell/v2 v2.7.4 @@ -10,28 +8,55 @@ 
require ( github.com/joho/godotenv v1.5.1 github.com/kr/pretty v0.3.1 github.com/melbahja/goph v1.4.0 - github.com/navidys/tvxwidgets v0.7.0 + github.com/navidys/tvxwidgets v0.9.0 + github.com/phsym/console-slog v0.3.1 github.com/psanford/memfs v0.0.0-20230130182539-4dbf7e3e865e - github.com/rivo/tview v0.0.0-20240616192244-23476fa0bab2 + github.com/rivo/tview v0.0.0-20241103174730-c76f7879f592 github.com/rwcarlsen/goexif v0.0.0-20190401172101-9e8deecbddbd - github.com/telemachus/humane v0.6.0 + github.com/samber/slog-multi v1.2.4 + github.com/spf13/cobra v1.8.1 github.com/thlib/go-timezone-local v0.0.3 github.com/ttacon/chalk v0.0.0-20160626202418-22c06c80ed31 - golang.org/x/sync v0.8.0 + golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f + golang.org/x/sync v0.9.0 +) + +require ( + github.com/cpuguy83/go-md2man/v2 v2.0.4 // indirect + github.com/fsnotify/fsnotify v1.8.0 // indirect + github.com/hashicorp/hcl v1.0.0 // indirect + github.com/magiconair/properties v1.8.7 // indirect + github.com/mitchellh/mapstructure v1.5.0 // indirect + github.com/pelletier/go-toml/v2 v2.2.3 // indirect + github.com/russross/blackfriday/v2 v2.1.0 // indirect + github.com/sagikazarmark/locafero v0.6.0 // indirect + github.com/sagikazarmark/slog-shim v0.1.0 // indirect + github.com/samber/lo v1.47.0 // indirect + github.com/sourcegraph/conc v0.3.0 // indirect + github.com/spf13/afero v1.11.0 // indirect + github.com/spf13/cast v1.7.0 // indirect + github.com/subosito/gotenv v1.6.0 // indirect + go.uber.org/multierr v1.11.0 // indirect + gopkg.in/ini.v1 v1.67.0 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect ) require ( - github.com/gdamore/encoding v1.0.0 // indirect + github.com/clbanning/mxj/v2 v2.7.0 + github.com/gdamore/encoding v1.0.1 // indirect + github.com/inconshreveable/mousetrap v1.1.0 // indirect github.com/kr/fs v0.1.0 // indirect github.com/kr/text v0.2.0 // indirect github.com/lucasb-eyer/go-colorful v1.2.0 // indirect - github.com/mattn/go-runewidth v0.0.15 // 
indirect + github.com/mattn/go-runewidth v0.0.16 // indirect github.com/pkg/errors v0.9.1 // indirect - github.com/pkg/sftp v1.13.6 // indirect + github.com/pkg/sftp v1.13.7 // indirect github.com/rivo/uniseg v0.4.7 // indirect github.com/rogpeppe/go-internal v1.9.0 // indirect - golang.org/x/crypto v0.17.0 // indirect - golang.org/x/sys v0.20.0 // indirect - golang.org/x/term v0.20.0 // indirect - golang.org/x/text v0.15.0 // indirect + github.com/spf13/pflag v1.0.5 + github.com/spf13/viper v1.19.0 + golang.org/x/crypto v0.29.0 // indirect + golang.org/x/sys v0.27.0 // indirect + golang.org/x/term v0.26.0 // indirect + golang.org/x/text v0.20.0 // indirect ) diff --git a/go.sum b/go.sum index 501ed337..3b34edc5 100644 --- a/go.sum +++ b/go.sum @@ -1,22 +1,36 @@ +github.com/clbanning/mxj/v2 v2.7.0 h1:WA/La7UGCanFe5NpHF0Q3DNtnCsVoxbPKuyBNHWRyME= +github.com/clbanning/mxj/v2 v2.7.0/go.mod h1:hNiWqW14h+kc+MdF9C6/YoRfjEJoR3ou6tn/Qo+ve2s= +github.com/cpuguy83/go-md2man/v2 v2.0.4 h1:wfIWP927BUkWJb2NmU/kNDYIBTh/ziUX91+lVfRxZq4= +github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/gdamore/encoding v1.0.0 h1:+7OoQ1Bc6eTm5niUzBa0Ctsh6JbMW6Ra+YNuAtDBdko= +github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= +github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8= +github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0= 
+github.com/fsnotify/fsnotify v1.8.0 h1:dAwr6QBTBZIkG8roQaJjGof0pp0EeF+tNV7YBP3F/8M= +github.com/fsnotify/fsnotify v1.8.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0= github.com/gdamore/encoding v1.0.0/go.mod h1:alR0ol34c49FCSBLjhosxzcPHQbf2trDkoo5dl+VrEg= +github.com/gdamore/encoding v1.0.1 h1:YzKZckdBL6jVt2Gc+5p82qhrGiqMdG/eNs6Wy0u3Uhw= +github.com/gdamore/encoding v1.0.1/go.mod h1:0Z0cMFinngz9kS1QfMjCP8TY7em3bZYeeklsSDPivEo= github.com/gdamore/tcell/v2 v2.7.4 h1:sg6/UnTM9jGpZU+oFYAsDahfchWAFW8Xx2yFinNSAYU= github.com/gdamore/tcell/v2 v2.7.4/go.mod h1:dSXtXTSK0VsW1biw65DZLZ2NKr7j0qP/0J7ONmsraWg= -github.com/go-logr/logr v1.4.1 h1:pKouT5E8xu9zeFC39JXRDukb6JFQPXM5p5I91188VAQ= -github.com/go-logr/logr v1.4.1/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY= +github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= github.com/go-task/slim-sprig/v3 v3.0.0 h1:sUs3vkvUymDpBKi3qH1YSqBQk9+9D/8M2mN1vB6EwHI= github.com/go-task/slim-sprig/v3 v3.0.0/go.mod h1:W848ghGpv3Qj3dhTPRyJypKRiqCdHZiAzKg9hl15HA8= github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= -github.com/google/pprof v0.0.0-20240424215950-a892ee059fd6 h1:k7nVchz72niMH6YLQNvHSdIE7iqsQxK1P41mySCvssg= -github.com/google/pprof v0.0.0-20240424215950-a892ee059fd6/go.mod h1:kf6iHlnVGwgKolg33glAes7Yg/8iWP8ukqeldJSO7jw= +github.com/google/pprof v0.0.0-20240827171923-fa2c70bbbfe5 h1:5iH8iuqE5apketRbSFBy+X1V0o+l+8NF1avt4HWl7cA= +github.com/google/pprof v0.0.0-20240827171923-fa2c70bbbfe5/go.mod h1:vavhavw2zAxS5dIdcRluK6cSGGPlZynqzFM8NdvU144= github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= 
+github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= +github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= +github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= +github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0= github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4= github.com/kr/fs v0.1.0 h1:Jskdu9ieNAYnjxsi0LbQp1ulIKZV1LAFgK1tWhpZgl8= @@ -27,71 +41,108 @@ github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY= github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0= -github.com/mattn/go-runewidth v0.0.15 h1:UNAjwbU9l54TA3KzvqLGxwWjHmMgBUVhBiTjelZgg3U= +github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY= +github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0= github.com/mattn/go-runewidth v0.0.15/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= +github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc= +github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= github.com/melbahja/goph v1.4.0 h1:z0PgDbBFe66lRYl3v5dGb9aFgPy0kotuQ37QOwSQFqs= github.com/melbahja/goph v1.4.0/go.mod h1:uG+VfK2Dlhk+O32zFrRlc3kYKTlV6+BtvPWd/kK7U68= -github.com/navidys/tvxwidgets v0.7.0 h1:ls5tikzqXnsHwAAV/8zwnRwx/DvSybepUih9txkwjwE= -github.com/navidys/tvxwidgets v0.7.0/go.mod h1:hzFnllDl4o2Ten/67T0F8ZgC1NiLrZYqWxLVjxWu+zo= -github.com/onsi/ginkgo/v2 v2.19.0 h1:9Cnnf7UHo57Hy3k6/m5k3dRfGTMXGvxhHFvkDTCTpvA= -github.com/onsi/ginkgo/v2 v2.19.0/go.mod 
h1:rlwLi9PilAFJ8jCg9UE1QP6VBpd6/xj3SRC0d6TU0To= -github.com/onsi/gomega v1.33.1 h1:dsYjIxxSR755MDmKVsaFQTE22ChNBcuuTWgkUDSubOk= -github.com/onsi/gomega v1.33.1/go.mod h1:U4R44UsT+9eLIaYRB2a5qajjtQYn0hauxvRm16AVYg0= +github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= +github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/navidys/tvxwidgets v0.9.0 h1:0+VfFdgF7GE7n5KohsP7OQv3t92F+fLLVfT7TXNNpTs= +github.com/navidys/tvxwidgets v0.9.0/go.mod h1:k7nn+EZ3SxPJX0w0d94onfadAugUTwHLyujyfI1DTxE= +github.com/onsi/ginkgo/v2 v2.20.2 h1:7NVCeyIWROIAheY21RLS+3j2bb52W0W82tkberYytp4= +github.com/onsi/ginkgo/v2 v2.20.2/go.mod h1:K9gyxPIlb+aIvnZ8bd9Ak+YP18w3APlR+5coaZoE2ag= +github.com/onsi/gomega v1.34.2 h1:pNCwDkzrsv7MS9kpaQvVb1aVLahQXyJ/Tv5oAZMI3i8= +github.com/onsi/gomega v1.34.2/go.mod h1:v1xfxRgk0KIsG+QOdm7p8UosrOzPYRo60fd3B/1Dukc= +github.com/pelletier/go-toml/v2 v2.2.3 h1:YmeHyLY8mFWbdkNWwpr+qIL2bEqT0o95WSdkNHvL12M= +github.com/pelletier/go-toml/v2 v2.2.3/go.mod h1:MfCQTFTvCcUyyvvwm1+G6H/jORL20Xlb6rzQu9GuUkc= +github.com/phsym/console-slog v0.3.1 h1:Fuzcrjr40xTc004S9Kni8XfNsk+qrptQmyR+wZw9/7A= +github.com/phsym/console-slog v0.3.1/go.mod h1:oJskjp/X6e6c0mGpfP8ELkfKUsrkDifYRAqJQgmdDS0= github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/sftp v1.13.5/go.mod h1:wHDZ0IZX6JcBYRK1TH9bcVq8G7TLpVHYIGJRFnmPfxg= -github.com/pkg/sftp v1.13.6 h1:JFZT4XbOU7l77xGSpOdW+pwIMqP044IyjXX6FGyEKFo= -github.com/pkg/sftp v1.13.6/go.mod h1:tz1ryNURKu77RL+GuCzmoJYxQczL3wLNNpPWagdg4Qk= -github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pkg/sftp v1.13.7 h1:uv+I3nNJvlKZIQGSr8JVQLNHFU9YhhNpvC14Y6KgmSM= +github.com/pkg/sftp v1.13.7/go.mod 
h1:KMKI0t3T6hfA+lTR/ssZdunHo+uwq7ghoN09/FSu3DY= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= +github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/psanford/memfs v0.0.0-20230130182539-4dbf7e3e865e h1:51xcRlSMBU5rhM9KahnJGfEsBPVPz3182TgFRowA8yY= github.com/psanford/memfs v0.0.0-20230130182539-4dbf7e3e865e/go.mod h1:tcaRap0jS3eifrEEllL6ZMd9dg8IlDpi2S1oARrQ+NI= -github.com/rivo/tview v0.0.0-20240616192244-23476fa0bab2 h1:LXMiBMxtuXw8e2paN61dI2LMp8JZYyH4UXDwssRI3ys= -github.com/rivo/tview v0.0.0-20240616192244-23476fa0bab2/go.mod h1:02iFIz7K/A9jGCvrizLPvoqr4cEIx7q54RH5Qudkrss= +github.com/rivo/tview v0.0.0-20241103174730-c76f7879f592 h1:YIJ+B1hePP6AgynC5TcqpO0H9k3SSoZa2BGyL6vDUzM= +github.com/rivo/tview v0.0.0-20241103174730-c76f7879f592/go.mod h1:02iFIz7K/A9jGCvrizLPvoqr4cEIx7q54RH5Qudkrss= github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= github.com/rivo/uniseg v0.4.3/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88= github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ= github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88= github.com/rogpeppe/go-internal v1.9.0 h1:73kH8U+JUqXU8lRuOHeVHaa/SZPifC7BkcraZVejAe8= github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= +github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk= +github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/rwcarlsen/goexif v0.0.0-20190401172101-9e8deecbddbd h1:CmH9+J6ZSsIjUK3dcGsnCnO41eRBOnY12zwkn5qVwgc= github.com/rwcarlsen/goexif v0.0.0-20190401172101-9e8deecbddbd/go.mod h1:hPqNNc0+uJM6H+SuU8sEs5K5IQeKccPqeSjfgcKGgPk= 
+github.com/sagikazarmark/locafero v0.6.0 h1:ON7AQg37yzcRPU69mt7gwhFEBwxI6P9T4Qu3N51bwOk= +github.com/sagikazarmark/locafero v0.6.0/go.mod h1:77OmuIc6VTraTXKXIs/uvUxKGUXjE1GbemJYHqdNjX0= +github.com/sagikazarmark/slog-shim v0.1.0 h1:diDBnUNK9N/354PgrxMywXnAwEr1QZcOr6gto+ugjYE= +github.com/sagikazarmark/slog-shim v0.1.0/go.mod h1:SrcSrq8aKtyuqEI1uvTDTK1arOWRIczQRv+GVI1AkeQ= +github.com/samber/lo v1.47.0 h1:z7RynLwP5nbyRscyvcD043DWYoOcYRv3mV8lBeqOCLc= +github.com/samber/lo v1.47.0/go.mod h1:RmDH9Ct32Qy3gduHQuKJ3gW1fMHAnE/fAzQuf6He5cU= +github.com/samber/slog-multi v1.2.4 h1:k9x3JAWKJFPKffx+oXZ8TasaNuorIW4tG+TXxkt6Ry4= +github.com/samber/slog-multi v1.2.4/go.mod h1:ACuZ5B6heK57TfMVkVknN2UZHoFfjCwRxR0Q2OXKHlo= +github.com/sourcegraph/conc v0.3.0 h1:OQTbbt6P72L20UqAkXXuLOj79LfEanQ+YQFNpLA9ySo= +github.com/sourcegraph/conc v0.3.0/go.mod h1:Sdozi7LEKbFPqYX2/J+iBAM6HpqSLTASQIKqDmF7Mt0= +github.com/spf13/afero v1.11.0 h1:WJQKhtpdm3v2IzqG8VMqrr6Rf3UYpEF239Jy9wNepM8= +github.com/spf13/afero v1.11.0/go.mod h1:GH9Y3pIexgf1MTIWtNGyogA5MwRIDXGUr+hbWNoBjkY= +github.com/spf13/cast v1.7.0 h1:ntdiHjuueXFgm5nzDRdOS4yfT43P5Fnud6DH50rz/7w= +github.com/spf13/cast v1.7.0/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo= +github.com/spf13/cobra v1.8.1 h1:e5/vxKd/rZsfSJMUX1agtjeTDf+qv1/JdBF8gg5k9ZM= +github.com/spf13/cobra v1.8.1/go.mod h1:wHxEcudfqmLYa8iTfL+OuZPbBZkmvliBWKIezN3kD9Y= +github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= +github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/spf13/viper v1.19.0 h1:RWq5SEjt8o25SROyN3z2OrDB9l7RPd3lwTWU8EcEdcI= +github.com/spf13/viper v1.19.0/go.mod h1:GQUN9bilAbhU/jgc1bKs99f/suXKeUMct8Adx5+Ntkg= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= 
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/stretchr/testify v1.8.0 h1:pSgiaMZlXftHpm5L7V1+rVB+AZJydKsMxsQBIJw4PKk= github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= -github.com/telemachus/humane v0.6.0 h1:JNT5SWeg8pOHTRo3STy24E247LpQYBy2vxD2HwYwyvU= -github.com/telemachus/humane v0.6.0/go.mod h1:T2XzA97m+JPk/WDe9VHamk/JOArXlOy4jlIGDKte3ic= +github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= +github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8= +github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU= github.com/thlib/go-timezone-local v0.0.3 h1:ie5XtZWG5lQ4+1MtC5KZ/FeWlOKzW2nPoUnXYUbV/1s= github.com/thlib/go-timezone-local v0.0.3/go.mod h1:/Tnicc6m/lsJE0irFMA0LfIwTBo4QP7A8IfyIv4zZKI= github.com/ttacon/chalk v0.0.0-20160626202418-22c06c80ed31 h1:OXcKh35JaYsGMRzpvFkLv/MEyPuL49CThT1pZ8aSml4= github.com/ttacon/chalk v0.0.0-20160626202418-22c06c80ed31/go.mod h1:onvgF043R+lC5RZ8IT9rBXDaEDnpnw/Cl+HFiw+v/7Q= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= +go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0= +go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20211215153901-e495a2d5b3d3/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= -golang.org/x/crypto v0.1.0/go.mod h1:RecgLatLF4+eUMCP1PoPZQb+cVrJcOPbHkTkbkB9sbw= golang.org/x/crypto v0.6.0/go.mod h1:OFC/31mSvZgRz0V1QTNCzfAI1aIRzbiufJtkMIlEp58= -golang.org/x/crypto v0.17.0 
h1:r8bRNjWL3GshPW3gkd+RpvzWrZAwPS49OmTGZ/uhM4k= golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4= +golang.org/x/crypto v0.29.0 h1:L5SG1JTTXupVV3n6sUqMTeWbjAyfPwoda2DLX8J8FrQ= +golang.org/x/crypto v0.29.0/go.mod h1:+F4F4N5hv6v38hfeYwTdx20oUvLLc+QfrE9Ax9HtgRg= +golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f h1:XdNn9LlyWAhLVp6P/i8QYBW+hlyhrhei9uErw2B5GJo= +golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f/go.mod h1:D5SMRVC3C2/4+F/DB1wZsLRnSNimn2Sp/NPsCrsv8ak= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= -golang.org/x/net v0.1.0/go.mod h1:Cx3nUiGt4eDBEyega/BKRp+/AlGL8hYe7U9odMt2Cco= golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= -golang.org/x/net v0.25.0 h1:d/OCCoBEUq33pjydKrGQhw7IlUPI2Oylr+8qLx49kac= -golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM= +golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= +golang.org/x/net v0.28.0 h1:a9JDOJc5GMUJ0+UDqmLT86WiEy7iWyIhz8gz8E4e5hE= +golang.org/x/net v0.28.0/go.mod h1:yqtgsTWOOnlGLG9GFRrK3++bGOUEkNBoHZc8MEDWPNg= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.8.0 
h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ= -golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sync v0.9.0 h1:fEo0HyrW1GIgZdpbhCRO0PkJajUS5H9IFUztCgEo2jQ= +golang.org/x/sync v0.9.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -100,36 +151,42 @@ golang.org/x/sys v0.0.0-20210831042530-f4d43177bf5e/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/sys v0.20.0 h1:Od9JTbYCk261bKm4M/mw7AklTlFYIa0bIp9BgSm1S8Y= -golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s= +golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= -golang.org/x/term v0.1.0/go.mod 
h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= +golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= +golang.org/x/term v0.15.0/go.mod h1:BDl952bC7+uMoWR75FIrCDx79TPU9oHkTZ9yRbYOrX0= golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= -golang.org/x/term v0.20.0 h1:VnkxpohqXaOBYJtBmEppKUG6mXpi+4O6purfc2+sMhw= -golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY= +golang.org/x/term v0.26.0 h1:WEQa6V3Gja/BhNxg540hBip/kkaYtRg3cxg4oXSw4AU= +golang.org/x/term v0.26.0/go.mod h1:Si5m1o57C5nBNQo5z1iq+XDijt21BDBDp2bK0QI8e3E= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= -golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= -golang.org/x/text v0.15.0 h1:h1V/4gjBv8v9cjcR6+AR5+/cIYK5N/WAgiv4xlsEtAk= -golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/text v0.20.0 h1:gK/Kv2otX8gz+wn7Rmb3vT96ZwuoxnQlY+HlJVj7Qug= +golang.org/x/text v0.20.0/go.mod h1:D4IsuqiFMhST5bX19pQ9ikHC2GsaKyk/oF+pn3ducp4= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= golang.org/x/tools v0.6.0/go.mod 
h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= -golang.org/x/tools v0.21.0 h1:qc0xYgIbsSDt9EyWz05J5wfa7LOVW0YTLOXrqdLAWIw= -golang.org/x/tools v0.21.0/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk= +golang.org/x/tools v0.27.0 h1:qEKojBykQkQ4EynWy4S8Weg69NumxKdn40Fce3uc/8o= +golang.org/x/tools v0.27.0/go.mod h1:sUi0ZgbwW9ZPAq26Ekut+weQPR5eIM6GQLQ1Yjm1H0Q= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo= +gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA= +gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/helpers/fshelper/extensions.nongo b/helpers/fshelper/extensions.nongo deleted file mode 100644 index 3e7b69fe..00000000 --- a/helpers/fshelper/extensions.nongo +++ /dev/null @@ -1,111 +0,0 @@ -package fshelper - -import ( - "fmt" - "slices" - "strings" - - "github.com/simulot/immich-go/helpers/gen" -) - -// List from immich code: -// https://github.com/immich-app/immich/blob/8d5bf933601a3f2787a78c40e4c11862b96566e0/server/src/domain/domain.constant.ts#L26C17-L89C3 - -var supportedExtensionsAndMime = map[string][]string{ - ".3fr": {"image/3fr", "image/x-hasselblad-3fr"}, - ".ari": {"image/ari", "image/x-arriflex-ari"}, - ".arw": {"image/arw", 
"image/x-sony-arw"}, - ".avif": {"image/avif"}, - ".cap": {"image/cap", "image/x-phaseone-cap"}, - ".cin": {"image/cin", "image/x-phantom-cin"}, - ".cr2": {"image/cr2", "image/x-canon-cr2"}, - ".cr3": {"image/cr3", "image/x-canon-cr3"}, - ".crw": {"image/crw", "image/x-canon-crw"}, - ".dcr": {"image/dcr", "image/x-kodak-dcr"}, - ".dng": {"image/dng", "image/x-adobe-dng"}, - ".erf": {"image/erf", "image/x-epson-erf"}, - ".fff": {"image/fff", "image/x-hasselblad-fff"}, - ".gif": {"image/gif"}, - ".heic": {"image/heic"}, - ".heif": {"image/heif"}, - ".iiq": {"image/iiq", "image/x-phaseone-iiq"}, - ".insp": {"image/jpeg"}, - ".jpeg": {"image/jpeg"}, - ".jpg": {"image/jpeg"}, - ".jpe": {"image/jpeg"}, - ".jxl": {"image/jxl"}, - ".k25": {"image/k25", "image/x-kodak-k25"}, - ".kdc": {"image/kdc", "image/x-kodak-kdc"}, - ".mrw": {"image/mrw", "image/x-minolta-mrw"}, - ".nef": {"image/nef", "image/x-nikon-nef"}, - ".orf": {"image/orf", "image/x-olympus-orf"}, - ".ori": {"image/ori", "image/x-olympus-ori"}, - ".pef": {"image/pef", "image/x-pentax-pef"}, - ".png": {"image/png"}, - ".psd": {"image/psd", "image/vnd.adobe.photoshop"}, - ".raf": {"image/raf", "image/x-fuji-raf"}, - ".raw": {"image/raw", "image/x-panasonic-raw"}, - ".rw2": {"image/rw2', 'image/x-panasonic-rw2"}, - ".rwl": {"image/rwl", "image/x-leica-rwl"}, - ".sr2": {"image/sr2", "image/x-sony-sr2"}, - ".srf": {"image/srf", "image/x-sony-srf"}, - ".srw": {"image/srw", "image/x-samsung-srw"}, - ".tif": {"image/tiff"}, - ".tiff": {"image/tiff"}, - ".webp": {"image/webp"}, - ".x3f": {"image/x3f", "image/x-sigma-x3f"}, - - ".3gp": {"video/3gpp"}, - ".avi": {"video/avi", "video/msvideo", "video/vnd.avi", "video/x-msvideo"}, - ".flv": {"video/x-flv"}, - ".insv": {"video/mp4"}, - ".m2ts": {"video/mp2t"}, - ".m4v": {"video/x-m4v"}, - ".mkv": {"video/x-matroska"}, - ".mov": {"video/quicktime"}, - ".mp4": {"video/mp4"}, - ".mpg": {"video/mpeg"}, - ".mts": {"video/mp2t"}, - ".webm": {"video/webm"}, - ".wmv": 
{"video/x-ms-wmv"}, -} - -var supportedExtensions = gen.MapKeys(supportedExtensionsAndMime) - -// MimeFromExt return the mime type of the extension. Return an error is the extension is not handled by the server. -func MimeFromExt(ext string) ([]string, error) { - ext = strings.ToLower(ext) - if l, ok := supportedExtensionsAndMime[ext]; ok { - return l, nil - } - return nil, fmt.Errorf("unsupported extension %s", ext) -} - -// IsExtensionPrefix -// Check if the string is first part of an known extension as needed for Google Takeout - -func IsExtensionPrefix(ext string) bool { - ext = strings.ToLower(ext) - for _, e := range supportedExtensions { - if ext == e[:len(e)-1] { - return true - } - } - return false -} - -var ignoredExtensions = []string{ - ".html", ".mp", -} - -func IsIgnoredExt(ext string) bool { - return slices.Contains(ignoredExtensions, ext) -} - -var metaDataExtensions = []string{ - ".json", ".xmp", -} - -func IsMetadataExt(ext string) bool { - return slices.Contains(metaDataExtensions, ext) -} diff --git a/helpers/myflag/boolfn.go b/helpers/myflag/boolfn.go deleted file mode 100644 index 1bdf47c6..00000000 --- a/helpers/myflag/boolfn.go +++ /dev/null @@ -1,28 +0,0 @@ -package myflag - -import ( - "fmt" - "strconv" - "strings" -) - -// BoolFlagFn returns a convenient function for handling boolean option on CLI to be used as parameter of the flag.BoolFn. 
-// It works has the flag.BoolVar but, the presence of the flag, without value, set the flag to True - -func BoolFlagFn(b *bool, defaultValue bool) func(string) error { - *b = defaultValue - return func(v string) error { - switch strings.ToLower(v) { - case "": - *b = true - return nil - default: - var err error - *b, err = strconv.ParseBool(v) - if err != nil { - err = fmt.Errorf("can't parse the parameter value: %w", err) - } - return err - } - } -} diff --git a/helpers/myflag/boolfn_test.go b/helpers/myflag/boolfn_test.go deleted file mode 100644 index cd4e20a4..00000000 --- a/helpers/myflag/boolfn_test.go +++ /dev/null @@ -1,67 +0,0 @@ -package myflag - -import "testing" - -func Test_BoolFn(t *testing.T) { - tc := []struct { - name string - defaultValue bool - want bool - wantErr bool - }{ - { - name: "", - want: true, - defaultValue: true, - }, - { - name: "", - want: true, - defaultValue: false, - }, - { - name: "true", - want: true, - }, - { - name: "false", - want: false, - }, - { - name: "1", - want: true, - }, - { - name: "T", - want: true, - }, - { - name: "F", - want: false, - }, - { - name: "0", - want: false, - }, - { - name: "let's be affirmative", - want: false, - wantErr: true, - }, - } - for _, c := range tc { - t.Run(c.name, func(t *testing.T) { - var b bool - fn := BoolFlagFn(&b, c.defaultValue) - - err := fn(c.name) - if (err == nil && c.wantErr) || (err != nil && !c.wantErr) { - t.Errorf("fn(%q)=%v, expecting error: %v", c.name, err, c.wantErr) - return - } - if b != c.want { - t.Errorf("fn(%q) set b to %v, expecting: %v", c.name, b, c.want) - } - }) - } -} diff --git a/helpers/myflag/duration.go b/helpers/myflag/duration.go deleted file mode 100644 index f6f192ca..00000000 --- a/helpers/myflag/duration.go +++ /dev/null @@ -1,20 +0,0 @@ -package myflag - -import ( - "fmt" - "strings" - "time" -) - -func DurationFlagFn(flag *time.Duration, defaultValue time.Duration) func(string) error { - *flag = defaultValue - return func(v string) error { - 
v = strings.ToLower(v) - d, err := time.ParseDuration(v) - if err != nil { - return fmt.Errorf("can't parse the duration parameter: %w", err) - } - *flag = d - return nil - } -} diff --git a/immich/albums.go b/immich/albums.go index e4d0a7e9..51686fce 100644 --- a/immich/albums.go +++ b/immich/albums.go @@ -3,6 +3,9 @@ package immich import ( "context" "fmt" + + "github.com/google/uuid" + "github.com/simulot/immich-go/internal/assets" ) type AlbumSimplified struct { @@ -21,23 +24,39 @@ type AlbumSimplified struct { AssetIds []string `json:"assetIds,omitempty"` } -func (ic *ImmichClient) GetAllAlbums(ctx context.Context) ([]AlbumSimplified, error) { +func AlbumsFromAlbumSimplified(albums []AlbumSimplified) []assets.Album { + result := make([]assets.Album, 0, len(albums)) + for _, a := range albums { + result = append(result, assets.Album{ + ID: a.ID, + Title: a.AlbumName, + Description: a.Description, + }) + } + return result +} + +func (ic *ImmichClient) GetAllAlbums(ctx context.Context) ([]assets.Album, error) { var albums []AlbumSimplified - err := ic.newServerCall(ctx, EndPointGetAllAlbums).do(getRequest("/albums", setAcceptJSON()), responseJSON(&albums)) + err := ic.newServerCall(ctx, EndPointGetAllAlbums). 
+ do( + getRequest("/albums", setAcceptJSON()), + responseJSON(&albums), + ) if err != nil { return nil, err } - return albums, nil + return AlbumsFromAlbumSimplified(albums), nil } type AlbumContent struct { ID string `json:"id,omitempty"` // OwnerID string `json:"ownerId"` - AlbumName string `json:"albumName"` - Description string `json:"description"` - Shared bool `json:"shared"` - Assets []AssetSimplified `json:"assets,omitempty"` - AssetIDs []string `json:"assetIds,omitempty"` + AlbumName string `json:"albumName"` + Description string `json:"description"` + Shared bool `json:"shared"` + Assets []*Asset `json:"assets,omitempty"` + AssetIDs []string `json:"assetIds,omitempty"` // CreatedAt time.Time `json:"createdAt"` // UpdatedAt time.Time `json:"updatedAt"` // AlbumThumbnailAssetID string `json:"albumThumbnailAssetId"` @@ -76,18 +95,20 @@ func (ic *ImmichClient) GetAlbumInfo(ctx context.Context, id string, withoutAsse query := id if withoutAssets { query += "?withoutAssets=true" + } else { + query += "?withoutAssets=false" } err := ic.newServerCall(ctx, EndPointGetAlbumInfo).do(getRequest("/albums/"+query, setAcceptJSON()), responseJSON(&album)) return album, err } -func (ic *ImmichClient) GetAssetsAlbums(ctx context.Context, id string) ([]AlbumSimplified, error) { +func (ic *ImmichClient) GetAssetsAlbums(ctx context.Context, id string) ([]assets.Album, error) { var albums []AlbumSimplified err := ic.newServerCall(ctx, EndPointGetAlbumInfo).do(getRequest("/albums", setAcceptJSON()), responseJSON(&albums)) if err != nil { return nil, err } - return albums, nil + return AlbumsFromAlbumSimplified(albums), nil } type UpdateAlbum struct { @@ -101,6 +122,9 @@ type UpdateAlbumResult struct { } func (ic *ImmichClient) AddAssetToAlbum(ctx context.Context, albumID string, assets []string) ([]UpdateAlbumResult, error) { + if ic.dryRun { + return []UpdateAlbumResult{}, nil + } var r []UpdateAlbumResult body := UpdateAlbum{ IDS: assets, @@ -115,7 +139,13 @@ func (ic 
*ImmichClient) AddAssetToAlbum(ctx context.Context, albumID string, ass return r, nil } -func (ic *ImmichClient) CreateAlbum(ctx context.Context, name string, description string, assetsIDs []string) (AlbumSimplified, error) { +func (ic *ImmichClient) CreateAlbum(ctx context.Context, name string, description string, assetsIDs []string) (assets.Album, error) { + if ic.dryRun { + return assets.Album{ + ID: uuid.NewString(), + Title: name, + }, nil + } body := AlbumContent{ AlbumName: name, Description: description, @@ -126,19 +156,26 @@ func (ic *ImmichClient) CreateAlbum(ctx context.Context, name string, descriptio postRequest("/albums", "application/json", setAcceptJSON(), setJSONBody(body)), responseJSON(&r)) if err != nil { - return AlbumSimplified{}, err + return assets.Album{}, err } - return r, nil + return assets.Album{ + ID: r.ID, + Title: r.AlbumName, + Description: r.Description, + }, nil } -func (ic *ImmichClient) GetAssetAlbums(ctx context.Context, id string) ([]AlbumSimplified, error) { +func (ic *ImmichClient) GetAssetAlbums(ctx context.Context, assetID string) ([]assets.Album, error) { var r []AlbumSimplified err := ic.newServerCall(ctx, EndPointGetAssetAlbums).do( - getRequest("/albums?assetId="+id, setAcceptJSON()), + getRequest("/albums?assetId="+assetID, setAcceptJSON()), responseJSON(&r)) - return r, err + return AlbumsFromAlbumSimplified(r), err } func (ic *ImmichClient) DeleteAlbum(ctx context.Context, id string) error { + if ic.dryRun { + return nil + } return ic.newServerCall(ctx, EndPointDeleteAlbum).do(deleteRequest("/albums/" + id)) } diff --git a/immich/asset.go b/immich/asset.go index 21090f5f..1350704b 100644 --- a/immich/asset.go +++ b/immich/asset.go @@ -14,7 +14,8 @@ import ( "strings" "time" - "github.com/simulot/immich-go/browser" + "github.com/google/uuid" + "github.com/simulot/immich-go/internal/assets" ) type AssetResponse struct { @@ -43,22 +44,32 @@ func formatDuration(duration time.Duration) string { return 
fmt.Sprintf("%02d:%02d:%02d.%06d", hours, minutes, seconds, milliseconds) } -func (ic *ImmichClient) AssetUpload(ctx context.Context, la *browser.LocalAssetFile) (AssetResponse, error) { +const ( + TimeFormat = "2006-01-02T15:04:05Z" +) + +func (ic *ImmichClient) AssetUpload(ctx context.Context, la *assets.Asset) (AssetResponse, error) { + if ic.dryRun { + return AssetResponse{ + ID: uuid.NewString(), + Status: UploadCreated, + }, nil + } var ar AssetResponse - ext := path.Ext(la.FileName) - if strings.TrimSuffix(la.Title, ext) == "" { - la.Title = "No Name" + ext // fix #88, #128 + ext := path.Ext(la.OriginalFileName) + if strings.TrimSuffix(la.OriginalFileName, ext) == "" { + la.OriginalFileName = "No Name" + ext // fix #88, #128 } if strings.ToUpper(ext) == ".MP" { ext = ".MP4" // #405 - la.Title = la.Title + ".MP4" + la.OriginalFileName = la.OriginalFileName + ".MP4" } mtype := ic.TypeFromExt(ext) switch mtype { case "video", "image": default: - return ar, fmt.Errorf("type file not supported: %s", path.Ext(la.FileName)) + return ar, fmt.Errorf("type file not supported: %s", path.Ext(la.OriginalFileName)) } f, err := la.Open() @@ -80,7 +91,7 @@ func (ic *ImmichClient) AssetUpload(ctx context.Context, la *browser.LocalAssetF return } - err = m.WriteField("deviceAssetId", fmt.Sprintf("%s-%d", path.Base(la.Title), s.Size())) + err = m.WriteField("deviceAssetId", fmt.Sprintf("%s-%d", path.Base(la.OriginalFileName), s.Size())) if err != nil { return } @@ -92,11 +103,16 @@ func (ic *ImmichClient) AssetUpload(ctx context.Context, la *browser.LocalAssetF if err != nil { return } - err = m.WriteField("fileCreatedAt", la.Metadata.DateTaken.Format(time.RFC3339)) + + if !la.CaptureDate.IsZero() { + err = m.WriteField("fileCreatedAt", la.CaptureDate.Format(TimeFormat)) + } else { + err = m.WriteField("fileCreatedAt", s.ModTime().Format(TimeFormat)) + } if err != nil { return } - err = m.WriteField("fileModifiedAt", s.ModTime().Format(time.RFC3339)) + err = 
m.WriteField("fileModifiedAt", s.ModTime().Format(TimeFormat)) if err != nil { return } @@ -120,17 +136,11 @@ func (ic *ImmichClient) AssetUpload(ctx context.Context, la *browser.LocalAssetF if err != nil { return } - if la.LivePhotoID != "" { - err = m.WriteField("livePhotoVideoId", la.LivePhotoID) - if err != nil { - return - } - } h := textproto.MIMEHeader{} h.Set("Content-Disposition", fmt.Sprintf(`form-data; name="%s"; filename="%s"`, - escapeQuotes("assetData"), escapeQuotes(path.Base(la.Title)))) + escapeQuotes("assetData"), escapeQuotes(path.Base(la.OriginalFileName)))) h.Set("Content-Type", mtype) var part io.Writer @@ -143,8 +153,8 @@ func (ic *ImmichClient) AssetUpload(ctx context.Context, la *browser.LocalAssetF return } - if la.SideCar.IsSet() { - scName := path.Base(la.FileName) + ".xmp" + if la.FromSideCar != nil && strings.ToLower(la.FromSideCar.File.Name()) == ".xmp" { + scName := path.Base(la.OriginalFileName) + ".xmp" h.Set("Content-Disposition", fmt.Sprintf(`form-data; name="%s"; filename="%s"`, escapeQuotes("sidecarData"), escapeQuotes(scName))) @@ -155,23 +165,13 @@ func (ic *ImmichClient) AssetUpload(ctx context.Context, la *browser.LocalAssetF if err != nil { return } - err = la.SideCar.Write(part) + defer f.Close() + f, err = la.FromSideCar.File.Open() if err != nil { return } - } else if la.Metadata.IsSet() { - scName := path.Base(la.FileName) + ".xmp" - h.Set("Content-Disposition", - fmt.Sprintf(`form-data; name="%s"; filename="%s"`, - escapeQuotes("sidecarData"), escapeQuotes(scName))) - h.Set("Content-Type", "application/xml") - var part io.Writer - part, err = m.CreatePart(h) - if err != nil { - return - } - err = la.Metadata.Write(part) + _, err = io.Copy(part, f) if err != nil { return } @@ -181,13 +181,10 @@ func (ic *ImmichClient) AssetUpload(ctx context.Context, la *browser.LocalAssetF var callValues map[string]string if ic.apiTraceWriter != nil { callValues = map[string]string{ - ctxAssetName: la.FileName, + ctxAssetName: 
la.File.Name(), } - if la.SideCar.IsSet() { - callValues[ctxSideCarName] = la.SideCar.FileName - } - if la.LivePhoto != nil { - callValues[ctxLiveVideoName] = la.LivePhoto.FileName + if la.FromSideCar != nil { + callValues[ctxSideCarName] = la.FromSideCar.File.Name() } } @@ -243,6 +240,9 @@ func (o *GetAssetOptions) Values() url.Values { } func (ic *ImmichClient) DeleteAssets(ctx context.Context, id []string, forceDelete bool) error { + if ic.dryRun { + return nil + } req := struct { Force bool `json:"force"` IDs []string `json:"ids"` @@ -254,14 +254,9 @@ func (ic *ImmichClient) DeleteAssets(ctx context.Context, id []string, forceDele return ic.newServerCall(ctx, "DeleteAsset").do(deleteRequest("/assets", setJSONBody(&req))) } -func (ic *ImmichClient) GetAssetByID(ctx context.Context, id string) (*Asset, error) { - body := struct { - WithExif bool `json:"withExif,omitempty"` - IsVisible bool `json:"isVisible,omitempty"` - ID string `json:"id"` - }{WithExif: true, IsVisible: true, ID: id} +func (ic *ImmichClient) GetAssetInfo(ctx context.Context, id string) (*Asset, error) { r := Asset{} - err := ic.newServerCall(ctx, "GetAssetByID").do(postRequest("/search/metadata", "application/json", setAcceptJSON(), setJSONBody(body)), responseJSON(&r)) + err := ic.newServerCall(ctx, "GetAssetInfo").do(getRequest("/assets/"+id, setAcceptJSON()), responseJSON(&r)) return &r, err } @@ -270,6 +265,9 @@ func (ic *ImmichClient) UpdateAssets(ctx context.Context, ids []string, latitude float64, longitude float64, removeParent bool, stackParentID string, ) error { + if ic.dryRun { + return nil + } type updAssets struct { IDs []string `json:"ids"` IsArchived bool `json:"isArchived"` @@ -292,31 +290,30 @@ func (ic *ImmichClient) UpdateAssets(ctx context.Context, ids []string, return ic.newServerCall(ctx, "updateAssets").do(putRequest("/assets", setJSONBody(param))) } -func (ic *ImmichClient) UpdateAsset(ctx context.Context, id string, a *browser.LocalAssetFile) (*Asset, error) { - type 
updAsset struct { - IsArchived bool `json:"isArchived"` - IsFavorite bool `json:"isFavorite"` - Latitude float64 `json:"latitude,omitempty"` - Longitude float64 `json:"longitude,omitempty"` - Description string `json:"description,omitempty"` - } - param := updAsset{ - IsArchived: a.Archived, - IsFavorite: a.Favorite, - Description: a.Metadata.Description, - Latitude: a.Metadata.Latitude, - Longitude: a.Metadata.Longitude, +// UpdAssetField is used to update asset with fields given in the struct fields +type UpdAssetField struct { + IsArchived bool `json:"isArchived"` + IsFavorite bool `json:"isFavorite"` + Latitude float64 `json:"latitude,omitempty"` + Longitude float64 `json:"longitude,omitempty"` + Description string `json:"description,omitempty"` + Rating int `json:"rating,omitempty"` + LivePhotoVideoID string `json:"livePhotoVideoId,omitempty"` + DateTimeOriginal time.Time `json:"dateTimeOriginal,omitempty"` +} + +func (ic *ImmichClient) UpdateAsset(ctx context.Context, id string, param UpdAssetField) (*Asset, error) { + if ic.dryRun { + return nil, nil } r := Asset{} err := ic.newServerCall(ctx, "updateAsset").do(putRequest("/assets/"+id, setJSONBody(param)), responseJSON(&r)) return &r, err } -func (ic *ImmichClient) StackAssets(ctx context.Context, coverID string, ids []string) error { - cover, err := ic.GetAssetByID(ctx, coverID) - if err != nil { - return err - } +func (ic *ImmichClient) DownloadAsset(ctx context.Context, id string) (io.ReadCloser, error) { + var rc io.ReadCloser - return ic.UpdateAssets(ctx, ids, cover.IsArchived, cover.IsFavorite, cover.ExifInfo.Latitude, cover.ExifInfo.Longitude, false, coverID) + err := ic.newServerCall(ctx, "DownloadAsset").do(getRequest(fmt.Sprintf("/assets/%s/original", id), setOctetStream()), responseOctetStream(&rc)) + return rc, err } diff --git a/immich/asset_test.go b/immich/asset_test.go new file mode 100644 index 00000000..8fc8a8ee --- /dev/null +++ b/immich/asset_test.go @@ -0,0 +1,81 @@ +package immich + 
+import ( + "encoding/json" + "strings" + "testing" +) + +func Test_AssetJSON(t *testing.T) { + js := `{ + "id": "9a2fff7a-f226-48e8-a888-fdac199f3d56", + "deviceAssetId": "IMG_20180811_173822_1.jpg-2082855", + "ownerId": "13e05729-8933-494e-982e-5910a0c4420f", + "deviceId": "DESKTOP-ILBKKE7", + "type": "IMAGE", + "originalPath": "upload/upload/13e05729-8933-494e-982e-5910a0c4420f/17/6c/176c335a-fbc0-412f-a46f-c187351a55bd.jpg", + "originalFileName": "IMG_20180811_173822_1.jpg", + "resized": true, + "thumbhash": "WRgGDQTZeaiYNz6FCUQXZg4BtAAV", + "fileCreatedAt": "\"2018-08-11T19:38:22+02:00\"", + "fileModifiedAt": "\"2024-07-07T17:29:15+02:00\"", + "updatedAt": "\"2024-11-17T18:57:15+01:00\"", + "isFavorite": false, + "isArchived": false, + "isTrashed": false, + "duration": "0:00:00.00000", + "rating": 0, + "exifInfo": { + "make": "HUAWEI", + "model": "CLT-L09", + "exifImageWidth": 2736, + "exifImageHeight": 3648, + "fileSizeInByte": 2082855, + "orientation": "0", + "dateTimeOriginal": "\"2018-08-11T19:38:22+02:00\"", + "timeZone": "Europe/Paris", + "latitude": 48.8413085936111, + "longitude": 2.4199056625, + "description": "oznor" + }, + "livePhotoVideoId": "", + "checksum": "fDpZUcgYJjZnzLAHfIddp8BLzjE=", + "stackParentId": "", + "tags": [ + { + "id": "e6745272-71d2-4a61-976e-d4ac6b7de3b8", + "name": "tag2", + "value": "tag1/tag2" + }, + { + "id": "bbfd950a-f1b5-4e2d-acc9-e000a27d41e5", + "name": "activities", + "value": "activities" + } + ] +}` + + asset := Asset{} + dec := json.NewDecoder(strings.NewReader(js)) + err := dec.Decode(&asset) + if err != nil { + t.Error(err) + } + if len(asset.Tags) != 2 { + t.Errorf("expected 2 tags, got %d", len(asset.Tags)) + } + expectedTags := []struct { + ID string + Name string + Value string + }{ + {"e6745272-71d2-4a61-976e-d4ac6b7de3b8", "tag2", "tag1/tag2"}, + {"bbfd950a-f1b5-4e2d-acc9-e000a27d41e5", "activities", "activities"}, + } + + for i, tag := range asset.Tags { + if tag.ID != expectedTags[i].ID || tag.Name != 
expectedTags[i].Name || tag.Value != expectedTags[i].Value { + t.Errorf("expected tag %v, got %v", expectedTags[i], tag) + } + } +} diff --git a/immich/call.go b/immich/call.go index a6800fde..8a6ee531 100644 --- a/immich/call.go +++ b/immich/call.go @@ -12,11 +12,13 @@ import ( "sync/atomic" "time" - "github.com/simulot/immich-go/helpers/fshelper" + "github.com/simulot/immich-go/internal/fshelper" ) const ( EndPointGetJobs = "GetJobs" + EndPointSendJobCommand = "SendJobCommand" + EndPointCreateJob = "CreateJob" EndPointGetAllAlbums = "GetAllAlbums" EndPointGetAlbumInfo = "GetAlbumInfo" EndPointAddAsstToAlbum = "AddAssetToAlbum" @@ -29,6 +31,9 @@ const ( EndPointGetAssetStatistics = "GetAssetStatistics" EndPointGetSupportedMediaTypes = "GetSupportedMediaTypes" EndPointGetAllAssets = "GetAllAssets" + EndPointUpsertTags = "UpsertTags" + EndPointTagAssets = "TagAssets" + EndPointBulkTagAssets = "BulkTagAssets" ) type TooManyInternalError struct { @@ -136,7 +141,11 @@ var callSequence atomic.Int64 const ctxCallSequenceID = "api-call-sequence" -func (sc *serverCall) request(method string, url string, opts ...serverRequestOption) *http.Request { +func (sc *serverCall) request( + method string, + url string, + opts ...serverRequestOption, +) *http.Request { if sc.ic.apiTraceWriter != nil && sc.endPoint != EndPointGetJobs { seq := callSequence.Add(1) sc.ctx = context.WithValue(sc.ctx, ctxCallSequenceID, seq) @@ -168,7 +177,10 @@ func postRequest(url string, cType string, opts ...serverRequestOption) requestF if sc.err != nil { return nil } - return sc.request(http.MethodPost, sc.ic.endPoint+url, append(opts, setContentType(cType))...) + return sc.request( + http.MethodPost, + sc.ic.endPoint+url, + append(opts, setContentType(cType))...) 
} } @@ -252,6 +264,13 @@ func setAcceptJSON() serverRequestOption { } } +func setOctetStream() serverRequestOption { + return func(sc *serverCall, req *http.Request) error { + req.Header.Add("Accept", "application/octet-stream") + return nil + } +} + func setAPIKey() serverRequestOption { return func(sc *serverCall, req *http.Request) error { req.Header.Set("x-api-key", sc.ic.key) @@ -296,7 +315,15 @@ func responseJSON[T any](object *T) serverResponseOption { err := json.NewDecoder(resp.Body).Decode(object) if sc.ic.apiTraceWriter != nil && sc.endPoint != EndPointGetJobs { seq := sc.ctx.Value(ctxCallSequenceID) - fmt.Fprintln(sc.ic.apiTraceWriter, time.Now().Format(time.RFC3339), "RESPONSE", seq, sc.endPoint, resp.Request.Method, resp.Request.URL.String()) + fmt.Fprintln( + sc.ic.apiTraceWriter, + time.Now().Format(time.RFC3339), + "RESPONSE", + seq, + sc.endPoint, + resp.Request.Method, + resp.Request.URL.String(), + ) fmt.Fprintln(sc.ic.apiTraceWriter, " Status:", resp.Status) fmt.Fprintln(sc.ic.apiTraceWriter, "-- response body --") dec := json.NewEncoder(newLimitWriter(sc.ic.apiTraceWriter, 100)) @@ -323,3 +350,15 @@ func responseCopy(buffer *bytes.Buffer) serverResponseOption { return nil } } + +func responseOctetStream(rc *io.ReadCloser) serverResponseOption { + return func(sc *serverCall, resp *http.Response) error { + if resp != nil { + if resp.Body != nil { + *rc = resp.Body + return nil + } + } + return nil + } +} diff --git a/immich/client.go b/immich/client.go index 81958559..2b256fa4 100644 --- a/immich/client.go +++ b/immich/client.go @@ -8,11 +8,9 @@ import ( "io" "net/http" "os" - "slices" - "sort" - "strings" - "sync" "time" + + "github.com/simulot/immich-go/internal/filetypes" ) /* @@ -30,13 +28,18 @@ type ImmichClient struct { Retries int // Number of attempts on 500 errors RetriesDelay time.Duration // Duration between retries apiTraceWriter io.Writer - supportedMediaTypes SupportedMedia // Server's list of supported medias + supportedMediaTypes 
filetypes.SupportedMedia // Server's list of supported medias + dryRun bool // If true, do not send any data to the server } func (ic *ImmichClient) SetEndPoint(endPoint string) { ic.endPoint = endPoint } +func (ic *ImmichClient) GetEndPoint() string { + return ic.endPoint +} + func (ic *ImmichClient) SetDeviceUUID(deviceUUID string) { ic.DeviceUUID = deviceUUID } @@ -45,7 +48,7 @@ func (ic *ImmichClient) EnableAppTrace(w io.Writer) { ic.apiTraceWriter = w } -func (ic *ImmichClient) SupportedMedia() SupportedMedia { +func (ic *ImmichClient) SupportedMedia() filetypes.SupportedMedia { return ic.supportedMediaTypes } @@ -65,6 +68,13 @@ func OptionConnectionTimeout(d time.Duration) clientOption { } } +func OptionDryRun(dryRun bool) clientOption { + return func(ic *ImmichClient) error { + ic.dryRun = dryRun + return nil + } +} + // Create a new ImmichClient func NewImmichClient(endPoint string, key string, options ...clientOption) (*ImmichClient, error) { var err error @@ -182,81 +192,24 @@ func (ic *ImmichClient) GetAssetStatistics(ctx context.Context) (UserStatistics, return s, err } -type SupportedMedia map[string]string - -const ( - TypeVideo = "video" - TypeImage = "image" - TypeSidecar = "sidecar" - TypeUnknown = "" -) - -var DefaultSupportedMedia = SupportedMedia{ - ".3gp": TypeVideo, ".avi": TypeVideo, ".flv": TypeVideo, ".insv": TypeVideo, ".m2ts": TypeVideo, ".m4v": TypeVideo, ".mkv": TypeVideo, ".mov": TypeVideo, ".mp4": TypeVideo, ".mpg": TypeVideo, ".mts": TypeVideo, ".webm": TypeVideo, ".wmv": TypeVideo, - ".3fr": TypeImage, ".ari": TypeImage, ".arw": TypeImage, ".avif": TypeImage, ".bmp": TypeImage, ".cap": TypeImage, ".cin": TypeImage, ".cr2": TypeImage, ".cr3": TypeImage, ".crw": TypeImage, ".dcr": TypeImage, ".dng": TypeImage, ".erf": TypeImage, - ".fff": TypeImage, ".gif": TypeImage, ".heic": TypeImage, ".heif": TypeImage, ".hif": TypeImage, ".iiq": TypeImage, ".insp": TypeImage, ".jpe": TypeImage, ".jpeg": TypeImage, ".jpg": TypeImage, - ".jxl": 
TypeImage, ".k25": TypeImage, ".kdc": TypeImage, ".mrw": TypeImage, ".nef": TypeImage, ".orf": TypeImage, ".ori": TypeImage, ".pef": TypeImage, ".png": TypeImage, ".psd": TypeImage, ".raf": TypeImage, ".raw": TypeImage, ".rw2": TypeImage, - ".rwl": TypeImage, ".sr2": TypeImage, ".srf": TypeImage, ".srw": TypeImage, ".tif": TypeImage, ".tiff": TypeImage, ".webp": TypeImage, ".x3f": TypeImage, - ".xmp": TypeSidecar, - ".mp": TypeVideo, -} - -func (ic *ImmichClient) GetSupportedMediaTypes(ctx context.Context) (SupportedMedia, error) { +func (ic *ImmichClient) GetSupportedMediaTypes(ctx context.Context) (filetypes.SupportedMedia, error) { var s map[string][]string err := ic.newServerCall(ctx, EndPointGetSupportedMediaTypes).do(getRequest("/server/media-types", setAcceptJSON()), responseJSON(&s)) if err != nil { return nil, err } - sm := make(SupportedMedia) + sm := make(filetypes.SupportedMedia) for t, l := range s { for _, e := range l { sm[e] = t } } - sm[".mp"] = TypeVideo + sm[".mp"] = filetypes.TypeVideo + sm[".json"] = filetypes.TypeSidecar return sm, err } -func (sm SupportedMedia) TypeFromExt(ext string) string { - ext = strings.ToLower(ext) - if strings.HasPrefix(ext, ".mp~") { - // #405 - ext = ".mp4" - } - return sm[ext] -} - -func (sm SupportedMedia) IsMedia(ext string) bool { - t := sm.TypeFromExt(ext) - return t == TypeVideo || t == TypeImage -} - -var ( - _supportedExtension []string - initSupportedExtion sync.Once -) - -func (sm SupportedMedia) IsExtensionPrefix(ext string) bool { - initSupportedExtion.Do(func() { - _supportedExtension = make([]string, len(sm)) - i := 0 - for k := range sm { - _supportedExtension[i] = k[:len(k)-2] - i++ - } - sort.Strings(_supportedExtension) - }) - ext = strings.ToLower(ext) - _, b := slices.BinarySearch(_supportedExtension, ext) - return b -} - -func (sm SupportedMedia) IsIgnoredExt(ext string) bool { - t := sm.TypeFromExt(ext) - return t == "" -} - func (ic *ImmichClient) TypeFromExt(ext string) string { return 
ic.supportedMediaTypes.TypeFromExt(ext) } diff --git a/immich/client_test.go b/immich/client_test.go index 0e20511f..e87790ed 100644 --- a/immich/client_test.go +++ b/immich/client_test.go @@ -1,6 +1,15 @@ -package immich +package immich_test -import "testing" +import ( + "context" + "io" + "net/http" + "net/http/httptest" + "testing" + + "github.com/simulot/immich-go/immich" + "github.com/simulot/immich-go/internal/filetypes" +) /* baseline @@ -22,9 +31,141 @@ PASS ok github.com/simulot/immich-go/immich 1.283s */ func Benchmark_IsExtensionPrefix(b *testing.B) { - sm := DefaultSupportedMedia + sm := filetypes.DefaultSupportedMedia sm.IsExtensionPrefix(".JP") for i := 0; i < b.N; i++ { sm.IsExtensionPrefix(".JP") } } + +func TestPingServer(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusOK) + _, _ = w.Write([]byte(`{"res":"pong"}`)) + })) + defer server.Close() + + client, _ := immich.NewImmichClient(server.URL, "test-key") + err := client.PingServer(context.Background()) + if err != nil { + t.Fatalf("expected no error, got %v", err) + } +} + +func TestValidateConnection(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.URL.Path == "/api/users/me" { + w.WriteHeader(http.StatusOK) + _, _ = w.Write([]byte(`{"id":"1","email":"test@example.com"}`)) + } else if r.URL.Path == "/api/server/media-types" { + w.WriteHeader(http.StatusOK) + _, _ = w.Write([]byte(`{"image":[".jpg",".png"],"video":[".mp4"]}`)) + } + })) + defer server.Close() + + client, _ := immich.NewImmichClient(server.URL, "test-key") + user, err := client.ValidateConnection(context.Background()) + if err != nil { + t.Fatalf("expected no error, got %v", err) + } + if user.ID != "1" { + t.Errorf("expected user ID to be '1', got %v", user.ID) + } + if user.Email != "test@example.com" { + t.Errorf("expected user email to be 'test@example.com', got %v", 
user.Email) + } +} + +func TestGetServerStatistics(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusOK) + _, _ = w.Write([]byte(`{"photos":100,"videos":50,"usage":1024}`)) + })) + defer server.Close() + + client, _ := immich.NewImmichClient(server.URL, "test-key") + stats, err := client.GetServerStatistics(context.Background()) + if err != nil { + t.Fatalf("expected no error, got %v", err) + } + if stats.Photos != 100 { + t.Errorf("expected photos to be 100, got %v", stats.Photos) + } + if stats.Videos != 50 { + t.Errorf("expected videos to be 50, got %v", stats.Videos) + } + if stats.Usage != 1024 { + t.Errorf("expected usage to be 1024, got %v", stats.Usage) + } +} + +func TestGetAssetStatistics(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusOK) + _, _ = w.Write([]byte(`{"images":200,"videos":100,"total":300}`)) + })) + defer server.Close() + + client, _ := immich.NewImmichClient(server.URL, "test-key") + stats, err := client.GetAssetStatistics(context.Background()) + if err != nil { + t.Fatalf("expected no error, got %v", err) + } + if stats.Images != 200 { + t.Errorf("expected images to be 200, got %v", stats.Images) + } + if stats.Videos != 100 { + t.Errorf("expected videos to be 100, got %v", stats.Videos) + } + if stats.Total != 300 { + t.Errorf("expected total to be 300, got %v", stats.Total) + } +} + +func TestGetSupportedMediaTypes(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusOK) + _, _ = w.Write([]byte(`{"image":[".jpg",".png"],"video":[".mp4"]}`)) + })) + defer server.Close() + + client, _ := immich.NewImmichClient(server.URL, "test-key") + mediaTypes, err := client.GetSupportedMediaTypes(context.Background()) + if err != nil { + t.Fatalf("expected no error, got %v", err) + } + if 
mediaTypes[".jpg"] != filetypes.TypeImage { + t.Errorf("expected .jpg to be %v, got %v", filetypes.TypeImage, mediaTypes[".jpg"]) + } + if mediaTypes[".png"] != filetypes.TypeImage { + t.Errorf("expected .png to be %v, got %v", filetypes.TypeImage, mediaTypes[".png"]) + } + if mediaTypes[".mp4"] != filetypes.TypeVideo { + t.Errorf("expected .mp4 to be %v, got %v", filetypes.TypeVideo, mediaTypes[".mp4"]) + } +} + +func TestDownloadAsset(t *testing.T) { + expectedContent := "dummy content" + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusOK) + _, _ = w.Write([]byte(expectedContent)) + })) + defer server.Close() + + client, _ := immich.NewImmichClient(server.URL, "test-key") + rc, err := client.DownloadAsset(context.Background(), "test-asset-id") + if err != nil { + t.Fatalf("expected no error, got %v", err) + } + defer rc.Close() + + content, err := io.ReadAll(rc) + if err != nil { + t.Fatalf("expected no error reading content, got %v", err) + } + if string(content) != expectedContent { + t.Errorf("expected content to be %v, got %v", expectedContent, string(content)) + } +} diff --git a/immich/daterange.go b/immich/daterange.go deleted file mode 100644 index 004f67d7..00000000 --- a/immich/daterange.go +++ /dev/null @@ -1,75 +0,0 @@ -package immich - -import ( - "fmt" - "time" -) - -// DateRange represent the date range for capture date -type DateRange struct { - After, Before time.Time - day, month, year, set bool -} - -func (dr DateRange) String() string { - switch { - case dr.day: - return dr.After.Format("2006-01-02") - case dr.month: - return dr.After.Format("2006-01") - case dr.year: - return dr.After.Format("2006") - default: - return dr.After.Format("2006-01-02") + "," + dr.Before.AddDate(0, 0, -1).Format("2006-01-02") - } -} - -func (dr *DateRange) Set(s string) (err error) { - dr.set = true - switch len(s) { - case 0: - dr.Before = time.Date(999, 12, 31, 0, 0, 0, 0, time.UTC) - case 
4: - dr.year = true - dr.After, err = time.ParseInLocation("2006", s, time.UTC) - if err == nil { - dr.Before = dr.After.AddDate(1, 0, 0) - return nil - } - case 7: - dr.month = true - dr.After, err = time.ParseInLocation("2006-01", s, time.UTC) - if err == nil { - dr.Before = dr.After.AddDate(0, 1, 0) - return nil - } - case 10: - dr.day = true - dr.After, err = time.ParseInLocation("2006-01-02", s, time.UTC) - if err == nil { - dr.Before = dr.After.AddDate(0, 0, 1) - return nil - } - case 21: - dr.After, err = time.ParseInLocation("2006-01-02", s[:10], time.UTC) - if err == nil { - dr.Before, err = time.ParseInLocation("2006-01-02", s[11:], time.UTC) - if err == nil { - dr.Before = dr.Before.AddDate(0, 0, 1) - return nil - } - } - } - dr.set = false - return fmt.Errorf("invalid date range:%w", err) -} - -func (dr DateRange) IsSet() bool { return dr.set } - -func (dr DateRange) InRange(d time.Time) bool { - if !dr.set || d.IsZero() { - return true - } - // --------------After----------d------------Before - return (d.Compare(dr.After) >= 0 && dr.Before.Compare(d) > 0) -} diff --git a/immich/immich.go b/immich/immich.go index 8f457ec4..430c9170 100644 --- a/immich/immich.go +++ b/immich/immich.go @@ -8,10 +8,13 @@ import ( "sync" "time" - "github.com/simulot/immich-go/browser" - "github.com/simulot/immich-go/helpers/tzone" + "github.com/simulot/immich-go/internal/assets" + "github.com/simulot/immich-go/internal/filetypes" + "github.com/simulot/immich-go/internal/tzone" ) +var _ ImmichInterface = (*ImmichClient)(nil) + // ImmichInterface is an interface that implements the minimal immich client set of features for uploading // interface used to mock up the client type ImmichInterface interface { @@ -22,25 +25,69 @@ type ImmichInterface interface { ValidateConnection(ctx context.Context) (User, error) GetServerStatistics(ctx context.Context) (ServerStatistics, error) GetAssetStatistics(ctx context.Context) (UserStatistics, error) + GetAssetInfo(ctx context.Context, id 
string) (*Asset, error) + DownloadAsset(ctx context.Context, id string) (io.ReadCloser, error) - UpdateAsset(ctx context.Context, ID string, a *browser.LocalAssetFile) (*Asset, error) + UpdateAsset(ctx context.Context, id string, param UpdAssetField) (*Asset, error) GetAllAssets(ctx context.Context) ([]*Asset, error) AddAssetToAlbum(context.Context, string, []string) ([]UpdateAlbumResult, error) - UpdateAssets(ctx context.Context, IDs []string, isArchived bool, isFavorite bool, latitude float64, longitude float64, removeParent bool, stackParentID string) error - GetAllAssetsWithFilter(context.Context, func(*Asset) error) error - AssetUpload(context.Context, *browser.LocalAssetFile) (AssetResponse, error) + UpdateAssets( + ctx context.Context, + IDs []string, + isArchived bool, + isFavorite bool, + latitude float64, + longitude float64, + removeParent bool, + stackParentID string, + ) error + GetAllAssetsWithFilter(context.Context, *SearchMetadataQuery, func(*Asset) error) error + AssetUpload(context.Context, *assets.Asset) (AssetResponse, error) DeleteAssets(context.Context, []string, bool) error - GetAllAlbums(ctx context.Context) ([]AlbumSimplified, error) + GetAllAlbums(ctx context.Context) ([]assets.Album, error) GetAlbumInfo(ctx context.Context, id string, withoutAssets bool) (AlbumContent, error) - CreateAlbum(ctx context.Context, tilte string, description string, ids []string) (AlbumSimplified, error) - GetAssetAlbums(ctx context.Context, ID string) ([]AlbumSimplified, error) + CreateAlbum( + ctx context.Context, + tilte string, + description string, + ids []string, + ) (assets.Album, error) + + // GetAssetAlbums get all albums that an asset belongs to + GetAssetAlbums(ctx context.Context, assetID string) ([]assets.Album, error) DeleteAlbum(ctx context.Context, id string) error - StackAssets(ctx context.Context, cover string, IDs []string) error + SupportedMedia() filetypes.SupportedMedia - SupportedMedia() SupportedMedia GetJobs(ctx context.Context) 
(map[string]Job, error) + SendJobCommand( + ctx context.Context, + jobID JobID, + command JobCommand, + force bool, + ) (SendJobCommandResponse, error) + CreateJob(ctx context.Context, name JobName) error + + UpsertTags(ctx context.Context, tags []string) ([]TagSimplified, error) + TagAssets( + ctx context.Context, + tagID string, + assetIDs []string, + ) ([]TagAssetsResponse, error) + BulkTagAssets( + ctx context.Context, + tagIDs []string, + assetIDs []string, + ) (struct { + Count int `json:"count"` + }, error) +} + +type ImmichStackInterface interface { + ImmichInterface + // CreateStack create a stack with the given assets, the 1st asset is the cover, return the stack ID + CreateStack(ctx context.Context, ids []string) (string, error) } type UnsupportedMedia struct { @@ -138,13 +185,43 @@ type Asset struct { IsArchived bool `json:"isArchived"` IsTrashed bool `json:"isTrashed"` Duration string `json:"duration"` + Rating int `json:"rating"` ExifInfo ExifInfo `json:"exifInfo"` LivePhotoVideoID string `json:"livePhotoVideoId"` - Tags []any `json:"tags"` Checksum string `json:"checksum"` StackParentID string `json:"stackParentId"` - JustUploaded bool `json:"-"` Albums []AlbumSimplified `json:"-"` // Albums that asset belong to + Tags []TagSimplified `json:"tags"` + // JustUploaded bool `json:"-"` // TO REMOVE +} + +// NewAssetFromImmich creates an assets.Asset from an immich.Asset. 
+func (ia Asset) AsAsset() *assets.Asset { + a := &assets.Asset{ + FileDate: ia.FileModifiedAt.Time, + Description: ia.ExifInfo.Description, + OriginalFileName: ia.OriginalFileName, + ID: ia.ID, + CaptureDate: ia.ExifInfo.DateTimeOriginal.Time, + Trashed: ia.IsTrashed, + Archived: ia.IsArchived, + Favorite: ia.IsFavorite, + Rating: ia.Rating, + Latitude: ia.ExifInfo.Latitude, + Longitude: ia.ExifInfo.Longitude, + } + a.FileSize = int(ia.ExifInfo.FileSizeInByte) + for _, album := range ia.Albums { + a.Albums = append(a.Albums, assets.Album{ + Title: album.AlbumName, + Description: album.Description, + }) + } + + for _, tag := range ia.Tags { + a.Tags = append(a.Tags, tag.AsTag()) + } + return a } type ExifInfo struct { @@ -152,7 +229,7 @@ type ExifInfo struct { Model string `json:"model"` ExifImageWidth int `json:"exifImageWidth"` ExifImageHeight int `json:"exifImageHeight"` - FileSizeInByte int `json:"fileSizeInByte"` + FileSizeInByte int64 `json:"fileSizeInByte"` Orientation string `json:"orientation"` DateTimeOriginal ImmichTime `json:"dateTimeOriginal,omitempty"` // ModifyDate time.Time `json:"modifyDate"` diff --git a/immich/job.go b/immich/job.go index a6297d06..abbdb327 100644 --- a/immich/job.go +++ b/immich/job.go @@ -17,8 +17,70 @@ type Job struct { } `json:"queueStatus"` } +type SendJobCommandResponse struct { + JobCounts struct { + Active int `json:"active"` + Completed int `json:"completed"` + Delayed int `json:"delayed"` + Failed int `json:"failed"` + Paused int `json:"paused"` + Waiting int `json:"waiting"` + } `json:"jobCounts"` + QueueStatus struct { + IsActive bool `json:"isActive"` + IsPause bool `json:"isPause"` + } +} + +type JobID string + +const ( + StorageTemplateMigration JobID = "storageTemplateMigration" +) + +type JobCommand string + +const ( + Start JobCommand = "start" + Pause JobCommand = "pause" + Resume JobCommand = "resume" + Empty JobCommand = "empty" + ClearFailed JobCommand = "clear-failed" +) + +type JobName string + +const ( + 
PersonCleanup JobName = "person-cleanup" + TagCleanup JobName = "tag-cleanup" + UserCleanup JobName = "user-cleanup" +) + func (ic *ImmichClient) GetJobs(ctx context.Context) (map[string]Job, error) { var resp map[string]Job - err := ic.newServerCall(ctx, EndPointGetJobs).do(getRequest("/jobs", setAcceptJSON()), responseJSON(&resp)) + err := ic.newServerCall(ctx, EndPointGetJobs). + do(getRequest("/jobs", setAcceptJSON()), responseJSON(&resp)) return resp, err } + +func (ic *ImmichClient) SendJobCommand( + ctx context.Context, + jobID JobID, + command JobCommand, + force bool, +) (resp SendJobCommandResponse, err error) { + err = ic.newServerCall(ctx, EndPointSendJobCommand).do(putRequest("/jobs/"+string(jobID), + setJSONBody(struct { + Command JobCommand `json:"command"` + Force bool `json:"force"` + }{Command: command, Force: force})), responseJSON(&resp)) + return +} + +func (ic *ImmichClient) CreateJob(ctx context.Context, name JobName) error { + return ic.newServerCall(ctx, EndPointCreateJob).do(postRequest("/jobs", + "application/json", + setJSONBody(struct { + Name JobName `json:"name"` + }{Name: name}))) +} diff --git a/immich/metadata.go b/immich/metadata.go index d4bd696c..099b7457 100644 --- a/immich/metadata.go +++ b/immich/metadata.go @@ -13,24 +13,32 @@ type searchMetadataResponse struct { } } -type searchMetadataGetAllBody struct { - Page int `json:"page"` - WithExif bool `json:"withExif,omitempty"` - IsVisible bool `json:"isVisible,omitempty"` - WithDeleted bool `json:"withDeleted,omitempty"` - Size int `json:"size,omitempty"` +type SearchMetadataQuery struct { + // pagination + Page int `json:"page"` + Size int `json:"size,omitempty"` + + // filters + WithExif bool `json:"withExif,omitempty"` + IsVisible bool `json:"isVisible,omitempty"` // For motion stuff you need to pass isVisible=true to hide the motion ones (dijrasm91 — https://discord.com/channels/979116623879368755/1178366369423700080/1201206313699508295) + WithDeleted bool 
`json:"withDeleted,omitempty"` + WithArchived bool `json:"withArchived,omitempty"` + TakenBefore string `json:"takenBefore,omitempty"` + TakenAfter string `json:"takenAfter,omitempty"` + Model string `json:"model,omitempty"` + Make string `json:"make,omitempty"` } -func (ic *ImmichClient) callSearchMetadata(ctx context.Context, req *searchMetadataGetAllBody, filter func(*Asset) error) error { - req.Page = 1 - req.Size = 1000 +func (ic *ImmichClient) callSearchMetadata(ctx context.Context, query *SearchMetadataQuery, filter func(*Asset) error) error { + query.Page = 1 + query.Size = 1000 for { select { case <-ctx.Done(): return ctx.Err() default: resp := searchMetadataResponse{} - err := ic.newServerCall(ctx, EndPointGetAllAssets).do(postRequest("/search/metadata", "application/json", setJSONBody(&req), setAcceptJSON()), responseJSON(&resp)) + err := ic.newServerCall(ctx, EndPointGetAllAssets).do(postRequest("/search/metadata", "application/json", setJSONBody(&query), setAcceptJSON()), responseJSON(&resp)) if err != nil { return err } @@ -45,7 +53,7 @@ func (ic *ImmichClient) callSearchMetadata(ctx context.Context, req *searchMetad if resp.Assets.NextPage == 0 { return nil } - req.Page = resp.Assets.NextPage + query.Page = resp.Assets.NextPage } } } @@ -53,7 +61,7 @@ func (ic *ImmichClient) callSearchMetadata(ctx context.Context, req *searchMetad func (ic *ImmichClient) GetAllAssets(ctx context.Context) ([]*Asset, error) { var assets []*Asset - req := searchMetadataGetAllBody{Page: 1, WithExif: true, IsVisible: true, WithDeleted: true} + req := SearchMetadataQuery{Page: 1, WithExif: true, IsVisible: true, WithDeleted: true} err := ic.callSearchMetadata(ctx, &req, func(asset *Asset) error { assets = append(assets, asset) return nil @@ -64,7 +72,10 @@ func (ic *ImmichClient) GetAllAssets(ctx context.Context) ([]*Asset, error) { return assets, nil } -func (ic *ImmichClient) GetAllAssetsWithFilter(ctx context.Context, filter func(*Asset) error) error { - req := 
searchMetadataGetAllBody{Page: 1, WithExif: true, IsVisible: true, WithDeleted: true} - return ic.callSearchMetadata(ctx, &req, filter) +func (ic *ImmichClient) GetAllAssetsWithFilter(ctx context.Context, query *SearchMetadataQuery, filter func(*Asset) error) error { + if query == nil { + query = &SearchMetadataQuery{Page: 1, WithExif: true, IsVisible: true, WithDeleted: true} + } + query.Page = 1 + return ic.callSearchMetadata(ctx, query, filter) } diff --git a/immich/metadata/direct.go b/immich/metadata/direct.go deleted file mode 100644 index 0a50fec5..00000000 --- a/immich/metadata/direct.go +++ /dev/null @@ -1,107 +0,0 @@ -package metadata - -import ( - "fmt" - "io" - "io/fs" - "path" - "strings" - "time" -) - -func GetFileMetaData(fsys fs.FS, name string) (Metadata, error) { - f, err := fsys.Open(name) - if err != nil { - return Metadata{}, err - } - defer f.Close() - return GetFromReader(f, path.Ext(name)) -} - -// GetMetaData makes its best efforts to get the date of capture based on -// - if the name matches a at least 4 digits for the year, 2 for month, 2 for day, in this order. -// It takes the hour, minute, second when present. 
Very fast -// -// - file content if the file includes some metadata, need read a part of the file -// -// - -func GetFromReader(rd io.Reader, ext string) (Metadata, error) { - r := newSliceReader(rd) - meta := Metadata{} - var err error - var dateTaken time.Time - switch strings.ToLower(ext) { - case ".heic", ".heif": - dateTaken, err = readHEIFDateTaken(r) - case ".jpg", ".jpeg", ".dng", ".cr2": - dateTaken, err = readExifDateTaken(r) - case ".mp4", ".mov": - dateTaken, err = readMP4DateTaken(r) - case ".cr3": - dateTaken, err = readCR3DateTaken(r) - default: - err = fmt.Errorf("can't determine the taken date from metadata (%s)", ext) - } - meta.DateTaken = dateTaken - return meta, err -} - -// readExifDateTaken pase the file for Exif DateTaken -func readExifDateTaken(r io.Reader) (time.Time, error) { - md, err := getExifFromReader(r) - return md.DateTaken, err -} - -const searchBufferSize = 32 * 1024 - -// readHEIFDateTaken locate the Exif part and return the date of capture -func readHEIFDateTaken(r *sliceReader) (time.Time, error) { - b := make([]byte, searchBufferSize) - r, err := searchPattern(r, []byte{0x45, 0x78, 0x69, 0x66, 0, 0, 0x4d, 0x4d}, b) - if err != nil { - return time.Time{}, err - } - - filler := make([]byte, 6) - _, err = r.Read(filler) - if err != nil { - return time.Time{}, err - } - - md, err := getExifFromReader(r) - return md.DateTaken, err -} - -// readMP4DateTaken locate the mvhd atom and decode the date of capture -func readMP4DateTaken(r *sliceReader) (time.Time, error) { - b := make([]byte, searchBufferSize) - - r, err := searchPattern(r, []byte{'m', 'v', 'h', 'd'}, b) - if err != nil { - return time.Time{}, err - } - atom, err := decodeMvhdAtom(r) - if err != nil { - return time.Time{}, err - } - return atom.CreationTime, nil -} - -func readCR3DateTaken(r *sliceReader) (time.Time, error) { - b := make([]byte, searchBufferSize) - - r, err := searchPattern(r, []byte("CMT1"), b) - if err != nil { - return time.Time{}, err - } - - filler 
:= make([]byte, 4) - _, err = r.Read(filler) - if err != nil { - return time.Time{}, err - } - - md, err := getExifFromReader(r) - return md.DateTaken, err -} diff --git a/immich/metadata/direct_test.go b/immich/metadata/direct_test.go deleted file mode 100644 index 1af179e2..00000000 --- a/immich/metadata/direct_test.go +++ /dev/null @@ -1,72 +0,0 @@ -//go:build e2e -// +build e2e - -package metadata - -import ( - "os" - "path" - "testing" - "time" -) - -func mustParse(s string) time.Time { - t, err := time.ParseInLocation("2006:01:02 15:04:05-07:00", s, local) - if err != nil { - panic(err) - } - return t -} - -func TestGetFromReader(t *testing.T) { - tests := []struct { - name string - filename string - want time.Time - }{ - { - name: "cr3", - filename: "../../../test-data/burst/Reflex/3H2A0018.CR3", - want: mustParse("2023:06:23 13:32:52+02:00"), - }, - { - name: "jpg", - filename: "../../../test-data/burst/Reflex/3H2A0018.JPG", - want: mustParse("2023:06:23 13:32:52+02:00"), - }, - { - name: "jpg", - filename: "../../../test-data/burst/PXL6/PXL_20231029_062723981.jpg", - want: mustParse("2023:10:29 07:27:23+01:00"), - }, - { - name: "dng", - filename: "../../../test-data/burst/PXL6/PXL_20231029_062723981.dng", - want: mustParse("2023:10:29 07:27:24+01:00"), - }, - { - name: "cr2", - filename: "../../../test-data/burst/IMG_4879.CR2", - want: mustParse("2023:02:24 18:59:09+01:00"), - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - r, err := os.Open(tt.filename) - if err != nil { - t.Error(err) - return - } - defer r.Close() - ext := path.Ext(tt.filename) - got, err := GetFromReader(r, ext) - if err != nil { - t.Error(err) - return - } - if !tt.want.Equal(got.DateTaken) { - t.Errorf("GetFromReader() = %v, want %v", got.DateTaken, tt.want) - } - }) - } -} diff --git a/immich/metadata/exif.go b/immich/metadata/exif.go deleted file mode 100644 index 3efaf770..00000000 --- a/immich/metadata/exif.go +++ /dev/null @@ -1,51 +0,0 @@ -package 
metadata - -import ( - "errors" - "fmt" - "io" - "strings" - "time" - - "github.com/rwcarlsen/goexif/exif" -) - -func getExifFromReader(r io.Reader) (Metadata, error) { - var md Metadata - // Decode the EXIF data - x, err := exif.Decode(r) - if err != nil && exif.IsCriticalError(err) { - if errors.Is(err, io.EOF) { - return md, nil - } - return md, fmt.Errorf("can't get DateTaken: %w", err) - } - - tag, err := getTagSting(x, exif.GPSDateStamp) - if err == nil { - md.DateTaken, err = time.ParseInLocation("2006:01:02 15:04:05Z", tag, local) - } - if err != nil { - tag, err = getTagSting(x, exif.DateTimeOriginal) - if err == nil { - md.DateTaken, err = time.ParseInLocation("2006:01:02 15:04:05", tag, local) - } - } - if err != nil { - tag, err = getTagSting(x, exif.DateTime) - if err == nil { - md.DateTaken, err = time.ParseInLocation("2006:01:02 15:04:05", tag, local) - } - } - - return md, err -} - -func getTagSting(x *exif.Exif, tagName exif.FieldName) (string, error) { - t, err := x.Get(tagName) - if err != nil { - return "", err - } - s := strings.TrimRight(strings.TrimLeft(t.String(), `"`), `"`) - return s, nil -} diff --git a/immich/metadata/metadata.go b/immich/metadata/metadata.go deleted file mode 100644 index a7e26379..00000000 --- a/immich/metadata/metadata.go +++ /dev/null @@ -1,115 +0,0 @@ -package metadata - -import ( - "encoding/xml" - "fmt" - "io" - "strings" - "time" -) - -type Metadata struct { - Description string - DateTaken time.Time - Latitude float64 - Longitude float64 - Altitude float64 -} - -func (m Metadata) IsSet() bool { - return m.Description != "" || !m.DateTaken.IsZero() || m.Latitude != 0 || m.Longitude != 0 -} - -func (m Metadata) Write(w io.Writer) error { - _, err := io.WriteString(w, header) - if err != nil { - return err - } - if m.Description != "" { - _, err = io.WriteString(w, descriptionHeader) - if err != nil { - return err - } - err = xml.EscapeText(w, []byte(m.Description)) - if err != nil { - return err - } - _, err = 
io.WriteString(w, descriptionFooter) - if err != nil { - return err - } - } - - writeExifBlock := !m.DateTaken.IsZero() || m.Latitude != 0 || m.Longitude != 0 - if writeExifBlock { - _, err = io.WriteString(w, exifHeader) - if err != nil { - return err - } - if !m.DateTaken.IsZero() { - _, err := fmt.Fprintf(w, exifDateTimeOriginal, m.DateTaken.UTC().Format("2006-01-02T15:04:05Z")) - if err != nil { - return err - } - } - if m.Latitude != 0 || m.Longitude != 0 { - _, err = fmt.Fprintf(w, exifGPSLatitude, m.Latitude) - if err != nil { - return err - } - _, err = fmt.Fprintf(w, exifGPSLongitude, m.Longitude) - if err != nil { - return err - } - } - _, err = io.WriteString(w, exifFooter) - if err != nil { - return err - } - } - _, err = io.WriteString(w, footer) - return err -} - -func (m Metadata) String() string { - s := strings.Builder{} - _ = m.Write(&s) - return s.String() -} - -const ( - header = ` - - -` - descriptionHeader = ` - - - ` - - descriptionFooter = ` - - - -` - - exifHeader = ` - 0220` - - exifDateTimeOriginal = ` %s -` - exifGPSAltitude = ` 0 -` - exifGPSLatitude = ` %f -` - exifGPSLongitude = ` %f -` - exifFooter = ` 2.3.0.0 - -` - footer = ` - -` -) diff --git a/immich/metadata/metadata_test.go b/immich/metadata/metadata_test.go deleted file mode 100644 index 72bccccf..00000000 --- a/immich/metadata/metadata_test.go +++ /dev/null @@ -1,120 +0,0 @@ -package metadata - -import ( - "testing" - "time" -) - -func TestMetadata_String(t *testing.T) { - type fields struct { - Description string - DateTaken time.Time - Latitude float64 - Longitude float64 - } - tests := []struct { - name string - fields fields - want string - }{ - { - name: "TimeOnly", - fields: fields{ - DateTaken: time.Date(2000, 1, 2, 15, 32, 59, 0, time.UTC), - }, - want: ` - - - - 0220 2000-01-02T15:32:59Z - 2.3.0.0 - - - -`, - }, - { - name: "DescriptionOnly", - fields: fields{ - Description: "That's a < description > !", - }, - want: ` - - - - - - That's a < description > ! 
- - - - - -`, - }, - { - name: "GPSOnly", - fields: fields{ - Latitude: 71.1652089, - Longitude: 25.7909877, - }, - want: ` - - - - 0220 71.165209 - 25.790988 - 2.3.0.0 - - - -`, - }, - { - name: "All", - fields: fields{ - Description: `That /!\ strange & dark â ø`, - DateTaken: time.Date(2000, 1, 2, 15, 32, 59, 0, time.UTC), - Latitude: 71.1652089, - Longitude: 25.7909877, - }, - want: ` - - - - - - That /!\ strange & dark <place> â ø - - - - - 0220 2000-01-02T15:32:59Z - 71.165209 - 25.790988 - 2.3.0.0 - - - -`, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - m := Metadata{ - Description: tt.fields.Description, - DateTaken: tt.fields.DateTaken, - Latitude: tt.fields.Latitude, - Longitude: tt.fields.Longitude, - } - if got := m.String(); got != tt.want { - t.Errorf("Meta.String() = %v, want %v", got, tt.want) - } - }) - } -} diff --git a/immich/metadata/timezone.go b/immich/metadata/timezone.go deleted file mode 100644 index 720cae01..00000000 --- a/immich/metadata/timezone.go +++ /dev/null @@ -1,17 +0,0 @@ -package metadata - -import ( - "time" - - "github.com/simulot/immich-go/helpers/tzone" -) - -var local *time.Location - -func init() { - var err error - local, err = tzone.Local() - if err != nil { - panic(err) - } -} diff --git a/immich/stacks.go b/immich/stacks.go new file mode 100644 index 00000000..c4755df5 --- /dev/null +++ b/immich/stacks.go @@ -0,0 +1,28 @@ +package immich + +import ( + "context" + + "github.com/google/uuid" +) + +// CreateStack create a stack with the given assets, the 1st asset is the cover, return the stack ID +func (ic *ImmichClient) CreateStack(ctx context.Context, ids []string) (string, error) { + if ic.dryRun { + return uuid.NewString(), nil + } + + param := struct { + AssetIds []string `json:"assetIds"` + }{ + AssetIds: ids, + } + + var result struct { + ID string `json:"id"` + PrimaryAssetID string `json:"primaryAssetId"` + } + + err := ic.newServerCall(ctx, "createStack").do(postRequest("/stacks", 
"application/json", setAcceptJSON(), setJSONBody(param)), responseJSON(&result)) + return result.ID, err +} diff --git a/immich/tag.go b/immich/tag.go new file mode 100644 index 00000000..acaca19a --- /dev/null +++ b/immich/tag.go @@ -0,0 +1,84 @@ +package immich + +import ( + "context" + "fmt" + + "github.com/simulot/immich-go/internal/assets" +) + +type TagSimplified struct { + ID string `json:"id"` + Name string `json:"name"` + Value string `json:"value"` +} + +func (ts TagSimplified) AsTag() assets.Tag { + return assets.Tag{ + ID: ts.ID, + Name: ts.Name, + Value: ts.Value, + } +} + +type TagAssetsResponse struct { + Error string `json:"error,omitempty"` // [duplicate, no_permission, not_found, unknown] + ID string `json:"id"` + Success bool `json:"success"` +} + +func (ic *ImmichClient) UpsertTags(ctx context.Context, tags []string) ([]TagSimplified, error) { + var resp []TagSimplified + body := struct { + Tags []string `json:"tags"` + }{Tags: tags} + err := ic.newServerCall(ctx, EndPointUpsertTags). + do(putRequest("/tags", setJSONBody(body), setAcceptJSON()), responseJSON(&resp)) + if err != nil { + return nil, err + } + return resp, nil +} + +func (ic *ImmichClient) TagAssets( + ctx context.Context, + tagID string, + assetIDs []string, +) ([]TagAssetsResponse, error) { + var resp []TagAssetsResponse + + body := struct { + IDs []string `json:"ids"` + }{IDs: assetIDs} + err := ic.newServerCall(ctx, EndPointTagAssets). 
+ do(putRequest(fmt.Sprintf("/tags/%s/assets", tagID), setJSONBody(body), setAcceptJSON()), responseJSON(&resp)) + if err != nil { + return nil, err + } + return resp, nil +} + +func (ic *ImmichClient) BulkTagAssets( + ctx context.Context, + tagIDs []string, + assetIDs []string, +) (struct { + Count int `json:"count"` +}, error, +) { + var resp struct { + Count int `json:"count"` + } + + body := struct { + TagIDs []string `json:"tagIds"` + AssetIDs []string `json:"assetIds"` + }{ + TagIDs: tagIDs, + AssetIDs: assetIDs, + } + err := ic.newServerCall(ctx, EndPointBulkTagAssets). + do(putRequest("/tags/assets", setJSONBody(body)), responseJSON(&resp)) + + return resp, err +} diff --git a/immich/trace.go b/immich/trace.go index 1666d5e0..948b3846 100644 --- a/immich/trace.go +++ b/immich/trace.go @@ -8,7 +8,7 @@ import ( "sort" "time" - "github.com/simulot/immich-go/helpers/gen" + "github.com/simulot/immich-go/internal/gen" ) /* diff --git a/internal/assets/album.go b/internal/assets/album.go new file mode 100644 index 00000000..7403c7ef --- /dev/null +++ b/internal/assets/album.go @@ -0,0 +1,22 @@ +package assets + +import ( + "log/slog" +) + +type Album struct { + ID string `json:"-"` // The album ID + Title string `json:"title,omitempty"` // either the directory base name, or metadata + Description string `json:"description,omitempty"` // As found in the metadata + Latitude float64 `json:"latitude,omitempty"` // As found in the metadata + Longitude float64 `json:"longitude,omitempty"` // As found in the metadata +} + +func (a Album) LogValue() slog.Value { + return slog.GroupValue( + slog.String("title", a.Title), + slog.String("description", a.Description), + slog.Float64("latitude", a.Latitude), + slog.Float64("longitude", a.Longitude), + ) +} diff --git a/internal/assets/asset.go b/internal/assets/asset.go new file mode 100644 index 00000000..bb3d4a8e --- /dev/null +++ b/internal/assets/asset.go @@ -0,0 +1,131 @@ +package assets + +import ( + "io" + "io/fs" + 
"log/slog" + "os" + "time" + + "github.com/simulot/immich-go/internal/fshelper" +) + +/* + Asset structure hold information on assets used for building immich assets. + + The asset is taken into a fs.FS system which doesn't implement anything else than a strait + reader. + fsys can be a zip file, a DirFS, or anything else. + + It implements a way to read a minimal quantity of data to be able to take a decision + about chose a file or discard it. + + implements fs.File and fs.FileInfo, Stat + +*/ + +type Asset struct { + // File system and file name + File fshelper.FSAndName + FileDate time.Time // File creation date + ID string // Immich ID after upload + + // Common fields + OriginalFileName string // File name as delivered to Immich/Google + Description string // Google Photos may a have description + FileSize int // File size in bytes + + // Metadata for the process and the upload to Immich + CaptureDate time.Time // Date of the capture + Trashed bool // The asset is trashed + Archived bool // The asset is archived + FromPartner bool // the asset comes from a partner + Favorite bool // the asset is marked as favorite + Rating int // the asset is marked with stars + Albums []Album // List of albums the asset is in + Tags []Tag // List of tags the asset is tagged with + + // Information inferred from the original file name + NameInfo + + FromSideCar *Metadata // Metadata extracted from a sidecar file (XMP or JSON) + FromSourceFile *Metadata // Metadata extracted from the file content (embedded metadata) + FromApplication *Metadata // Metadata extracted from the application that created the file + + // GPS location + Latitude float64 // GPS latitude + Longitude float64 // GPS longitude + + // buffer management + sourceFile fs.File // the opened source file + tempFile *os.File // buffer that keep partial reads available for the full file reading + teeReader io.Reader // write each read from it into the tempWriter + reader io.Reader // the reader that combines the 
partial read and original file for full file reading +} + +// Kind is the probable type of the image +type Kind int + +const ( + KindNone Kind = iota + KindBurst + KindEdited + KindPortrait + KindNight + KindMotion + KindLongExposure +) + +type NameInfo struct { + Base string // base name (with extension) + Ext string // extension + Radical string // base name usable for grouping photos + Type string // type of the asset video, image + Kind Kind // type of the series + Index int // index of the asset in the series + Taken time.Time // date taken + IsCover bool // is this is the cover if the series + IsModified bool // is this is a modified version of the original +} + +func (a *Asset) SetNameInfo(ni NameInfo) { + a.NameInfo = ni +} + +func (a *Asset) UseMetadata(md *Metadata) *Metadata { + if md == nil { + return nil + } + a.Description = md.Description + a.Latitude = md.Latitude + a.Longitude = md.Longitude + a.CaptureDate = md.DateTaken + a.FromPartner = md.FromPartner + a.Trashed = md.Trashed + a.Archived = md.Archived + a.Favorite = md.Favorited + a.Rating = int(md.Rating) + a.Albums = md.Albums + a.Tags = md.Tags + return md +} + +// LogValue returns a slog.Value representing the LocalAssetFile's properties. 
+func (a Asset) LogValue() slog.Value { + return slog.GroupValue( + slog.Any("FileName", a.File), + slog.Time("FileDate", a.FileDate), + slog.String("Description", a.Description), + slog.String("Title", a.OriginalFileName), + slog.Int("FileSize", a.FileSize), + slog.String("ID", a.ID), + slog.Time("CaptureDate", a.CaptureDate), + slog.Bool("Trashed", a.Trashed), + slog.Bool("Archived", a.Archived), + slog.Bool("FromPartner", a.FromPartner), + slog.Bool("Favorite", a.Favorite), + slog.Int("Stars", a.Rating), + slog.Float64("Latitude", a.Latitude), + slog.Float64("Longitude", a.Longitude), + ) +} diff --git a/internal/assets/assetFile.go b/internal/assets/assetFile.go new file mode 100644 index 00000000..f4d2d906 --- /dev/null +++ b/internal/assets/assetFile.go @@ -0,0 +1,126 @@ +package assets + +import ( + "errors" + "fmt" + "io" + "io/fs" + "os" + "time" + + "github.com/simulot/immich-go/internal/fshelper" +) + +// Remove the temporary file +func (a *Asset) Remove() error { + if fsys, ok := a.File.FS().(fshelper.FSCanRemove); ok { + return fsys.Remove(a.File.Name()) + } + return nil +} + +func (a *Asset) DeviceAssetID() string { + return fmt.Sprintf("%s-%d", a.OriginalFileName, a.FileSize) +} + +// PartialSourceReader open a reader on the current asset. +// each byte read from it is saved into a temporary file. +// +// It returns a TeeReader that writes each read byte from the sou²rce into the temporary file. 
+// The temporary file is discarded when the LocalAssetFile is closed +// TODO: possible optimization: when the file is a plain file, do not copy it into a temporary file +// TODO: use user temp folder + +func (a *Asset) PartialSourceReader() (reader io.Reader, tmpName string, err error) { + if a.sourceFile == nil { + a.sourceFile, err = a.File.Open() + if err != nil { + return nil, "", err + } + } + if a.tempFile == nil { + a.tempFile, err = os.CreateTemp("", "immich-go_*"+a.NameInfo.Ext) + if err != nil { + return nil, "", err + } + if a.teeReader == nil { + a.teeReader = io.TeeReader(a.sourceFile, a.tempFile) + } + } + _, err = a.tempFile.Seek(0, 0) + if err != nil { + return nil, "", err + } + return io.MultiReader(a.tempFile, a.teeReader), a.tempFile.Name(), nil +} + +// Open return fs.File that reads previously read bytes followed by the actual file content. +func (a *Asset) Open() (fs.File, error) { + var err error + if a.sourceFile == nil { + a.sourceFile, err = a.File.Open() + if err != nil { + return nil, err + } + } + if a.tempFile != nil { + _, err = a.tempFile.Seek(0, 0) + if err != nil { + return nil, err + } + a.reader = io.MultiReader(a.tempFile, a.sourceFile) + } else { + a.reader = a.sourceFile + } + return a, nil +} + +// Read +func (a *Asset) Read(b []byte) (int, error) { + return a.reader.Read(b) +} + +// Close close the temporary file and close the source +func (a *Asset) Close() error { + var err error + if a.sourceFile != nil { + err = errors.Join(err, a.sourceFile.Close()) + a.sourceFile = nil + } + if a.tempFile != nil { + f := a.tempFile.Name() + err = errors.Join(err, a.tempFile.Close()) + err = errors.Join(err, os.Remove(f)) + a.tempFile = nil + } + return err +} + +// Stat implements the fs.FILE interface +func (a *Asset) Stat() (fs.FileInfo, error) { + return a, nil +} +func (a *Asset) IsDir() bool { return false } + +func (a *Asset) Name() string { + return a.File.Name() +} + +func (a *Asset) Size() int64 { + return int64(a.FileSize) 
+} + +// Mode Implements the fs.FILE interface +func (a *Asset) Mode() fs.FileMode { return 0 } + +// ModTime implements the fs.FILE interface +func (a *Asset) ModTime() time.Time { + s, err := a.File.Stat() + if err != nil { + return time.Time{} + } + return s.ModTime() +} + +// Sys implements the fs.FILE interface +func (a *Asset) Sys() any { return nil } diff --git a/internal/assets/group.go b/internal/assets/group.go new file mode 100644 index 00000000..38c07d3e --- /dev/null +++ b/internal/assets/group.go @@ -0,0 +1,87 @@ +package assets + +import ( + "errors" +) + +type GroupBy int + +const ( + GroupByNone GroupBy = iota + GroupByBurst // Group by burst + GroupByRawJpg // Group by raw/jpg + GroupByHeicJpg // Group by heic/jpg + GroupByOther // Group by other (same radical, not previous cases) +) + +type removed struct { + Asset *Asset + Reason string +} + +type Group struct { + Assets []*Asset + Removed []removed + Albums []Album + Grouping GroupBy + CoverIndex int // index of the cover assert in the Assets slice +} + +// NewGroup create a new asset group +func NewGroup(grouping GroupBy, a ...*Asset) *Group { + return &Group{ + Grouping: grouping, + Assets: a, + } +} + +// AddAsset add an asset to the group +func (g *Group) AddAsset(a *Asset) { + g.Assets = append(g.Assets, a) +} + +// RemoveAsset remove an asset from the group +func (g *Group) RemoveAsset(a *Asset, reason string) { + for i, asset := range g.Assets { + if asset == a { + g.Removed = append(g.Removed, removed{Asset: asset, Reason: reason}) + g.Assets = append(g.Assets[:i], g.Assets[i+1:]...) 
+ return + } + } +} + +// AddAlbum adds an album to the group if there is no other album with the same title +func (g *Group) AddAlbum(album Album) { + for _, a := range g.Albums { + if a.Title == album.Title { + return + } + } + g.Albums = append(g.Albums, album) +} + +// SetCover set the cover asset of the group +func (g *Group) SetCover(i int) *Group { + g.CoverIndex = i + return g +} + +func (g *Group) Validate() error { + if g == nil { + return errors.New("nil group") + } + if len(g.Assets) == 0 { + return errors.New("empty group") + } + // test all asset not nil + for _, a := range g.Assets { + if a == nil { + return errors.New("nil asset in group") + } + } + if 0 > g.CoverIndex || g.CoverIndex > len(g.Assets) { + return errors.New("cover index out of range") + } + return nil +} diff --git a/internal/assets/metadata.go b/internal/assets/metadata.go new file mode 100644 index 00000000..32abf7c6 --- /dev/null +++ b/internal/assets/metadata.go @@ -0,0 +1,52 @@ +package assets + +import ( + "encoding/json" + "log/slog" + "time" + + "github.com/simulot/immich-go/internal/fshelper" +) + +type Metadata struct { + File fshelper.FSAndName `json:"-"` // File name and file system that holds the metadata. 
Could be empty + FileName string `json:"fileName,omitempty"` // File name as presented to users + Latitude float64 `json:"latitude,omitempty"` // GPS + Longitude float64 `json:"longitude,omitempty"` // GPS + DateTaken time.Time `json:"dateTaken,omitempty"` // Date of exposure + Description string `json:"description,omitempty"` // Long description + Albums []Album `json:"albums,omitempty"` // Used to list albums that contain the file + Tags []Tag `json:"tags,omitempty"` // Used to list tags + Rating byte `json:"rating,omitempty"` // 0 to 5 + Trashed bool `json:"trashed,omitempty"` // Flag to indicate if the image has been trashed + Archived bool `json:"archived,omitempty"` // Flag to indicate if the image has been archived + Favorited bool `json:"favorited,omitempty"` // Flag to indicate if the image has been favorited + FromPartner bool `json:"fromPartner,omitempty"` // Flag to indicate if the image is from a partner +} + +func (m Metadata) LogValue() slog.Value { + return slog.GroupValue( + slog.Float64("latitude", m.Latitude), + slog.Float64("longitude", m.Longitude), + slog.Any("fileName", m.File), + slog.Time("dateTaken", m.DateTaken), + slog.String("description", m.Description), + slog.Int("rating", int(m.Rating)), + slog.Bool("trashed", m.Trashed), + slog.Bool("archived", m.Archived), + slog.Bool("favorited", m.Favorited), + slog.Bool("fromPartner", m.FromPartner), + slog.Any("albums", m.Albums), + slog.Any("tags", m.Tags), + ) +} + +func (m Metadata) IsSet() bool { + return m.Description != "" || !m.DateTaken.IsZero() || m.Latitude != 0 || m.Longitude != 0 +} + +func UnMarshalMetadata(data []byte) (*Metadata, error) { + var m Metadata + err := json.Unmarshal(data, &m) + return &m, err +} diff --git a/immich/metadata/sidecar.go b/internal/assets/sidecar.goNO similarity index 95% rename from immich/metadata/sidecar.go rename to internal/assets/sidecar.goNO index 2a786779..af8626a2 100644 --- a/immich/metadata/sidecar.go +++ b/internal/assets/sidecar.goNO @@ 
-1,4 +1,4 @@ -package metadata +package assets import ( "io" diff --git a/internal/assets/tag.go b/internal/assets/tag.go new file mode 100644 index 00000000..3649d64f --- /dev/null +++ b/internal/assets/tag.go @@ -0,0 +1,22 @@ +package assets + +import "path" + +type Tag struct { + ID string `json:"-"` + Name string `json:"-"` + Value string `json:"value,omitempty"` +} + +func (t Tag) LogValuer() string { + return t.Value +} + +func (a *Asset) AddTag(tag string) { + for _, t := range a.Tags { + if t.Value == tag { + return + } + } + a.Tags = append(a.Tags, Tag{Name: path.Base(tag), Value: tag}) +} diff --git a/internal/cliFlags/dateFromNames.go b/internal/cliFlags/dateFromNames.go new file mode 100644 index 00000000..be1e710a --- /dev/null +++ b/internal/cliFlags/dateFromNames.go @@ -0,0 +1,42 @@ +package cliflags + +import ( + "fmt" + "strings" +) + +type DateMethod string + +const ( + DateMethodNone DateMethod = "NONE" + DateMethodName DateMethod = "FILENAME" + DateMethodEXIF DateMethod = "EXIF" + DateMethodNameThenExif DateMethod = "FILENAME-EXIF" + DateMethodExifThenName DateMethod = "EXIF-FILENAME" +) + +func (dm *DateMethod) Set(s string) error { + s = strings.TrimSpace(strings.ToUpper(s)) + if s == "" { + s = string(DateMethodNone) + } + switch DateMethod(s) { + case DateMethodNone, + DateMethodEXIF, + DateMethodNameThenExif, + DateMethodExifThenName, + DateMethodName: + *dm = DateMethod(s) + return nil + default: + return fmt.Errorf("invalid DateMethod: %s, expecting NONE|FILENAME|EXIF|FILENAME-EXIF|EXIF-FILENAME", s) + } +} + +func (dm *DateMethod) Type() string { + return "DateMethod" +} + +func (dm *DateMethod) String() string { + return string(*dm) +} diff --git a/internal/cliFlags/daterange.go b/internal/cliFlags/daterange.go new file mode 100644 index 00000000..b4935a7c --- /dev/null +++ b/internal/cliFlags/daterange.go @@ -0,0 +1,114 @@ +package cliflags + +import ( + "fmt" + "time" +) + +// DateRange represent the date range for capture date + 
+type DateRange struct { + After, Before time.Time // todo: make After and Before private + day, month, year, set bool + tz *time.Location + s string +} + +// InitDateRange initialize a DateRange with a string (for tests) +func InitDateRange(tz *time.Location, s string) DateRange { + dr := DateRange{ + tz: tz, + } + _ = dr.Set(s) + return dr +} + +// IsSet returns whether the date range is set +func (dr DateRange) IsSet() bool { return dr.set } + +func (dr DateRange) String() string { + if dr.set { + switch { + case dr.day: + return dr.After.Format("2006-01-02") + case dr.month: + return dr.After.Format("2006-01") + case dr.year: + return dr.After.Format("2006") + default: + return dr.After.Format("2006-01-02") + "," + dr.Before.AddDate(0, 0, -1).Format("2006-01-02") + } + } else { + return "unset" + } +} + +func (dr *DateRange) SetTZ(tz *time.Location) { + dr.tz = tz + if dr.set { + _ = dr.Set(dr.s) + } +} + +// Implements the flags interface +// A day: 2022-01-01 +// A month: 2022-01 +// A year: 2022 +// A range: 2022-01-01,2022-12-31 +func (dr *DateRange) Set(s string) (err error) { + if dr.tz == nil { + dr.tz = time.Local + } + switch len(s) { + case 4: + dr.year = true + dr.After, err = time.ParseInLocation("2006", s, dr.tz) + if err != nil { + return fmt.Errorf("invalid date range:%w", err) + } + dr.Before = dr.After.AddDate(1, 0, 0) + case 7: + dr.month = true + dr.After, err = time.ParseInLocation("2006-01", s, dr.tz) + if err != nil { + return fmt.Errorf("invalid date range:%w", err) + } + dr.Before = dr.After.AddDate(0, 1, 0) + case 10: + dr.day = true + dr.After, err = time.ParseInLocation("2006-01-02", s, dr.tz) + if err != nil { + return fmt.Errorf("invalid date range:%w", err) + } + dr.Before = dr.After.AddDate(0, 0, 1) + case 21: + dr.After, err = time.ParseInLocation("2006-01-02", s[:10], dr.tz) + if err != nil { + return fmt.Errorf("invalid date range:%w", err) + } + dr.Before, err = time.ParseInLocation("2006-01-02", s[11:], dr.tz) + if err != nil 
{ + return fmt.Errorf("invalid date range:%w", err) + } + dr.Before = dr.Before.AddDate(0, 0, 1) + default: + dr.set = false + return fmt.Errorf("invalid date range:%s", s) + } + dr.set = true + dr.s = s + return nil +} + +// InRange checks if a given date is within the range +func (dr DateRange) InRange(d time.Time) bool { + if !dr.set { + return true + } + // --------------After----------d------------Before + return (d.Compare(dr.After) >= 0 && dr.Before.Compare(d) > 0) +} + +func (dr DateRange) Type() string { + return "date-range" +} diff --git a/immich/daterange_test.go b/internal/cliFlags/daterange_test.go similarity index 95% rename from immich/daterange_test.go rename to internal/cliFlags/daterange_test.go index c82cbfd0..3800304a 100644 --- a/immich/daterange_test.go +++ b/internal/cliFlags/daterange_test.go @@ -1,4 +1,4 @@ -package immich +package cliflags import ( "testing" @@ -145,7 +145,9 @@ func TestDateRange_InRange(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { + tz := time.Local var dr DateRange + dr.SetTZ(tz) err := dr.Set(tt.name) if err != nil { t.Errorf("set DateRange %q fails: %s", tt.name, err) @@ -154,7 +156,7 @@ func TestDateRange_InRange(t *testing.T) { t.Errorf("the String() gives %q, want %q", dr.String(), tt.name) } for _, check := range tt.check { - d, err := time.ParseInLocation(time.DateTime, check.date, time.UTC) + d, err := time.ParseInLocation(time.DateTime, check.date, tz) if err != nil { t.Errorf("can't parse check date %q fails: %s", check.date, err) } diff --git a/internal/cliFlags/extensionList.go b/internal/cliFlags/extensionList.go new file mode 100644 index 00000000..caeb4411 --- /dev/null +++ b/internal/cliFlags/extensionList.go @@ -0,0 +1,80 @@ +package cliflags + +import ( + "slices" + "strings" + + "github.com/spf13/cobra" +) + +type InclusionFlags struct { + ExcludedExtensions ExtensionList + IncludedExtensions ExtensionList + DateRange DateRange +} + +func AddInclusionFlags(cmd 
*cobra.Command, flags *InclusionFlags) { + cmd.Flags().Var(&flags.DateRange, "date-range", "Only import photos taken within the specified date range") + cmd.Flags().Var(&flags.ExcludedExtensions, "exclude-extensions", "Comma-separated list of extension to exclude. (e.g. .gif,.PM) (default: none)") + cmd.Flags().Var(&flags.IncludedExtensions, "include-extensions", "Comma-separated list of extension to include. (e.g. .jpg,.heic) (default: all)") +} + +// Validate validates the common flags. +func (flags *InclusionFlags) Validate() { + flags.ExcludedExtensions = flags.ExcludedExtensions.Validate() + flags.IncludedExtensions = flags.IncludedExtensions.Validate() +} + +// An ExtensionList is a list of file extensions, where each extension is a string that starts with a dot (.) and is in lowercase. +type ExtensionList []string + +// Validate validates the extension list by converting to lowercase. +func (sl ExtensionList) Validate() ExtensionList { + vl := ExtensionList{} + for _, e := range sl { + e = strings.ToLower(strings.TrimSpace(e)) + if !strings.HasPrefix(e, ".") { + e = "." + e + } + vl = append(vl, e) + } + return vl +} + +// Include checks if the extension list includes a given extension. +func (sl ExtensionList) Include(s string) bool { + if len(sl) == 0 { + return true + } + s = strings.ToLower(s) + return slices.Contains(sl, strings.ToLower(s)) +} + +// Exclude checks if the extension list excludes a given extension. 
+func (sl ExtensionList) Exclude(s string) bool { + if len(sl) == 0 { + return false + } + s = strings.ToLower(s) + return slices.Contains(sl, strings.ToLower(s)) +} + +// Implements the flag interface +func (sl *ExtensionList) Set(s string) error { + exts := strings.Split(s, ",") + for _, ext := range exts { + ext = strings.TrimSpace(ext) + if ext != "" { + *sl = append(*sl, ext) + } + } + return nil +} + +func (sl ExtensionList) String() string { + return strings.Join(sl, ", ") +} + +func (sl ExtensionList) Type() string { + return "ExtensionList" +} diff --git a/cmd/upload/stringlist_test.go b/internal/cliFlags/stringlist_test.go similarity index 75% rename from cmd/upload/stringlist_test.go rename to internal/cliFlags/stringlist_test.go index 0e2136f8..71fb4949 100644 --- a/cmd/upload/stringlist_test.go +++ b/internal/cliFlags/stringlist_test.go @@ -1,41 +1,41 @@ -package upload +package cliflags import "testing" func TestStringList_Include(t *testing.T) { tests := []struct { name string - sl StringList + sl ExtensionList ext string want bool }{ { name: "empty", - sl: StringList{}, + sl: ExtensionList{}, ext: ".jpg", want: true, }, { name: ".jpg", - sl: StringList{".jpg"}, + sl: ExtensionList{".jpg"}, ext: ".JPG", want: true, }, { name: ".jpg but .heic", - sl: StringList{".jpg"}, + sl: ExtensionList{".jpg"}, ext: ".heic", want: false, }, { name: ".jpg,.mp4,.mov with .mov", - sl: StringList{".jpg", ".mp4", ".mov"}, + sl: ExtensionList{".jpg", ".mp4", ".mov"}, ext: ".MOV", want: true, }, { name: ".jpg,.mp4,.mov with .heic", - sl: StringList{".jpg", ".mp4", ".mov"}, + sl: ExtensionList{".jpg", ".mp4", ".mov"}, ext: ".HEIC", want: false, }, @@ -52,37 +52,37 @@ func TestStringList_Include(t *testing.T) { func TestStringList_Exclude(t *testing.T) { tests := []struct { name string - sl StringList + sl ExtensionList ext string want bool }{ { name: "empty", - sl: StringList{}, + sl: ExtensionList{}, ext: ".jpg", want: false, }, { name: ".jpg", - sl: StringList{".jpg"}, 
+ sl: ExtensionList{".jpg"}, ext: ".JPG", want: true, }, { name: ".jpg but .heic", - sl: StringList{".jpg"}, + sl: ExtensionList{".jpg"}, ext: ".heic", want: false, }, { name: ".jpg,.mp4,.mov with .mov", - sl: StringList{".jpg", ".mp4", ".mov"}, + sl: ExtensionList{".jpg", ".mp4", ".mov"}, ext: ".MOV", want: true, }, { name: ".jpg,.mp4,.mov with .heic", - sl: StringList{".jpg", ".mp4", ".mov"}, + sl: ExtensionList{".jpg", ".mp4", ".mov"}, ext: ".HEIC", want: false, }, diff --git a/helpers/configuration/config.go b/internal/configuration/config.go similarity index 100% rename from helpers/configuration/config.go rename to internal/configuration/config.go diff --git a/helpers/configuration/doc.go b/internal/configuration/doc.go similarity index 100% rename from helpers/configuration/doc.go rename to internal/configuration/doc.go diff --git a/helpers/docker/docker.go b/internal/docker/docker.go similarity index 100% rename from helpers/docker/docker.go rename to internal/docker/docker.go diff --git a/helpers/docker/local.go b/internal/docker/local.go similarity index 100% rename from helpers/docker/local.go rename to internal/docker/local.go diff --git a/helpers/docker/ssh.go b/internal/docker/ssh.go similarity index 100% rename from helpers/docker/ssh.go rename to internal/docker/ssh.go diff --git a/internal/exif/DATA/PXL_20220724_210650210.NIGHT.mp4 b/internal/exif/DATA/PXL_20220724_210650210.NIGHT.mp4 new file mode 100644 index 00000000..ef348867 Binary files /dev/null and b/internal/exif/DATA/PXL_20220724_210650210.NIGHT.mp4 differ diff --git a/internal/exif/DATA/PXL_20231006_063000139.jpg b/internal/exif/DATA/PXL_20231006_063000139.jpg new file mode 100644 index 00000000..07ec1e2e Binary files /dev/null and b/internal/exif/DATA/PXL_20231006_063000139.jpg differ diff --git a/internal/exif/DATA/YG816507.jpg b/internal/exif/DATA/YG816507.jpg new file mode 100755 index 00000000..ab3fea21 Binary files /dev/null and b/internal/exif/DATA/YG816507.jpg differ diff --git 
a/internal/exif/DATA/YG816507.orf b/internal/exif/DATA/YG816507.orf new file mode 100755 index 00000000..109f7eaa Binary files /dev/null and b/internal/exif/DATA/YG816507.orf differ diff --git a/internal/exif/direct.go b/internal/exif/direct.go new file mode 100644 index 00000000..b1d00a8e --- /dev/null +++ b/internal/exif/direct.go @@ -0,0 +1,194 @@ +package exif + +/* + Read metadata from a file not using exiftool. + + TODO: Use sync.Pool for buffers +*/ +import ( + "bytes" + "fmt" + "io" + "path" + "strings" + "time" + + "github.com/rwcarlsen/goexif/exif" + "github.com/rwcarlsen/goexif/tiff" + "github.com/simulot/immich-go/internal/assets" +) + +// MetadataFromDirectRead read the file using GO package +func MetadataFromDirectRead(f io.Reader, name string, localTZ *time.Location) (*assets.Metadata, error) { + var md *assets.Metadata + var err error + ext := strings.ToLower(path.Ext(name)) + + switch strings.ToLower(ext) { + case ".heic", ".heif": + md, err = readHEIFMetadata(f, localTZ) + case ".jpg", ".jpeg", ".dng", ".cr2": + md, err = readExifMetadata(f, localTZ) + case ".mp4", ".mov": + md, err = readMP4Metadata(f) + case ".cr3": + md, err = readCR3Metadata(f, localTZ) + default: + return nil, fmt.Errorf("can't read metadata for this format '%s'", ext) + } + if err != nil { + return nil, fmt.Errorf("can't read metadata: %w", err) + } + + return md, nil +} + +// readExifMetadata locate the Exif part and return the date of capture +func readExifMetadata(r io.Reader, localTZ *time.Location) (*assets.Metadata, error) { + // try to read the Exif data directly + readBuffer := bytes.NewBuffer(make([]byte, searchBufferSize)) + r2 := io.TeeReader(r, readBuffer) + x, err := exif.Decode(r2) + if err == nil || !exif.IsCriticalError(err) { + return getExifMetadata(x, localTZ) + } + b := make([]byte, searchBufferSize) + + // search for the Exif header + r, err = searchPattern(io.MultiReader(readBuffer, r), []byte("Exif\x00\x00"), b) + if err == nil { + x, err = 
exif.Decode(r) + if err == nil || !exif.IsCriticalError(err) { + return getExifMetadata(x, localTZ) + } + } + return nil, err +} + +const searchBufferSize = 32 * 1024 + +// readHEIFMetadata locate the Exif part and return the date of capture +func readHEIFMetadata(r io.Reader, localTZ *time.Location) (*assets.Metadata, error) { + b := make([]byte, searchBufferSize) + r, err := searchPattern(r, []byte{0x45, 0x78, 0x69, 0x66, 0, 0, 0x4d, 0x4d}, b) + if err != nil { + return nil, err + } + + filler := make([]byte, 6) + _, err = r.Read(filler) + if err != nil { + return nil, err + } + x, err := exif.Decode(r) + if err == nil || !exif.IsCriticalError(err) { + return getExifMetadata(x, localTZ) + } + return nil, err +} + +// readMP4Metadata locate the mvhd atom and decode the date of capture +func readMP4Metadata(r io.Reader) (*assets.Metadata, error) { + b := make([]byte, searchBufferSize) + + r, err := searchPattern(r, []byte{'m', 'v', 'h', 'd'}, b) + if err != nil { + return nil, err + } + atom, err := decodeMvhdAtom(r) + if err != nil { + return nil, err + } + t := atom.CreationTime + if t.Year() < 2000 { + t = atom.ModificationTime + } + return &assets.Metadata{DateTaken: t}, nil +} + +// readCR3Metadata locate the CMT1 atom and decode the date of capture +func readCR3Metadata(r io.Reader, localTZ *time.Location) (*assets.Metadata, error) { + b := make([]byte, searchBufferSize) + + r, err := searchPattern(r, []byte("CMT1"), b) + if err != nil { + return nil, err + } + + filler := make([]byte, 4) + _, err = r.Read(filler) + if err != nil { + return nil, err + } + x, err := exif.Decode(r) + if err == nil || !exif.IsCriticalError(err) { + return getExifMetadata(x, localTZ) + } + return nil, err +} + +// getExifMetadata extract the date and location from the Exif data +func getExifMetadata(x *exif.Exif, local *time.Location) (*assets.Metadata, error) { + var err error + md := &assets.Metadata{} + md.DateTaken, err = readGPSTimeStamp(x, local) + if err != nil || 
md.DateTaken.IsZero() { + var tag string + tag, err = getTagSting(x, exif.DateTimeOriginal) + if err == nil { + md.DateTaken, err = time.ParseInLocation("2006:01:02 15:04:05", tag, local) + } + if err != nil { + tag, err = getTagSting(x, exif.DateTime) + if err == nil { + md.DateTaken, _ = time.ParseInLocation("2006:01:02 15:04:05", tag, local) // last chance + } + } + } + + if err == nil { + lat, lon, err := x.LatLong() + if err == nil { + md.Latitude = lat + md.Longitude = lon + } + } + return md, err +} + +// readGPSTimeStamp extract the date from the GPS data +func readGPSTimeStamp(x *exif.Exif, _ *time.Location) (time.Time, error) { + tag, err := getTagSting(x, exif.GPSDateStamp) + if err == nil { + var tags *tiff.Tag + tags, err = x.Get(exif.GPSTimeStamp) + if err == nil { + tag = tag + " " + fmt.Sprintf("%02d:%02d:%02dZ", ratToInt(tags, 0), ratToInt(tags, 1), ratToInt(tags, 2)) + t, err := time.ParseInLocation("2006:01:02 15:04:05Z", tag, time.UTC) + if err == nil { + return t, nil + } + } + } + return time.Time{}, err +} + +func ratToInt(t *tiff.Tag, i int) int { + n, d, err := t.Rat2(i) + if err != nil { + return 0 + } + if d == 1 { + return int(n) + } + return int(float64(n) / float64(d)) +} + +func getTagSting(x *exif.Exif, tagName exif.FieldName) (string, error) { + t, err := x.Get(tagName) + if err != nil { + return "", err + } + s := strings.TrimRight(strings.TrimLeft(t.String(), `"`), `"`) + return s, nil +} diff --git a/internal/exif/direct_test.go b/internal/exif/direct_test.go new file mode 100644 index 00000000..02b3828e --- /dev/null +++ b/internal/exif/direct_test.go @@ -0,0 +1,86 @@ +package exif + +import ( + "os" + "testing" + "time" + + "github.com/simulot/immich-go/internal/assets" +) + +func Test_MetadataFromDirectRead(t *testing.T) { + tests := []struct { + name string + fileName string + want *assets.Metadata + wantErr bool + }{ + { + name: "read JPG", + fileName: "DATA/PXL_20231006_063000139.jpg", + want: &assets.Metadata{ + DateTaken: 
time.Date(2023, 10, 6, 6, 29, 56, 0, time.UTC), // 2023:10:06 06:29:56Z + Latitude: +48.8583736, + Longitude: +2.2919010, + }, + wantErr: false, + }, + { + name: "read mp4", + fileName: "DATA/PXL_20220724_210650210.NIGHT.mp4", + want: &assets.Metadata{ + DateTaken: time.Date(2022, 7, 24, 21, 10, 56, 0, time.UTC), + // Latitude: 47.538300, + // Longitude: -2.891900, + }, + // wantErr: false, + }, + { + name: "read OLYMPUS", + fileName: "DATA/YG816507.jpg", + want: &assets.Metadata{ + DateTaken: time.Date(2024, 7, 8, 4, 35, 7, 0, time.Local), + }, + wantErr: false, + }, + { + name: "read OLYMPUS orf", + fileName: "DATA/YG816507.orf", + want: &assets.Metadata{ + DateTaken: time.Date(2024, 7, 7, 19, 37, 7, 0, time.UTC), // 2024:07:07 19:37:07Z + }, + wantErr: true, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + f, err := os.Open(tt.fileName) + if err != nil { + t.Errorf("Can't open file %s: %v", tt.fileName, err) + return + } + defer f.Close() + got, err := MetadataFromDirectRead(f, tt.fileName, time.Local) + if (err != nil) != tt.wantErr { + t.Errorf("ExifTool.ReadMetaData() error = %v, wantErr %v", err, tt.wantErr) + return + } + if err != nil { + return + } + if !tt.want.DateTaken.IsZero() && !got.DateTaken.Equal(tt.want.DateTaken) { + t.Errorf("DateTaken = %v, want %v", got.DateTaken, tt.want.DateTaken) + } + if !floatEquals(got.Latitude, tt.want.Latitude, 1e-6) { + t.Errorf("Latitude = %v, want %v", got.Latitude, tt.want.Latitude) + } + if !floatEquals(got.Longitude, tt.want.Longitude, 1e-6) { + t.Errorf("Longitude = %v, want %v", got.Longitude, tt.want.Longitude) + } + }) + } +} + +func floatEquals(a, b, epsilon float64) bool { + return (a-b) < epsilon && (b-a) < epsilon +} diff --git a/internal/exif/exiftool.nogo b/internal/exif/exiftool.nogo new file mode 100644 index 00000000..0c6a7397 --- /dev/null +++ b/internal/exif/exiftool.nogo @@ -0,0 +1,117 @@ +package exif + +import ( + "fmt" + "time" + + etool 
"github.com/barasher/go-exiftool" + "github.com/simulot/immich-go/internal/assets" +) + +type ExifTool struct { + flags *ExifToolFlags + eTool *etool.Exiftool +} + +func NewExifTool(flags *ExifToolFlags) error { + opts := []func(*etool.Exiftool) error{ + etool.Charset("filename=utf8"), + etool.CoordFormant("%+.7f"), + } + + if flags != nil { + if flags.ExifPath != "" { + opts = append(opts, etool.SetExiftoolBinaryPath(flags.ExifPath)) + } + } + + tool, err := etool.NewExiftool(opts...) + if err != nil { + return err + } + flags.et = &ExifTool{ + eTool: tool, + flags: flags, + } + return nil +} + +func (et *ExifTool) Close() error { + return et.eTool.Close() +} + +var dateKeys = []struct { + key string + format string + isUTC bool +}{ + {"GPSDateTime", "2006:01:02 15:04:05Z", true}, // 2023:10:06 06:29:56Z + {"DateTimeUTC", "2006:01:02 15:04:05", true}, // 2016:11:19 20:30:52 + {"DateTimeOriginal", "2006:01:02 15:04:05", false}, // 2023:10:06 08:30:00 + {"CreateDate", "2006:01:02 15:04:05", false}, // 2023:10:06 08:30:00 + {"ModifyDate", "2006:01:02 15:04:05", false}, // 2016:11:19 20:30:52 + {"MediaModifyDate", "2006:01:02 15:04:05", false}, // 2016:11:19 20:30:52 +} + +// GetMetadata returns the metadata of the file. The date of capture is searched in the preferred tags first. 
+// missing tags or tags with incorrect dates are skipped +// +// TODO: make a better use of time offset taken on the exif fields +// ``` +// Modify Date : 2023:10:06 08:30:00 +// Date/Time Original : 2023:10:06 08:30:00 +// Create Date : 2023:10:06 08:30:00 +// Offset Time : +02:00 +// Offset Time Original : +02:00 +// Offset Time Digitized : +02:00 +// Sub Sec Time : 139 +// Sub Sec Time Original : 139 +// Sub Sec Time Digitized : 139 +// GPS Time Stamp : 06:29:56 +// GPS Date Stamp : 2023:10:06 +// Profile Date Time : 2023:03:09 10:57:00 +// Create Date : 2023:10:06 08:30:00.139+02:00 +// Date/Time Original : 2023:10:06 08:30:00.139+02:00 +// Modify Date : 2023:10:06 08:30:00.139+02:00 +// GPS Date/Time : 2023:10:06 06:29:56Z +// ``` + +// ReadMetaData reads the metadata of the file and fills the metadata structure +func (et *ExifTool) ReadMetaData(fileName string) (*assets.Metadata, error) { + ms := et.eTool.ExtractMetadata(fileName) + if len(ms) != 1 { + return nil, fmt.Errorf("cant extract metadata from file '%s': unexpected exif-tool result", fileName) + } + m := ms[0] + if m.Err != nil { + return nil, fmt.Errorf("cant extract metadata from file '%s': %w", fileName, m.Err) + } + + md := &assets.Metadata{} + + if v, err := m.GetFloat("GPSLatitude"); err == nil { + md.Latitude = v + } + if v, err := m.GetFloat("GPSLongitude"); err == nil { + md.Longitude = v + } + + // get the date of capture using preferred exif tag + for _, dk := range dateKeys { + if s, err := m.GetString(dk.key); err == nil { + tz := et.flags.Timezone.Location() + if dk.isUTC { + tz = time.UTC + } + t, err := time.ParseInLocation(dk.format, s, tz) + if err == nil { + if t.IsZero() || t.Before(time.Date(1980, 1, 1, 0, 0, 0, 0, time.UTC)) || t.After(time.Now().AddDate(0, 0, 365*10)) { + continue + } + md.DateTaken = t + break + } + } + } + return md, nil +} diff --git a/internal/exif/exiftoolFlags.nogo b/internal/exif/exiftoolFlags.nogo new file mode 100644 index 00000000..96097202 --- 
/dev/null +++ b/internal/exif/exiftoolFlags.nogo @@ -0,0 +1,24 @@ +package exif + +import ( + "github.com/simulot/immich-go/internal/tzone" + "github.com/spf13/cobra" +) + +type ExifToolFlags struct { + UseExifTool bool + ExifPath string + Timezone tzone.Timezone + + // TakeDateFromFilename indicates whether to take the date from the filename if the date isn't available in the image. + TakeDateFromFilename bool + et *ExifTool +} + +func AddExifToolFlags(cmd *cobra.Command, flags *ExifToolFlags) { + _ = flags.Timezone.Set("Local") + // cmd.Flags().BoolVar(&flags.UseExifTool, "exiftool-enabled", false, "Enable the use of the external 'exiftool' program (if installed and available in the system path) to extract EXIF metadata") + // cmd.Flags().StringVar(&flags.ExifPath, "exiftool-path", "", "Path to the ExifTool executable (default: search in system's PATH)") + cmd.Flags().Var(&flags.Timezone, "exif-timezone", "Timezone to use when parsing exif timestamps without timezone Options: LOCAL (use the system's local timezone), UTC (use UTC timezone), or a valid timezone name (e.g. America/New_York)") + cmd.Flags().BoolVar(&flags.TakeDateFromFilename, "date-from-name", false, "Use the date from the filename if the date isn't available in the metadata (.jpg,mp4,.heic,.dng,cr2,.cr3,). 
Use --exiftool-enabled to support more image formats") +} diff --git a/internal/exif/exiftool_test.nogo b/internal/exif/exiftool_test.nogo new file mode 100644 index 00000000..2e7b5fc8 --- /dev/null +++ b/internal/exif/exiftool_test.nogo @@ -0,0 +1,89 @@ +package exif + +import ( + "testing" + "time" + + "github.com/simulot/immich-go/internal/assets" + "github.com/simulot/immich-go/internal/tzone" +) + +func TestExifTool_ReadMetaData(t *testing.T) { + tests := []struct { + name string + fileName string + want *assets.Metadata + wantErr bool + }{ + { + name: "read JPG", + fileName: "DATA/PXL_20231006_063000139.jpg", + want: &assets.Metadata{ + DateTaken: time.Date(2023, 10, 6, 6, 29, 56, 0, time.UTC), // 2023:10:06 06:29:56Z + Latitude: +48.8583736, + Longitude: +2.2919010, + }, + wantErr: false, + }, + { + name: "read mp4", + fileName: "DATA/PXL_20220724_210650210.NIGHT.mp4", + want: &assets.Metadata{ + DateTaken: time.Date(2022, 7, 24, 21, 10, 56, 0, time.Local), + Latitude: 47.538300, + Longitude: -2.891900, + }, + wantErr: false, + }, + { + name: "read OLYMPUS", + fileName: "DATA/YG816507.jpg", + want: &assets.Metadata{ + DateTaken: time.Date(2024, 7, 7, 19, 37, 7, 0, time.UTC), // 2024:07:07 19:37:07Z + }, + wantErr: false, + }, + { + name: "read OLYMPUS orf", + fileName: "DATA/YG816507.orf", + want: &assets.Metadata{ + DateTaken: time.Date(2024, 7, 7, 19, 37, 7, 0, time.UTC), // 2024:07:07 19:37:07Z + }, + wantErr: false, + }, + } + flag := &ExifToolFlags{ + UseExifTool: true, + Timezone: tzone.Timezone{TZ: time.Local}, + } + err := NewExifTool(flag) + if err != nil { + t.Error(err) + return + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := flag.et.ReadMetaData(tt.fileName) + if (err != nil) != tt.wantErr { + t.Errorf("ExifTool.ReadMetaData() error = %v, wantErr %v", err, tt.wantErr) + return + } + + if !got.DateTaken.Equal(tt.want.DateTaken) { + t.Errorf("DateTaken = %v, want %v", got.DateTaken, tt.want.DateTaken) + } + if 
!float64Equal(got.Latitude, tt.want.Latitude) { + t.Errorf("Latitude = %v, want %v", got.Latitude, tt.want.Latitude) + } + if !float64Equal(got.Longitude, tt.want.Longitude) { + t.Errorf("Longitude = %v, want %v", got.Longitude, tt.want.Longitude) + } + }) + } +} + +func float64Equal(a, b float64) bool { + const epsilon = 1e-6 + return (a-b) < epsilon && (b-a) < epsilon +} diff --git a/internal/exif/metadata.go b/internal/exif/metadata.go new file mode 100644 index 00000000..e0456a59 --- /dev/null +++ b/internal/exif/metadata.go @@ -0,0 +1,24 @@ +package exif + +import ( + "io" + "time" + + "github.com/simulot/immich-go/internal/assets" +) + +// GetMetaData read metadata from the asset file to enrich the metadata structure +func GetMetaData(r io.Reader, name string, local *time.Location) (*assets.Metadata, error) { + return MetadataFromDirectRead(r, name, local) +} + +// MetadataFromExiftool call exiftool to get exif data +// func MetadataFromExiftool(f io.Reader, name string, options ExifToolFlags) (*assets.Metadata, error) { +// // be sure the file is completely extracted in the temp file +// _, err := io.Copy(io.Discard, f) +// if err != nil { +// return nil, err +// } + +// return options.et.ReadMetaData(name) +// } diff --git a/immich/metadata/quicktime.go b/internal/exif/quicktime.go similarity index 97% rename from immich/metadata/quicktime.go rename to internal/exif/quicktime.go index 3b14f455..675bd44f 100644 --- a/immich/metadata/quicktime.go +++ b/internal/exif/quicktime.go @@ -1,7 +1,8 @@ -package metadata +package exif import ( "encoding/binary" + "io" "time" ) @@ -47,7 +48,8 @@ type MvhdAtom struct { // NextTrackID uint32 } -func decodeMvhdAtom(r *sliceReader) (*MvhdAtom, error) { +func decodeMvhdAtom(rf io.Reader) (*MvhdAtom, error) { + r := newSliceReader(rf) a := &MvhdAtom{} var err error // Read the mvhd marker (4 bytes) diff --git a/immich/metadata/search.go b/internal/exif/search.go similarity index 98% rename from immich/metadata/search.go 
rename to internal/exif/search.go index ab049d99..154d8bdb 100644 --- a/immich/metadata/search.go +++ b/internal/exif/search.go @@ -1,4 +1,4 @@ -package metadata +package exif import ( "bufio" diff --git a/immich/metadata/search_test.go b/internal/exif/search_test.go similarity index 99% rename from immich/metadata/search_test.go rename to internal/exif/search_test.go index eef0d59f..611fa7fe 100644 --- a/immich/metadata/search_test.go +++ b/internal/exif/search_test.go @@ -1,4 +1,4 @@ -package metadata +package exif import ( "bytes" diff --git a/internal/exif/sidecars/jsonsidecar/json.go b/internal/exif/sidecars/jsonsidecar/json.go new file mode 100644 index 00000000..eed66f89 --- /dev/null +++ b/internal/exif/sidecars/jsonsidecar/json.go @@ -0,0 +1,34 @@ +package jsonsidecar + +import ( + "encoding/json" + "io" + + "github.com/simulot/immich-go/app" + "github.com/simulot/immich-go/internal/assets" +) + +type meta struct { + Software string `json:"software"` + assets.Metadata +} + +func Write(md *assets.Metadata, w io.Writer) error { + v := meta{ + Software: app.GetVersion(), + Metadata: *md, + } + enc := json.NewEncoder(w) + enc.SetIndent("", " ") + return enc.Encode(v) +} + +func Read(r io.Reader, md *assets.Metadata) error { + var v meta + dec := json.NewDecoder(r) + if err := dec.Decode(&v); err != nil { + return err + } + *md = v.Metadata + return nil +} diff --git a/internal/exif/sidecars/xmpsidecar/DATA/159d9172-2a1e-4d95-aef1-b5133549927b.jpg.xmp b/internal/exif/sidecars/xmpsidecar/DATA/159d9172-2a1e-4d95-aef1-b5133549927b.jpg.xmp new file mode 100644 index 00000000..76458a18 --- /dev/null +++ b/internal/exif/sidecars/xmpsidecar/DATA/159d9172-2a1e-4d95-aef1-b5133549927b.jpg.xmp @@ -0,0 +1,48 @@ + + + + + + + + Alors! + + + + + + + + activities/outdoors + + + + + + 2018-08-11T17:38:25Z + + + + 2018-08-11T17:38:25Z + + + + + + Alors! 
+ + + + + + 4 + + + + \ No newline at end of file diff --git a/internal/exif/sidecars/xmpsidecar/DATA/IMG_2477.CR2.xmp b/internal/exif/sidecars/xmpsidecar/DATA/IMG_2477.CR2.xmp new file mode 100755 index 00000000..58351d27 --- /dev/null +++ b/internal/exif/sidecars/xmpsidecar/DATA/IMG_2477.CR2.xmp @@ -0,0 +1,12 @@ + + + + + + 48,24.50256879N + 3,5.43538761W + + + + diff --git a/internal/exif/sidecars/xmpsidecar/bool.go b/internal/exif/sidecars/xmpsidecar/bool.go new file mode 100644 index 00000000..2ee6a3e2 --- /dev/null +++ b/internal/exif/sidecars/xmpsidecar/bool.go @@ -0,0 +1,14 @@ +package xmpsidecar + +import "strings" + +func BoolToString(b bool) string { + if b { + return "True" + } + return "False" +} + +func StringToBool(s string) bool { + return strings.ToLower(s) == "true" +} diff --git a/internal/exif/sidecars/xmpsidecar/gps.go b/internal/exif/sidecars/xmpsidecar/gps.go new file mode 100644 index 00000000..c948cc4f --- /dev/null +++ b/internal/exif/sidecars/xmpsidecar/gps.go @@ -0,0 +1,69 @@ +package xmpsidecar + +import ( + "fmt" + "math" +) + +/* +GPSCoordinate +A Text value in the form “DDD,MM,SSk” or “DDD,MM.mmk”, where: + DDD is a number of degrees + MM is a number of minutes + SS is a number of seconds + mm is a fraction of minutes + k is a single character N, S, E, or W indicating a direction (north, south, east, west) + +Leading zeros are not necessary for the for DDD, MM, and SS values. The DDD,MM.mmk form should be used +when any of the native EXIF component rational values has a denominator other than 1. There can be any +number of fractional digits. 
+ + +*/ + +// GPSFloatToString converts a float GPS coordinate to a string in the format "48,55.68405768N" +func GPSFloatToString(coordinate float64, isLatitude bool) string { + neg := coordinate < 0 + if coordinate < 0 { + coordinate = -coordinate + } + + degrees := int(math.Floor(coordinate)) + minutes := float64(coordinate-float64(degrees)) * 60 + direction := "N" + if !isLatitude { + direction = "E" + } + if neg { + direction = "S" + if !isLatitude { + direction = "W" + } + } + + return fmt.Sprintf("%d,%08.5f%s", degrees, minutes, direction) +} + +// GPTStringToFloat converts a string GPS coordinate in the format "48,55.68405768N" to a float +func GPTStringToFloat(coordinate string) (float64, error) { + var degrees int + var minutes float64 + var direction string + + if len(coordinate) > 0 { + direction = string(coordinate[len(coordinate)-1]) + coordinate = coordinate[:len(coordinate)-1] + } + _, err := fmt.Sscanf(coordinate, "%d,%f", °rees, &minutes) + if err != nil { + return 0, err + } + + decimal := float64(degrees) + float64(minutes)/60 + + if direction == "S" || direction == "W" { + decimal = -decimal + } + + return decimal, nil +} diff --git a/internal/exif/sidecars/xmpsidecar/int.go b/internal/exif/sidecars/xmpsidecar/int.go new file mode 100644 index 00000000..6c26956c --- /dev/null +++ b/internal/exif/sidecars/xmpsidecar/int.go @@ -0,0 +1,17 @@ +package xmpsidecar + +import "strconv" + +func IntToString(i int) string { + return strconv.Itoa(i) +} + +func StringToInt(s string) int { + i, _ := strconv.Atoi(s) + return i +} + +func StringToByte(s string) byte { + i, _ := strconv.Atoi(s) + return byte(i) +} diff --git a/internal/exif/sidecars/xmpsidecar/read.go b/internal/exif/sidecars/xmpsidecar/read.go new file mode 100644 index 00000000..037398d7 --- /dev/null +++ b/internal/exif/sidecars/xmpsidecar/read.go @@ -0,0 +1,74 @@ +package xmpsidecar + +import ( + "fmt" + "io" + "path" + "regexp" + "time" + + "github.com/clbanning/mxj/v2" + 
"github.com/simulot/immich-go/internal/assets" +) + +func ReadXMP(r io.Reader, md *assets.Metadata) error { + // Read the XMP data from the reader and return an Asset + m, err := mxj.NewMapXmlReader(r) + if err != nil { + return err + } + walk(m, md, "") + return nil +} + +func walk(m mxj.Map, md *assets.Metadata, path string) { + for key, value := range m { + switch v := value.(type) { + case map[string]interface{}: + walk(v, md, path+"/"+key) + case []interface{}: + path = path + "/" + key + for i, item := range v { + p := fmt.Sprintf("%s[%d]", path, i) + if itemMap, ok := item.(map[string]interface{}); ok { + walk(itemMap, md, p) + } else { + filter(md, p, item.(string)) + } + } + default: + filter(md, path+"/"+key, value.(string)) + } + } +} + +var reDescription = regexp.MustCompile(`/xmpmeta/RDF/Description\[\d+\]/`) + +func filter(md *assets.Metadata, p string, value string) { + p = reDescription.ReplaceAllString(p, "") + // debug fmt.Printf("%s: %s\n", p, value) + switch p { + case "DateTimeOriginal": + if d, err := TimeStringToTime(value, time.UTC); err == nil { + md.DateTaken = d + } + case "ImageDescription/Alt/li/#text": + md.Description = value + case "Rating": + md.Rating = StringToByte(value) + case "TagsList/Seq/li": + md.Tags = append(md.Tags, + assets.Tag{ + Name: path.Base(value), + Value: value, + }) + case "/xmpmeta/RDF/Description/GPSLatitude": + if f, err := GPTStringToFloat(value); err == nil { + md.Latitude = f + } + case "/xmpmeta/RDF/Description/GPSLongitude": + if f, err := GPTStringToFloat(value); err == nil { + md.Longitude = f + } + } +} diff --git a/internal/exif/sidecars/xmpsidecar/read_test.go b/internal/exif/sidecars/xmpsidecar/read_test.go new file mode 100644 index 00000000..2c6f0c63 --- /dev/null +++ b/internal/exif/sidecars/xmpsidecar/read_test.go @@ -0,0 +1,105 @@ +package xmpsidecar + +import ( + "os" + "testing" + "time" + + "github.com/simulot/immich-go/internal/assets" +) + +func TestRead(t *testing.T) { + tc := []struct { 
+ path string + expect assets.Metadata + }{ + { + path: "DATA/159d9172-2a1e-4d95-aef1-b5133549927b.jpg.xmp", + expect: assets.Metadata{ + Description: "Alors!", + DateTaken: time.Date(2018, 8, 11, 17, 38, 25, 0, time.UTC), + Rating: 4, + Tags: []assets.Tag{ + {Value: "activities/outdoors", Name: "outdoors"}, + }, + }, + }, + { + path: "DATA/IMG_2477.CR2.xmp", + expect: assets.Metadata{ + Latitude: 48.408376, + Longitude: -3.090590, + }, + }, + } + + for _, c := range tc { + t.Run(c.path, func(t *testing.T) { + r, err := os.Open(c.path) + if err != nil { + t.Fatal(err.Error()) + } + defer r.Close() + md := &assets.Metadata{} + err = ReadXMP(r, md) + if err != nil { + t.Fatal(err.Error()) + } + if md.Description != c.expect.Description { + t.Errorf("expected description %s, got %s", c.expect.Description, md.Description) + } + if !md.DateTaken.Equal(c.expect.DateTaken) { + t.Errorf("expected date taken %s, got %s", c.expect.DateTaken, md.DateTaken) + } + if md.Rating != c.expect.Rating { + t.Errorf("expected rating %d, got %d", c.expect.Rating, md.Rating) + } + if len(md.Tags) != len(c.expect.Tags) { + t.Errorf("expected %d tags, got %d", len(c.expect.Tags), len(md.Tags)) + } else { + for i, tag := range md.Tags { + if tag != c.expect.Tags[i] { + t.Errorf("expected tag %v, got %v", c.expect.Tags[i], tag) + } + } + } + if !floatIsEqual(md.Latitude, c.expect.Latitude) { + t.Errorf("expected latitude %f, got %f", c.expect.Latitude, md.Latitude) + } + if !floatIsEqual(md.Longitude, c.expect.Longitude) { + t.Errorf("expected longitude %f, got %f", c.expect.Longitude, md.Longitude) + } + }) + } +} + +func floatIsEqual(a, b float64) bool { + const epsilon = 1e-6 + return (a-b) < epsilon && (b-a) < epsilon +} + +/* + +// explore the map +func exploreMap(m mxj.Map, padding string) { + for key, value := range m { + switch v := value.(type) { + case map[string]interface{}: + fmt.Printf("%skey: %s, value: map\n", padding, key) + exploreMap(v, padding+" ") + case []interface{}: + 
fmt.Printf("%skey: %s, value: array\n", padding, key) + for i, item := range v { + fmt.Printf("%s index: %d\n", padding, i) + if itemMap, ok := item.(map[string]interface{}); ok { + exploreMap(itemMap, padding+" ") + } else { + fmt.Printf("%s value: %v\n", padding, item) + } + } + default: + fmt.Printf("%skey: %s, value: %v\n", padding, key, value) + } + } +} +*/ diff --git a/internal/exif/sidecars/xmpsidecar/time.go b/internal/exif/sidecars/xmpsidecar/time.go new file mode 100644 index 00000000..18856c66 --- /dev/null +++ b/internal/exif/sidecars/xmpsidecar/time.go @@ -0,0 +1,65 @@ +package xmpsidecar + +import "time" + +/* +exif:DateTimeOriginalDateInternal + +EXIF tags 36867, 0x9003 (primary) and 37521, 0x9291 (subseconds). Date and time when original image was generated, in ISO 8601 format. Includes the EXIF +SubSecTimeOriginal data. + +Note that EXIF date-time values have no time zone information. + + +exif:GPSTimeStampDateInternalGPS tag 29 (date), 0x1D, and, and GPS tag 7 (time), 0x07. + +Time stamp of GPS data, in Coordinated Universal + +Time. + +The GPSDateStamp tag is new in EXIF 2.2. The GPS +timestamp in EXIF 2.1 does not include a date. If not +present, the date component for the XMP should be +taken from exif:DateTimeOriginal, or if that is also +lacking from exif:DateTimeDigitized. If no date is +available, do not write exif:GPSTimeStamp to XMP. + +*/ + +/* +Date +A date-time value which is represented using a subset of ISO RFC 8601 formatting, as described in +http://www.w3.org/TR/Note-datetime.html. 
The following formats are supported: + YYYY + YYYY-MM + YYYY-MM-DD + YYYY-MM-DDThh:mmTZD + YYYY-MM-DDThh:mm:ssTZD + YYYY-MM-DDThh:mm:ss.sTZD + YYYY = four-digit year + MM = two-digit month (01=January) + DD = two-digit day of month (01 through 31) + hh = two digits of hour (00 through 23) + mm = two digits of minute (00 through 59) + ss = two digits of second (00 through 59) + s = one or more digits representing a decimal fraction of a second + TZD = time zone designator (Z or +hh:mm or -hh:mm) + +The time zone designator is optional in XMP. When not present, the time zone is unknown, and software +should not assume anything about the missing time zone. + +It is recommended, when working with local times, that you use a time zone designator of +hh:mm or +-hh:mm instead of Z, to aid human readability. For example, if you know a file was saved at noon on +October 23 a timestamp of 2004-10-23T12:00:00-06:00 is more understandable than +2004-10-23T18:00:00Z. +*/ + +const xmpTimeLayout = "2006-01-02T15:04:05Z" + +func TimeStringToTime(t string, l *time.Location) (time.Time, error) { + return time.ParseInLocation(xmpTimeLayout, t, l) +} + +func TimeToString(t time.Time) string { + return t.Format(xmpTimeLayout) +} diff --git a/internal/fakeImmich/immich.go b/internal/fakeImmich/immich.go index dd06a72f..6437f638 100644 --- a/internal/fakeImmich/immich.go +++ b/internal/fakeImmich/immich.go @@ -4,8 +4,9 @@ import ( "context" "io" - "github.com/simulot/immich-go/browser" "github.com/simulot/immich-go/immich" + "github.com/simulot/immich-go/internal/assets" + "github.com/simulot/immich-go/internal/filetypes" ) type MockedCLient struct{} @@ -14,7 +15,7 @@ func (c *MockedCLient) GetAllAssetsWithFilter(context.Context, func(*immich.Asse return nil } -func (c *MockedCLient) AssetUpload(context.Context, *browser.LocalAssetFile) (immich.AssetResponse, error) { +func (c *MockedCLient) AssetUpload(context.Context, *assets.Asset) (immich.AssetResponse, error) { return 
immich.AssetResponse{}, nil } @@ -42,7 +43,7 @@ func (c *MockedCLient) StackAssets(ctx context.Context, cover string, ids []stri return nil } -func (c *MockedCLient) UpdateAsset(ctx context.Context, id string, a *browser.LocalAssetFile) (*immich.Asset, error) { +func (c *MockedCLient) UpdateAsset(ctx context.Context, id string, a *assets.Asset) (*immich.Asset, error) { return nil, nil } @@ -76,8 +77,8 @@ func (c *MockedCLient) DeleteAlbum(ctx context.Context, id string) error { return nil } -func (c *MockedCLient) SupportedMedia() immich.SupportedMedia { - return immich.DefaultSupportedMedia +func (c *MockedCLient) SupportedMedia() filetypes.SupportedMedia { + return filetypes.DefaultSupportedMedia } func (c *MockedCLient) GetAssetStatistics(ctx context.Context) (immich.UserStatistics, error) { diff --git a/internal/fakefs/fakefs.go b/internal/fakefs/fakefs.go index 186b7a03..b32b9e79 100644 --- a/internal/fakefs/fakefs.go +++ b/internal/fakefs/fakefs.go @@ -11,8 +11,8 @@ import ( "strings" "time" - "github.com/simulot/immich-go/helpers/gen" - "github.com/simulot/immich-go/immich/metadata" + "github.com/simulot/immich-go/internal/filenames" + "github.com/simulot/immich-go/internal/gen" ) /* @@ -116,7 +116,7 @@ func (fsys FakeFS) Open(name string) (fs.File, error) { r, fakeInfo.size = fakeJSON() default: d := info.ModTime() - if d2 := metadata.TakeTimeFromName(name); !d2.IsZero() { + if d2 := filenames.TakeTimeFromName(name, time.Local); !d2.IsZero() { d = d2 } title := strings.TrimSuffix(path.Base(name), path.Ext(base)) diff --git a/internal/fakefs/ziplist.go b/internal/fakefs/ziplist.go index 0cf892f7..dce01500 100644 --- a/internal/fakefs/ziplist.go +++ b/internal/fakefs/ziplist.go @@ -4,22 +4,28 @@ package fakefs for f in *.zip; do echo "$f: "; unzip -l $f; done >list.lst */ import ( + "archive/zip" "bufio" + "errors" "io" "io/fs" "os" + "path/filepath" "regexp" "sort" "strconv" "strings" "time" - "github.com/simulot/immich-go/helpers/gen" + 
"github.com/simulot/immich-go/internal/gen" ) // ` 2104348 07-20-2023 00:00 Takeout/Google Photos/2020 - Costa Rica/IMG_3235.MP4` -var reZipList = regexp.MustCompile(`(-rw-r--r-- 0/0\s+)?(\d+)\s+(.{16})\s+(.*)$`) +var ( + reZipList = regexp.MustCompile(`(-rw-r--r-- 0/0\s+)?(\d+)\s+(.{16})\s+(.*)$`) + reFileLine = regexp.MustCompile(`^(\d+)\s+(\d+)\s+files$`) // 2144740441 10826 files +) func readFileLine(l string, dateFormat string) (string, int64, time.Time) { if len(l) < 30 { @@ -41,12 +47,34 @@ func ScanStringList(dateFormat string, s string) ([]fs.FS, error) { } func ScanFileList(name string, dateFormat string) ([]fs.FS, error) { + var r io.ReadCloser f, err := os.Open(name) if err != nil { return nil, err } + if strings.ToLower(filepath.Ext(name)) == ".zip" { + i, err := f.Stat() + if err != nil { + return nil, err + } + z, err := zip.NewReader(f, i.Size()) + if err != nil { + return nil, err + } + if len(z.File) == 0 { + return nil, errors.New("zip file is empty") + } + r, err = z.File[0].Open() + if err != nil { + return nil, err + } + defer r.Close() + } else { + r = f + } + defer f.Close() - return ScanFileListReader(f, dateFormat) + return ScanFileListReader(r, dateFormat) } func ScanFileListReader(f io.Reader, dateFormat string) ([]fs.FS, error) { @@ -84,6 +112,9 @@ func ScanFileListReader(f io.Reader, dateFormat string) ([]fs.FS, error) { } continue } + if reFileLine.MatchString(l) { + continue + } if name, size, modTime := readFileLine(l, dateFormat); name != "" { fsys.addFile(name, size, modTime) } diff --git a/helpers/fileevent/fileevents.go b/internal/fileevent/fileevents.go similarity index 63% rename from helpers/fileevent/fileevents.go rename to internal/fileevent/fileevents.go index f4da5cab..3ca98251 100644 --- a/helpers/fileevent/fileevents.go +++ b/internal/fileevent/fileevents.go @@ -1,17 +1,20 @@ +// Package fileevent provides a mechanism to record and report events related to file processing. 
+ package fileevent +/* + TODO: + - rename the package as journal + - use a filenemame type that keeps the fsys and the name in that fsys + +*/ import ( "context" "fmt" - "io" "log/slog" - "sort" "strings" "sync" "sync/atomic" - - "github.com/simulot/immich-go/browser" - "github.com/simulot/immich-go/helpers/gen" ) /* @@ -21,11 +24,12 @@ import ( type Code int const ( - DiscoveredImage Code = iota // = "Scanned image" - DiscoveredVideo // = "Scanned video" - DiscoveredSidecar // = "Scanned side car file" - DiscoveredDiscarded // = "Discarded" - DiscoveredUnsupported // = "File type not supported" + NotHandled Code = iota + DiscoveredImage // = "Scanned image" + DiscoveredVideo // = "Scanned video" + DiscoveredSidecar // = "Scanned side car file" + DiscoveredDiscarded // = "Discarded" + DiscoveredUnsupported // = "File type not supported" AnalysisAssociatedMetadata AnalysisMissingAssociatedMetadata @@ -36,7 +40,8 @@ const ( UploadServerDuplicate // = "Server has photo" UploadServerBetter // = "Server's asset is better" UploadAlbumCreated - UploadAddToAlbum // = "Added to an album" + UploadAddToAlbum // = "Added to an album" + UploadLi UploadServerError // = "Server error" Uploaded // = "Uploaded" @@ -44,11 +49,17 @@ const ( LivePhoto // = "Live photo" Metadata // = "Metadata files" INFO // = "Info" + + Written // = "Written" + + Tagged // = "Tagged" + Error MaxCode ) var _code = map[Code]string{ + NotHandled: "Not handled", DiscoveredImage: "scanned image file", DiscoveredVideo: "scanned video file", DiscoveredSidecar: "scanned sidecar file", @@ -72,7 +83,11 @@ var _code = map[Code]string{ LivePhoto: "Live photo", Metadata: "Metadata files", INFO: "Info", - Error: "error", + + Written: "Written", + + Tagged: "Tagged", + Error: "error", } func (e Code) String() string { @@ -83,40 +98,31 @@ func (e Code) String() string { } type Recorder struct { - lock sync.RWMutex - counts []int64 - fileEvents map[string]map[Code]int - log *slog.Logger - debug bool + lock 
sync.RWMutex + counts counts + log *slog.Logger } -func NewRecorder(l *slog.Logger, debug bool) *Recorder { +type counts []int64 + +func NewRecorder(l *slog.Logger) *Recorder { r := &Recorder{ - counts: make([]int64, MaxCode), - fileEvents: map[string]map[Code]int{}, - log: l, - debug: debug, + counts: make([]int64, MaxCode), + log: l, } return r } -func (r *Recorder) Record(ctx context.Context, code Code, object any, file string, args ...any) { +func (r *Recorder) Log() *slog.Logger { + return r.log +} + +func (r *Recorder) Record(ctx context.Context, code Code, file slog.LogValuer, args ...any) { atomic.AddInt64(&r.counts[code], 1) - if r.debug && file != "" { - r.lock.Lock() - events := r.fileEvents[file] - if events == nil { - events = map[Code]int{} - } - v := events[code] + 1 - events[code] = v - r.fileEvents[file] = events - r.lock.Unlock() - } if r.log != nil { level := slog.LevelInfo - if file != "" { - args = append([]any{"file", file}, args...) + if file != nil { + args = append([]any{"file", file.LogValue()}, args...) } for _, a := range args { if a == "error" { @@ -125,17 +131,6 @@ func (r *Recorder) Record(ctx context.Context, code Code, object any, file strin } r.log.Log(ctx, level, code.String(), args...) } - if a, ok := object.(*browser.LocalAssetFile); ok && a.LivePhoto != nil { - arg2 := []any{} - for i := 0; i < len(args); i++ { - if args[i] == "file" { - i += 1 - continue - } - arg2 = append(arg2, args[i]) - } - r.Record(ctx, code, a.LivePhoto, a.LivePhoto.FileName, arg2...) 
- } } func (r *Recorder) SetLogger(l *slog.Logger) { @@ -174,9 +169,11 @@ func (r *Recorder) Report() { } { sb.WriteString(fmt.Sprintf("%-40s: %7d\n", c.String(), r.counts[c])) } - - r.log.Info(sb.String()) fmt.Println(sb.String()) + lines := strings.Split(sb.String(), "\n") + for _, s := range lines { + r.log.Info(s) + } } func (r *Recorder) GetCounts() []int64 { @@ -187,53 +184,6 @@ func (r *Recorder) GetCounts() []int64 { return counts } -func (r *Recorder) WriteFileCounts(w io.Writer) error { - reportCodes := []Code{ - -1, - DiscoveredImage, - DiscoveredVideo, - AnalysisAssociatedMetadata, - DiscoveredDiscarded, - DiscoveredUnsupported, - AnalysisLocalDuplicate, - UploadNotSelected, - UploadUpgraded, - UploadServerBetter, - UploadServerDuplicate, - Uploaded, - } - fmt.Fprint(w, "File,") - for _, c := range reportCodes { - if c >= 0 { - fmt.Fprint(w, strings.Replace(c.String(), " ", "_", -1)+",") - } else { - fmt.Fprint(w, "check,") - } - } - fmt.Fprintln(w) - keys := gen.MapKeys(r.fileEvents) - sort.Strings(keys) - for _, f := range keys { - fmt.Fprint(w, "\"", f, "\",") - e := r.fileEvents[f] - check := 0 - for _, c := range reportCodes { - if c >= 0 { - check += e[c] - } - } - for _, c := range reportCodes { - if c >= 0 { - fmt.Fprint(w, e[c], ",") - } else { - fmt.Fprint(w, check, ",") - } - } - fmt.Fprintln(w) - } - return nil -} - func (r *Recorder) TotalAssets() int64 { return atomic.LoadInt64(&r.counts[DiscoveredImage]) + atomic.LoadInt64(&r.counts[DiscoveredVideo]) } @@ -258,3 +208,34 @@ func (r *Recorder) TotalProcessed(forcedMissingJSON bool) int64 { } return v } + +// IsEqualCounts checks if two slices of int64 have the same elements in the same order. 
// IsEqualCounts reports whether a and b hold the same event counters,
// element for element and in the same order.
// Used for tests only.
func IsEqualCounts(a, b []int64) bool {
	if len(a) != len(b) {
		return false
	}
	for i, v := range a {
		if v != b[i] {
			return false
		}
	}
	return true
}
"github.com/simulot/immich-go/internal/filetypes" +) + +func TestHuawei(t *testing.T) { + tests := []struct { + name string + filename string + expected bool + info assets.NameInfo + }{ + { + name: "BURSTCOVER", + filename: "IMG_20231014_183246_BURST001_COVER.jpg", + expected: true, + info: assets.NameInfo{ + Radical: "IMG_20231014_183246", + Base: "IMG_20231014_183246_BURST001_COVER.jpg", + IsCover: true, + Ext: ".jpg", + Type: filetypes.TypeImage, + Kind: assets.KindBurst, + Index: 1, + Taken: time.Date(2023, 10, 14, 18, 32, 46, 0, time.Local), + }, + }, + { + name: "BURST", + filename: "IMG_20231014_183246_BURST002.jpg", + expected: true, + info: assets.NameInfo{ + Radical: "IMG_20231014_183246", + Base: "IMG_20231014_183246_BURST002.jpg", + IsCover: false, + Ext: ".jpg", + Type: filetypes.TypeImage, + Kind: assets.KindBurst, + Index: 2, + Taken: time.Date(2023, 10, 14, 18, 32, 46, 0, time.Local), + }, + }, + + { + name: "InvalidFilename", + filename: "IMG_1123.jpg", + expected: false, + info: assets.NameInfo{}, + }, + } + + ic := InfoCollector{ + TZ: time.Local, + SM: filetypes.DefaultSupportedMedia, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, info := ic.Huawei(tt.filename) + if got != tt.expected { + t.Errorf("expected %v, got %v", tt.expected, got) + } + if got && !reflect.DeepEqual(info, tt.info) { + t.Errorf("expected \n%+v,\n got \n%+v", tt.info, info) + } + }) + } +} diff --git a/internal/filenames/infoCollector.go b/internal/filenames/infoCollector.go new file mode 100644 index 00000000..949b0dc3 --- /dev/null +++ b/internal/filenames/infoCollector.go @@ -0,0 +1,50 @@ +package filenames + +import ( + "path" + "strings" + "time" + + "github.com/simulot/immich-go/internal/assets" + "github.com/simulot/immich-go/internal/filetypes" +) + +type InfoCollector struct { + TZ *time.Location + SM filetypes.SupportedMedia +} + +// NewInfoCollector creates a new InfoCollector +func NewInfoCollector(tz *time.Location, sm 
filetypes.SupportedMedia) *InfoCollector { + return &InfoCollector{ + TZ: tz, + SM: sm, + } +} + +// nameMatcher analyze the name and return +// bool -> true when name is a part of a burst +// NameInfo -> the information extracted from the name +type nameMatcher func(name string) (bool, assets.NameInfo) + +// GetInfo analyze the name and return the information extracted from the name +func (ic InfoCollector) GetInfo(name string) assets.NameInfo { + base := path.Base(name) + for _, m := range []nameMatcher{ic.Pixel, ic.Samsung, ic.Nexus, ic.Huawei, ic.SonyXperia} { + if ok, i := m(base); ok { + return i + } + } + + // no matcher found, return a basic info + t := TakeTimeFromPath(name, ic.TZ) + ext := path.Ext(base) + + return assets.NameInfo{ + Base: base, + Radical: strings.TrimSuffix(base, ext), + Ext: strings.ToLower(ext), + Taken: t, + Type: ic.SM.TypeFromExt(ext), + } +} diff --git a/internal/filenames/info_test.go b/internal/filenames/info_test.go new file mode 100644 index 00000000..ccaa1a0d --- /dev/null +++ b/internal/filenames/info_test.go @@ -0,0 +1,137 @@ +package filenames + +import ( + "reflect" + "testing" + "time" + + "github.com/simulot/immich-go/internal/assets" + "github.com/simulot/immich-go/internal/filetypes" +) + +func normalizeTime(t time.Time) time.Time { + return t.Round(0).UTC() +} + +func TestGetInfo(t *testing.T) { + tests := []struct { + name string + filename string + expected bool + info assets.NameInfo + }{ + { + name: "Normal", + filename: "PXL_20231026_210642603.dng", + expected: true, + info: assets.NameInfo{ + Radical: "PXL_20231026_210642603", + Base: "PXL_20231026_210642603.dng", + IsCover: false, + Ext: ".dng", + Type: filetypes.TypeImage, + Taken: time.Date(2023, 10, 26, 21, 6, 42, 0, time.UTC), + }, + }, + { + name: "Nexus BURST cover", + filename: "00015IMG_00015_BURST20171111030039_COVER.jpg", + expected: true, + info: assets.NameInfo{ + Radical: "BURST20171111030039", + Base: 
"00015IMG_00015_BURST20171111030039_COVER.jpg", + IsCover: true, + Ext: ".jpg", + Type: filetypes.TypeImage, + Kind: assets.KindBurst, + Index: 15, + Taken: time.Date(2017, 11, 11, 3, 0, 39, 0, time.Local), + }, + }, + { + name: "Samsung BURST", + filename: "20231207_101605_031.jpg", + expected: true, + info: assets.NameInfo{ + Radical: "20231207_101605", + Base: "20231207_101605_031.jpg", + IsCover: false, + Ext: ".jpg", + Type: filetypes.TypeImage, + Kind: assets.KindBurst, + Index: 31, + Taken: time.Date(2023, 12, 7, 10, 16, 5, 0, time.Local), + }, + }, + { + name: "Regular", + filename: "IMG_20171111_030128.jpg", + expected: false, + info: assets.NameInfo{ + Radical: "IMG_20171111_030128", + Base: "IMG_20171111_030128.jpg", + Ext: ".jpg", + Type: filetypes.TypeImage, + Taken: time.Date(2017, 11, 11, 3, 1, 28, 0, time.Local), + }, + }, + { + name: "Sony Xperia BURST", + filename: "DSC_0001_BURST20230709220904977.JPG", + expected: true, + info: assets.NameInfo{ + Radical: "BURST20230709220904977", + Base: "DSC_0001_BURST20230709220904977.JPG", + IsCover: false, + Ext: ".jpg", + Type: filetypes.TypeImage, + Kind: assets.KindBurst, + Index: 1, + Taken: time.Date(2023, 7, 9, 22, 9, 4, int(977*time.Millisecond), time.Local), + }, + }, + // Epson files are handled in groups + // { + // name: "Epson Scanner", + // filename: "img0001_a.jpg", + // expected: true, + // info: assets.NameInfo{ + // Radical: "img0001", + // Base: "img0001_a.jpg", + // IsCover: false, + // Ext: ".jpg", + // Type: filetypes.TypeImage, + // Kind: KindEdited, + // }, + // }, + { + name: "InvalidFilename", + filename: "IMG_1123.jpg", + expected: false, + info: assets.NameInfo{ + Base: "IMG_1123.jpg", + Radical: "IMG_1123", + Ext: ".jpg", + Type: filetypes.TypeImage, + }, + }, + } + + ic := InfoCollector{ + TZ: time.Local, + SM: filetypes.DefaultSupportedMedia, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + info := ic.GetInfo(tt.filename) + + // Normalize time fields + 
info.Taken = normalizeTime(info.Taken) + tt.info.Taken = normalizeTime(tt.info.Taken) + + if !reflect.DeepEqual(info, tt.info) { + t.Errorf("expected \n%+v,\n got \n%+v", tt.info, info) + } + }) + } +} diff --git a/immich/metadata/namesdate.go b/internal/filenames/namesdate.go similarity index 80% rename from immich/metadata/namesdate.go rename to internal/filenames/namesdate.go index 4354bd99..53da8457 100644 --- a/immich/metadata/namesdate.go +++ b/internal/filenames/namesdate.go @@ -1,4 +1,4 @@ -package metadata +package filenames import ( "os" @@ -14,21 +14,21 @@ var timeRe = regexp.MustCompile(`(19[89]\d|20\d\d)\D?(0\d|1[0-2])\D?([0-3]\d)\D{ // from the given full path. At first it tries to extract from filename, then from each folder // name (end to start), If no time is found - it will try to extract from the path itself as a // last resort (e.g. /something/2024/06/06/file123.png). -func TakeTimeFromPath(fullpath string) time.Time { +func TakeTimeFromPath(fullpath string, tz *time.Location) time.Time { parts := strings.Split(fullpath, string(os.PathSeparator)) for i := len(parts) - 1; i >= 0; i-- { - if t := TakeTimeFromName(parts[i]); !t.IsZero() { + if t := TakeTimeFromName(parts[i], tz); !t.IsZero() { return t } } - return TakeTimeFromName(fullpath) + return TakeTimeFromName(fullpath, tz) } // TakeTimeFromName takes the name of a file and returns a time.Time value that is extracted -// from the given file name. -func TakeTimeFromName(s string) time.Time { +// from the given file name. It uses the given Timezone to parse the time. 
+func TakeTimeFromName(s string, tz *time.Location) time.Time { timeSegments := timeRe.FindStringSubmatch(s) if len(timeSegments) < 4 { return time.Time{} @@ -38,7 +38,7 @@ func TakeTimeFromName(s string) time.Time { for i := 1; i < len(timeSegments); i++ { m[i-1], _ = strconv.Atoi(timeSegments[i]) } - t := time.Date(m[0], time.Month(m[1]), m[2], m[3], m[4], m[5], 0, time.UTC) + t := time.Date(m[0], time.Month(m[1]), m[2], m[3], m[4], m[5], 0, tz) if t.Year() != m[0] || t.Month() != time.Month(m[1]) || t.Day() != m[2] || t.Hour() != m[3] || t.Minute() != m[4] || t.Second() != m[5] { diff --git a/immich/metadata/namesdate_test.go b/internal/filenames/namesdate_test.go similarity index 93% rename from immich/metadata/namesdate_test.go rename to internal/filenames/namesdate_test.go index c66e0340..c30d8f1c 100644 --- a/immich/metadata/namesdate_test.go +++ b/internal/filenames/namesdate_test.go @@ -1,4 +1,4 @@ -package metadata +package filenames import ( "testing" @@ -110,7 +110,7 @@ func TestTakeTimeFromPath(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - if got := TakeTimeFromPath(tt.name); !got.Equal(tt.expected) { + if got := TakeTimeFromPath(tt.name, time.UTC); !got.Equal(tt.expected) { t.Errorf("TakeTimeFromPath() = %v, want %v", got, tt.expected) } }) @@ -119,12 +119,12 @@ func TestTakeTimeFromPath(t *testing.T) { func BenchmarkTakeTimeFromPathPath(b *testing.B) { for i := 0; i < b.N; i++ { - TakeTimeFromPath("2022/2022.11/2022.11.09/IMG_1234.HEIC") + TakeTimeFromPath("2022/2022.11/2022.11.09/IMG_1234.HEIC", time.UTC) } } func BenchmarkTakeTimeFromName(b *testing.B) { for i := 0; i < b.N; i++ { - TakeTimeFromName("PXL_20220909_154515546.TS.mp4") + TakeTimeFromName("PXL_20220909_154515546.TS.mp4", time.UTC) } } diff --git a/internal/filenames/nexus.go b/internal/filenames/nexus.go new file mode 100644 index 00000000..ce6f0625 --- /dev/null +++ b/internal/filenames/nexus.go @@ -0,0 +1,53 @@ +package filenames + +import ( + 
"regexp" + "strconv" + "strings" + "time" + + "github.com/simulot/immich-go/internal/assets" +) + +/* + +Nexus burst file name pattern +#100 stack: Huawei Nexus 6P + +Burst +00001IMG_00001_BURST20171111030039.jpg +... +00014IMG_00014_BURST20171111030039.jpg +00015IMG_00015_BURST20171111030039_COVER.jpg +00000PORTRAIT_00000_BURST20190828181853475.jpg +00100lPORTRAIT_00100_BURST20181229213517346_COVER.jpg +00000IMG_00000_BURST20200607093330363_COVER.jpg +00000IMG_00000_BURST20190830164840873_COVER.jpg +00000IMG_00000_BURST20190830164840873.jpg + + +Regular +IMG_20171111_030055.jpg +IMG_20171111_030128.jpg +*/ + +var nexusRE = regexp.MustCompile(`^(\d+)\D+_\d+_(BURST\d+)(\D+)?(\..+)$`) + +func (ic InfoCollector) Nexus(name string) (bool, assets.NameInfo) { + parts := nexusRE.FindStringSubmatch(name) + if len(parts) == 0 { + return false, assets.NameInfo{} + } + ext := parts[4] + info := assets.NameInfo{ + Radical: parts[2], + Base: name, + IsCover: strings.Contains(parts[3], "COVER"), + Ext: strings.ToLower(ext), + Type: ic.SM.TypeFromExt(ext), + Kind: assets.KindBurst, + } + info.Index, _ = strconv.Atoi(parts[1]) + info.Taken, _ = time.ParseInLocation("20060102150405", parts[2][5:19], ic.TZ) + return true, info +} diff --git a/internal/filenames/nexus_test.go b/internal/filenames/nexus_test.go new file mode 100644 index 00000000..b4b0e94f --- /dev/null +++ b/internal/filenames/nexus_test.go @@ -0,0 +1,102 @@ +package filenames + +import ( + "testing" + "time" + + "github.com/simulot/immich-go/internal/assets" + "github.com/simulot/immich-go/internal/filetypes" +) + +func TestNexus(t *testing.T) { + tests := []struct { + name string + filename string + expected bool + info assets.NameInfo + }{ + { + name: "BURST", + filename: "00001IMG_00001_BURST20171111030039.jpg", + expected: true, + info: assets.NameInfo{ + Radical: "BURST20171111030039", + Base: "00001IMG_00001_BURST20171111030039.jpg", + IsCover: false, + Ext: ".jpg", + Type: filetypes.TypeImage, + Kind: 
assets.KindBurst, + Index: 1, + Taken: time.Date(2017, 11, 11, 3, 0, 39, 0, time.Local), + }, + }, + { + name: "BURST cover", + filename: "00015IMG_00015_BURST20171111030039_COVER.jpg", + expected: true, + info: assets.NameInfo{ + Radical: "BURST20171111030039", + Base: "00015IMG_00015_BURST20171111030039_COVER.jpg", + IsCover: true, + Ext: ".jpg", + Type: filetypes.TypeImage, + Kind: assets.KindBurst, + Index: 15, + Taken: time.Date(2017, 11, 11, 3, 0, 39, 0, time.Local), + }, + }, + { + name: "PORTRAIT BURST cover", + filename: "00100lPORTRAIT_00100_BURST20181229213517346_COVER.jpg", + expected: true, + info: assets.NameInfo{ + Radical: "BURST20181229213517346", + Base: "00100lPORTRAIT_00100_BURST20181229213517346_COVER.jpg", + IsCover: true, + Ext: ".jpg", + Type: filetypes.TypeImage, + Kind: assets.KindBurst, + Index: 100, + Taken: time.Date(2018, 12, 29, 21, 35, 17, 0, time.Local), + }, + }, + { + name: "PORTRAIT BURST", + filename: "00000PORTRAIT_00000_BURST20190828181853475.jpg", + expected: true, + info: assets.NameInfo{ + Radical: "BURST20190828181853475", + Base: "00000PORTRAIT_00000_BURST20190828181853475.jpg", + IsCover: false, + Ext: ".jpg", + Type: filetypes.TypeImage, + Kind: assets.KindBurst, + Index: 0, + Taken: time.Date(2019, 8, 28, 18, 18, 53, 0, time.Local), + }, + }, + + { + name: "InvalidFilename", + filename: "IMG_1123.jpg", + expected: false, + info: assets.NameInfo{}, + }, + } + + ic := InfoCollector{ + TZ: time.Local, + SM: filetypes.DefaultSupportedMedia, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, info := ic.Nexus(tt.filename) + if got != tt.expected { + t.Errorf("expected %v, got %v", tt.expected, got) + } + if got && info != tt.info { + t.Errorf("expected \n%+v,\n got \n%+v", tt.info, info) + } + }) + } +} diff --git a/internal/filenames/pixel.go b/internal/filenames/pixel.go new file mode 100644 index 00000000..592fa19f --- /dev/null +++ b/internal/filenames/pixel.go @@ -0,0 +1,77 @@ +package 
filenames + +import ( + "regexp" + "strconv" + "strings" + "time" + + "github.com/simulot/immich-go/internal/assets" +) + +/* +Pixel burst file name pattern +#94 stack: for Pixel 5 and Pixel 8 Pro naming schemes +Google Pixel 5 +Normal - STACKS +PXL_20231026_210642603.dng +PXL_20231026_210642603.jpg + +Burst - DOES NOT STACK +PXL_20231026_205755225.dng +PXL_20231026_205755225.MP.jpg + +Google Pixel 8 Pro +Normal - DOES NOT STACK +PXL_20231207_032111247.RAW-02.ORIGINAL.dng +PXL_20231207_032111247.RAW-01.COVER.jpg + +Burst - DOES NOT STACK +PXL_20231207_032108788.RAW-02.ORIGINAL.dng +PXL_20231207_032108788.RAW-01.MP.COVER.jpg + +PXL_20230330_184138390.MOTION-01.COVER.jpg +PXL_20230330_184138390.MOTION-02.ORIGINAL.jpg +PXL_20230330_201207251.jpg +PXL_20230816_132648337.NIGHT.jpg +PXL_20230817_175514506.PANO.jpg +PXL_20230809_203029471.LONG_EXPOSURE-01.COVER.jpg +PXL_20230809_203055470.LONG_EXPOSURE-01.COVER.jpg +PXL_20231220_170358366.RAW-01.COVER.jpg +PXL_20231220_170358366.RAW-02.ORIGINAL.dng + +PXL_20211014_171433750.MP.jpg +PXL_20211015_192314061.PORTRAIT.jpg +PXL_20211012_171937656.NIGHT.jpg +*/ +var pixelRE = regexp.MustCompile(`^(PXL_\d{8}_\d{9})((.*)?(\d{2}))?(.*)?(\..*)$`) + +func (ic InfoCollector) Pixel(name string) (bool, assets.NameInfo) { + parts := pixelRE.FindStringSubmatch(name) + if len(parts) == 0 { + return false, assets.NameInfo{} + } + ext := parts[6] + info := assets.NameInfo{ + Radical: parts[1], + Base: name, + IsCover: strings.HasSuffix(parts[5], "COVER"), + Ext: strings.ToLower(ext), + Type: ic.SM.TypeFromExt(ext), + } + if parts[4] != "" { + info.Index, _ = strconv.Atoi(parts[4]) + } + switch { + case strings.Contains(parts[3], "PORTRAIT"): + info.Kind = assets.KindPortrait + case strings.Contains(parts[3], "NIGHT"): + info.Kind = assets.KindNight + case strings.Contains(parts[3], "LONG_EXPOSURE"): + info.Kind = assets.KindLongExposure + case strings.Contains(parts[3], "MOTION"): + info.Kind = assets.KindMotion + } + info.Taken, _ = 
time.ParseInLocation("20060102_150405", parts[1][4:19], time.UTC) + return true, info +} diff --git a/internal/filenames/pixel_test.go b/internal/filenames/pixel_test.go new file mode 100644 index 00000000..f3bcf447 --- /dev/null +++ b/internal/filenames/pixel_test.go @@ -0,0 +1,142 @@ +package filenames + +import ( + "testing" + "time" + + "github.com/simulot/immich-go/internal/assets" + "github.com/simulot/immich-go/internal/filetypes" +) + +func TestPixel(t *testing.T) { + tests := []struct { + name string + filename string + expected bool + info assets.NameInfo + }{ + { + name: "Normal", + filename: "PXL_20231026_210642603.dng", + expected: true, + info: assets.NameInfo{ + Radical: "PXL_20231026_210642603", + Base: "PXL_20231026_210642603.dng", + IsCover: false, + Ext: ".dng", + Type: filetypes.TypeImage, + Taken: time.Date(2023, 10, 26, 21, 6, 42, 0, time.UTC), + }, + }, + { + name: "RawJpg", + filename: "PXL_20231207_032111247.RAW-02.ORIGINAL.dng", + expected: true, + info: assets.NameInfo{ + Radical: "PXL_20231207_032111247", + Base: "PXL_20231207_032111247.RAW-02.ORIGINAL.dng", + IsCover: false, + Ext: ".dng", + Type: filetypes.TypeImage, + Index: 2, + Taken: time.Date(2023, 12, 7, 3, 21, 11, 0, time.UTC), + }, + }, + { + name: "RawJpg Cover", + filename: "PXL_20231207_032111247.RAW-01.COVER.jpg", + expected: true, + info: assets.NameInfo{ + Radical: "PXL_20231207_032111247", + Base: "PXL_20231207_032111247.RAW-01.COVER.jpg", + IsCover: true, + Ext: ".jpg", + Type: filetypes.TypeImage, + Index: 1, + Taken: time.Date(2023, 12, 7, 3, 21, 11, 0, time.UTC), + }, + }, + { + name: "MotionCover", + filename: "PXL_20230330_184138390.MOTION-01.COVER.jpg", + expected: true, + info: assets.NameInfo{ + Radical: "PXL_20230330_184138390", + Base: "PXL_20230330_184138390.MOTION-01.COVER.jpg", + IsCover: true, + Ext: ".jpg", + Type: filetypes.TypeImage, + Kind: assets.KindMotion, + Index: 1, + Taken: time.Date(2023, 3, 30, 18, 41, 38, 0, time.UTC), + }, + }, + { + name: 
"LONG_EXPOSURE_COVER", + filename: "PXL_20230809_203029471.LONG_EXPOSURE-01.COVER.jpg", + expected: true, + info: assets.NameInfo{ + Radical: "PXL_20230809_203029471", + Base: "PXL_20230809_203029471.LONG_EXPOSURE-01.COVER.jpg", + IsCover: true, + Ext: ".jpg", + Type: filetypes.TypeImage, + Kind: assets.KindLongExposure, + Index: 1, + Taken: time.Date(2023, 8, 9, 20, 30, 29, 0, time.UTC), + }, + }, + { + name: "NIGHT ROW ORIGINAL", + filename: "PXL_20240615_204528165.NIGHT.RAW-02.ORIGINAL.dng", + expected: true, + info: assets.NameInfo{ + Radical: "PXL_20240615_204528165", + Base: "PXL_20240615_204528165.NIGHT.RAW-02.ORIGINAL.dng", + IsCover: false, + Ext: ".dng", + Type: filetypes.TypeImage, + Kind: assets.KindNight, + Index: 2, + Taken: time.Date(2024, 6, 15, 20, 45, 28, 0, time.UTC), + }, + }, + { + name: "NIGHT ROW COVER", + filename: "PXL_20240615_204528165.NIGHT.RAW-01.COVER.jpg", + expected: true, + info: assets.NameInfo{ + Radical: "PXL_20240615_204528165", + Base: "PXL_20240615_204528165.NIGHT.RAW-01.COVER.jpg", + IsCover: true, + Ext: ".jpg", + Type: filetypes.TypeImage, + Kind: assets.KindNight, + Index: 1, + Taken: time.Date(2024, 6, 15, 20, 45, 28, 0, time.UTC), + }, + }, + { + name: "InvalidFilename", + filename: "IMG_1123.jpg", + expected: false, + info: assets.NameInfo{}, + }, + } + + ic := InfoCollector{ + TZ: time.UTC, + SM: filetypes.DefaultSupportedMedia, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, info := ic.Pixel(tt.filename) + if got != tt.expected { + t.Errorf("expected %v, got %v", tt.expected, got) + } + if got && info != tt.info { + t.Errorf("expected \n%+v,\n got \n%+v", tt.info, info) + } + }) + } +} diff --git a/internal/filenames/samsung.go b/internal/filenames/samsung.go new file mode 100644 index 00000000..14321435 --- /dev/null +++ b/internal/filenames/samsung.go @@ -0,0 +1,36 @@ +package filenames + +import ( + "regexp" + "strconv" + "strings" + "time" + + 
"github.com/simulot/immich-go/internal/assets" +) + +// Samsung burst file name pattern +// #99 stack: Samsung #99 +// 20231207_101605_001.jpg +// 20231207_101605_002.jpg +// 20231207_101605_xxx.jpg + +var samsungRE = regexp.MustCompile(`^(\d{8}_\d{6})_(\d{3})(\..+)$`) + +func (ic InfoCollector) Samsung(name string) (bool, assets.NameInfo) { + parts := samsungRE.FindStringSubmatch(name) + if len(parts) == 0 { + return false, assets.NameInfo{} + } + info := assets.NameInfo{ + Radical: parts[1], + Base: name, + Ext: strings.ToLower(parts[3]), + Type: ic.SM.TypeFromExt(parts[3]), + Kind: assets.KindBurst, + } + info.Index, _ = strconv.Atoi(parts[2]) + info.IsCover = info.Index == 1 + info.Taken, _ = time.ParseInLocation("20060102_150405", parts[1], ic.TZ) + return true, info +} diff --git a/internal/filenames/samsung_test.go b/internal/filenames/samsung_test.go new file mode 100644 index 00000000..2604b8c5 --- /dev/null +++ b/internal/filenames/samsung_test.go @@ -0,0 +1,72 @@ +package filenames + +import ( + "testing" + "time" + + "github.com/simulot/immich-go/internal/assets" + "github.com/simulot/immich-go/internal/filetypes" +) + +func TestSamsung(t *testing.T) { + tests := []struct { + name string + filename string + expected bool + info assets.NameInfo + }{ + { + name: "BURST COVER", + filename: "20231207_101605_001.jpg", + expected: true, + info: assets.NameInfo{ + Radical: "20231207_101605", + Base: "20231207_101605_001.jpg", + IsCover: true, + Ext: ".jpg", + Type: filetypes.TypeImage, + Kind: assets.KindBurst, + Index: 1, + Taken: time.Date(2023, 12, 7, 10, 16, 5, 0, time.Local), + }, + }, + { + name: "BURST", + filename: "20231207_101605_031.jpg", + expected: true, + info: assets.NameInfo{ + Radical: "20231207_101605", + Base: "20231207_101605_031.jpg", + IsCover: false, + Ext: ".jpg", + Type: filetypes.TypeImage, + Kind: assets.KindBurst, + Index: 31, + Taken: time.Date(2023, 12, 7, 10, 16, 5, 0, time.Local), + }, + }, + + { + name: "InvalidFilename", + 
filename: "IMG_1123.jpg", + expected: false, + info: assets.NameInfo{}, + }, + } + + ic := InfoCollector{ + TZ: time.Local, + SM: filetypes.DefaultSupportedMedia, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, info := ic.Samsung(tt.filename) + if got != tt.expected { + t.Errorf("expected %v, got %v", tt.expected, got) + } + if got && info != tt.info { + t.Errorf("expected \n%+v,\n got \n%+v", tt.info, info) + } + }) + } +} diff --git a/internal/filenames/sony_xperia.go b/internal/filenames/sony_xperia.go new file mode 100644 index 00000000..f92c52e6 --- /dev/null +++ b/internal/filenames/sony_xperia.go @@ -0,0 +1,32 @@ +package filenames + +import ( + "regexp" + "strconv" + "strings" + "time" + + "github.com/simulot/immich-go/internal/assets" +) + +var sonyXperiaRE = regexp.MustCompile(`^DSC_(\d+)_BURST(\d+)(\D+)?(\..+)$`) + +func (ic InfoCollector) SonyXperia(name string) (bool, assets.NameInfo) { + parts := sonyXperiaRE.FindStringSubmatch(name) + if len(parts) == 0 { + return false, assets.NameInfo{} + } + ext := parts[4] + info := assets.NameInfo{ + Radical: "BURST" + parts[2], + Base: name, + IsCover: strings.Contains(parts[3], "COVER"), + Ext: strings.ToLower(ext), + Type: ic.SM.TypeFromExt(ext), + Kind: assets.KindBurst, + } + info.Index, _ = strconv.Atoi(parts[1]) + + info.Taken, _ = time.ParseInLocation("20060102150405.000", parts[2][:14]+"."+parts[2][14:], ic.TZ) + return true, info +} diff --git a/internal/filenames/sony_xperia_test.go b/internal/filenames/sony_xperia_test.go new file mode 100644 index 00000000..c5b13ea7 --- /dev/null +++ b/internal/filenames/sony_xperia_test.go @@ -0,0 +1,71 @@ +package filenames + +import ( + "testing" + "time" + + "github.com/simulot/immich-go/internal/assets" + "github.com/simulot/immich-go/internal/filetypes" +) + +func TestSonyXperia(t *testing.T) { + tests := []struct { + name string + filename string + expected bool + info assets.NameInfo + }{ + { + name: "Sony Xperia BURST", + 
// SupportedMedia maps a lower-case file extension (including the leading
// dot) to its media type: TypeVideo, TypeImage or TypeSidecar.
type SupportedMedia map[string]string

const (
	TypeVideo   = "video"
	TypeImage   = "image"
	TypeSidecar = "sidecar"
	TypeUnknown = ""
)

// DefaultSupportedMedia lists the extensions accepted by Immich.
var DefaultSupportedMedia = SupportedMedia{
	".3gp": TypeVideo, ".avi": TypeVideo, ".flv": TypeVideo, ".insv": TypeVideo, ".m2ts": TypeVideo, ".m4v": TypeVideo, ".mkv": TypeVideo, ".mov": TypeVideo, ".mp4": TypeVideo, ".mpg": TypeVideo, ".mts": TypeVideo, ".webm": TypeVideo, ".wmv": TypeVideo,
	".3fr": TypeImage, ".ari": TypeImage, ".arw": TypeImage, ".avif": TypeImage, ".bmp": TypeImage, ".cap": TypeImage, ".cin": TypeImage, ".cr2": TypeImage, ".cr3": TypeImage, ".crw": TypeImage, ".dcr": TypeImage, ".dng": TypeImage, ".erf": TypeImage,
	".fff": TypeImage, ".gif": TypeImage, ".heic": TypeImage, ".heif": TypeImage, ".hif": TypeImage, ".iiq": TypeImage, ".insp": TypeImage, ".jpe": TypeImage, ".jpeg": TypeImage, ".jpg": TypeImage,
	".jxl": TypeImage, ".k25": TypeImage, ".kdc": TypeImage, ".mrw": TypeImage, ".nef": TypeImage, ".orf": TypeImage, ".ori": TypeImage, ".pef": TypeImage, ".png": TypeImage, ".psd": TypeImage, ".raf": TypeImage, ".raw": TypeImage, ".rw2": TypeImage,
	".rwl": TypeImage, ".sr2": TypeImage, ".srf": TypeImage, ".srw": TypeImage, ".tif": TypeImage, ".tiff": TypeImage, ".webp": TypeImage, ".x3f": TypeImage,
	".xmp":  TypeSidecar,
	".json": TypeSidecar,
	".mp":   TypeVideo,
}

// TypeFromName returns the media type of a file, based on the extension of
// its name. A name without any dot yields TypeUnknown.
//
// Bug fix: the previous implementation sliced with the raw result of
// strings.LastIndex, which is -1 when the name has no extension and made
// the expression name[-1:] panic.
func (sm SupportedMedia) TypeFromName(name string) string {
	dot := strings.LastIndex(name, ".")
	if dot < 0 {
		// No extension at all: not a supported media file.
		return TypeUnknown
	}
	return sm.TypeFromExt(name[dot:])
}

// TypeFromExt returns the media type associated with an extension
// (case-insensitive). Unknown extensions map to TypeUnknown.
func (sm SupportedMedia) TypeFromExt(ext string) string {
	ext = strings.ToLower(ext)
	if strings.HasPrefix(ext, ".mp~") {
		// #405: treat truncated ".mp~…" variants as ".mp4".
		ext = ".mp4"
	}
	return sm[ext]
}

// IsMedia reports whether ext is a known image or video extension.
func (sm SupportedMedia) IsMedia(ext string) bool {
	t := sm.TypeFromExt(ext)
	return t == TypeVideo || t == TypeImage
}

var (
	_supportedExtension    []string
	initSupportedExtension sync.Once
)

// IsExtensionPrefix reports whether ext matches a truncated form (last two
// characters removed) of some supported extension, e.g. ".jp" for ".jpeg".
//
// NOTE(review): the prefix table is built once, from whichever
// SupportedMedia instance this method is first called on; later instances
// silently reuse it — confirm a single instance is used process-wide.
func (sm SupportedMedia) IsExtensionPrefix(ext string) bool {
	initSupportedExtension.Do(func() {
		_supportedExtension = make([]string, len(sm))
		i := 0
		for k := range sm {
			// Keep each supported extension minus its last two characters.
			_supportedExtension[i] = k[:len(k)-2]
			i++
		}
		sort.Strings(_supportedExtension)
	})
	ext = strings.ToLower(ext)
	_, found := slices.BinarySearch(_supportedExtension, ext)
	return found
}

// IsIgnoredExt reports whether ext is not a supported extension at all.
func (sm SupportedMedia) IsIgnoredExt(ext string) bool {
	return sm.TypeFromExt(ext) == TypeUnknown
}

// rawExtensions defines the supported RAW file extensions
// https://github.com/immich-app/immich/blob/39b571a95c99cbc4183e5d389e6d682cd8e903d9/server/src/utils/mime-types.ts#L1-L55
// source: https://en.wikipedia.org/wiki/Raw_image_format
var rawExtensions = map[string]bool{
	".3fr": true, ".ari": true, ".arw": true, ".cap": true,
	".cin": true, ".cr2": true, ".cr3": true, ".crw": true,
	".dcr": true, ".dng": true, ".erf": true, ".fff": true,
	".iiq": true, ".k25": true, ".kdc": true, ".mrw": true,
	".nef": true, ".nrw": true, ".orf": true, ".ori": true,
	".pef": true, ".psd": true, ".raf": true, ".raw": true,
	".rw2": true, ".rwl": true, ".sr2": true, ".srf": true,
	".srw": true, ".x3f": true,
}

// IsRawFile checks if the given extension is a RAW file extension
// (case-insensitive).
func IsRawFile(ext string) bool {
	return rawExtensions[strings.ToLower(ext)]
}
*assets.Group) *assets.Group { + if g.Grouping != assets.GroupByBurst { + return g + } + // Keep only raw files + removedAssets := []*assets.Asset{} + keep := 0 + for _, a := range g.Assets { + if filetypes.IsRawFile(a.Ext) { + keep++ + } else { + removedAssets = append(removedAssets, a) + } + } + if keep > 0 { + for _, a := range removedAssets { + g.RemoveAsset(a, "Keep only RAW files in burst") + } + } + if len(g.Assets) < 2 { + g.Grouping = assets.GroupByNone + } + return g +} + +func stackBurstKeepJPEG(g *assets.Group) *assets.Group { + if g.Grouping != assets.GroupByBurst { + return g + } + // Keep only jpe files + removedAssets := []*assets.Asset{} + keep := 0 + for _, a := range g.Assets { + if a.Ext == ".jpg" || a.Ext == ".jpeg" { // nolint: goconst + keep++ + } else { + removedAssets = append(removedAssets, a) + } + } + if keep > 0 { + for _, a := range removedAssets { + g.RemoveAsset(a, "Keep only JPEG files in burst") + } + } + if len(g.Assets) < 2 { + g.Grouping = assets.GroupByNone + } + return g +} + +// Implement spf13 flag.Value interface + +func (b *BurstFlag) Set(value string) error { + switch strings.ToLower(value) { + case "": + *b = BurstNothing + case "stack": + *b = BurstStack + case "stackkeepraw": + *b = BurstkKeepRaw + case "stackkeepjpeg": + *b = BurstKeepJPEG + default: + return fmt.Errorf("invalid value %q for BurstFlag", value) + } + return nil +} + +func (b BurstFlag) String() string { + switch b { + case BurstNothing: + return "" + case BurstStack: + return "Stack" + case BurstkKeepRaw: + return "StackKeepRaw" + case BurstKeepJPEG: + return "StackKeepJPEG" + default: + return "Unknown" // nolint: goconst + } +} + +func (b BurstFlag) Type() string { + return "BurstFlag" +} diff --git a/internal/filters/filterBursts_test.go b/internal/filters/filterBursts_test.go new file mode 100644 index 00000000..4a276930 --- /dev/null +++ b/internal/filters/filterBursts_test.go @@ -0,0 +1,182 @@ +package filters + +import ( + "testing" + "time" + + 
"github.com/simulot/immich-go/internal/assets" + "github.com/simulot/immich-go/internal/filenames" + "github.com/simulot/immich-go/internal/filetypes" + "github.com/simulot/immich-go/internal/fshelper" +) + +var ic = filenames.NewInfoCollector(time.Local, filetypes.DefaultSupportedMedia) + +func mockAsset(name string) *assets.Asset { + a := &assets.Asset{ + File: fshelper.FSName(nil, name), + } + a.SetNameInfo(ic.GetInfo(name)) + return a +} + +func Test_unGroupBurst(t *testing.T) { + tests := []struct { + name string + group *assets.Group + expected *assets.Group + }{ + { + name: "GroupByBurst", + group: &assets.Group{ + Grouping: assets.GroupByBurst, + }, + expected: &assets.Group{ + Grouping: assets.GroupByNone, + }, + }, + { + name: "NotGroupByBurst", + group: &assets.Group{ + Grouping: assets.GroupByOther, + }, + expected: &assets.Group{ + Grouping: assets.GroupByOther, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := unGroupBurst(tt.group) + if result.Grouping != tt.expected.Grouping { + t.Errorf("expected %v, got %v", tt.expected.Grouping, result.Grouping) + } + }) + } +} + +func Test_stackBurstKeepJPEG(t *testing.T) { + tests := []struct { + name string + group *assets.Group + jpgCount int + rawCount int + heicCount int + expected *assets.Group + }{ + { + name: "GroupByBurstWithJPEG", + group: assets.NewGroup(assets.GroupByBurst, + mockAsset("photo1.jpg"), + mockAsset("photo2.jpg"), + ), + jpgCount: 2, + expected: assets.NewGroup(assets.GroupByBurst, + mockAsset("photo1.jpg"), + mockAsset("photo2.jpg"), + ), + }, + { + name: "GroupByBurstWithMixed", + group: assets.NewGroup(assets.GroupByBurst, + mockAsset("photo1.raw"), + mockAsset("photo2.jpg"), + ), + rawCount: 1, + jpgCount: 1, + expected: assets.NewGroup(assets.GroupByBurst, + mockAsset("photo2.jpg"), + ), + }, + { + name: "NotGroupByBurst", + group: assets.NewGroup(assets.GroupByOther, + mockAsset("photo1.jpg"), + mockAsset("photo2.jpg"), + ), + jpgCount: 
2, + expected: assets.NewGroup(assets.GroupByOther, + mockAsset("photo1.jpg"), + mockAsset("photo2.jpg"), + ), + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := stackBurstKeepJPEG(tt.group) + if len(result.Assets) != len(tt.expected.Assets) { + t.Errorf("expected %v assets, got %v", len(tt.expected.Assets), len(result.Assets)) + } + for i, asset := range result.Assets { + if asset.File.Name() != tt.expected.Assets[i].File.Name() { + t.Errorf("expected asset %v, got %v", tt.expected.Assets[i].File.Name(), asset.File.Name()) + } + } + }) + } +} + +func Test_groupBurstKeepRaw(t *testing.T) { + tests := []struct { + name string + group *assets.Group + jpgCount int + rawCount int + heicCount int + expected *assets.Group + }{ + { + name: "GroupByBurstWithRaw", + group: assets.NewGroup(assets.GroupByBurst, + mockAsset("photo1.raw"), + mockAsset("photo2.raw"), + ), + rawCount: 2, + expected: assets.NewGroup(assets.GroupByBurst, + mockAsset("photo1.raw"), + mockAsset("photo2.raw"), + ), + }, + { + name: "GroupByBurstWithMixed", + group: assets.NewGroup(assets.GroupByBurst, + mockAsset("photo1.raw"), + mockAsset("photo2.jpg"), + ), + rawCount: 1, + jpgCount: 1, + expected: assets.NewGroup(assets.GroupByBurst, + mockAsset("photo1.raw"), + ), + }, + { + name: "NotGroupByBurst", + group: assets.NewGroup(assets.GroupByOther, + mockAsset("photo1.raw"), + mockAsset("photo2.jpg"), + ), + rawCount: 1, + jpgCount: 1, + expected: assets.NewGroup(assets.GroupByOther, + mockAsset("photo1.raw"), + mockAsset("photo2.jpg"), + ), + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := groupBurstKeepRaw(tt.group) + if len(result.Assets) != len(tt.expected.Assets) { + t.Errorf("expected %v assets, got %v", len(tt.expected.Assets), len(result.Assets)) + } + for i, asset := range result.Assets { + if asset.File.Name() != tt.expected.Assets[i].File.Name() { + t.Errorf("expected asset %v, got %v", 
tt.expected.Assets[i].File.Name(), asset.File.Name()) + } + } + }) + } +} diff --git a/internal/filters/filterHeicJPEG.go b/internal/filters/filterHeicJPEG.go new file mode 100644 index 00000000..07b15345 --- /dev/null +++ b/internal/filters/filterHeicJPEG.go @@ -0,0 +1,161 @@ +package filters + +import ( + "fmt" + "strings" + + "github.com/simulot/immich-go/internal/assets" +) + +type HeicJpgFlag int + +const ( + HeicJpgNothing HeicJpgFlag = iota + HeicJpgKeepHeic // Keep only HEIC files + HeicJpgKeepJPG // Keep only JPEG files + HeicJpgStackHeic // Stack HEIC and JPEG files, with the HEIC file as the cover + HeicJpgStackJPG // Stack HEIC and JPEG files, with the JPEG file as the cover +) + +func (h HeicJpgFlag) GroupFilter() Filter { + switch h { + case HeicJpgNothing: + return unGroupHeicJpeg + case HeicJpgKeepHeic: + return groupHeicJpgKeepHeic + case HeicJpgKeepJPG: + return groupHeicJpgKeepJPG + case HeicJpgStackHeic: + return groupHeicJpgStackHeic + case HeicJpgStackJPG: + return groupHeicJpgStackJPG + default: + return nil + } +} + +func unGroupHeicJpeg(g *assets.Group) *assets.Group { + if g.Grouping != assets.GroupByHeicJpg { + return g + } + g.Grouping = assets.GroupByNone + return g +} + +func groupHeicJpgKeepHeic(g *assets.Group) *assets.Group { + if g.Grouping != assets.GroupByHeicJpg { + return g + } + // Keep only heic files + removedAssets := []*assets.Asset{} + keep := 0 + for _, a := range g.Assets { + if a.Ext == ".heic" { + keep++ + } else { + removedAssets = append(removedAssets, a) + } + } + + if keep > 0 { + for _, a := range removedAssets { + g.RemoveAsset(a, "Keep only HEIC files in HEIC/JPEG group") + } + } + if len(g.Assets) < 2 { + g.Grouping = assets.GroupByNone + } + return g +} + +func groupHeicJpgKeepJPG(g *assets.Group) *assets.Group { + if g.Grouping != assets.GroupByHeicJpg { + return g + } + // Keep only heic files + removedAssets := []*assets.Asset{} + keep := 0 + for _, a := range g.Assets { + if a.Ext == ".jpg" || a.Ext == 
".jpeg" { + keep++ + } else { + removedAssets = append(removedAssets, a) + } + } + if keep > 0 { + for _, a := range removedAssets { + g.RemoveAsset(a, "Keep only HEIC files in HEIC/JPEG group") + } + } + if len(g.Assets) < 2 { + g.Grouping = assets.GroupByNone + } + return g +} + +func groupHeicJpgStackHeic(g *assets.Group) *assets.Group { + if g.Grouping != assets.GroupByHeicJpg { + return g + } + // Set the cover index to the first HEIC file + for i, a := range g.Assets { + if a.Ext == ".heic" { + g.CoverIndex = i + break + } + } + return g +} + +func groupHeicJpgStackJPG(g *assets.Group) *assets.Group { + if g.Grouping != assets.GroupByHeicJpg { + return g + } + // Set the cover index to the first JPEG file + for i, a := range g.Assets { + if a.Ext == ".jpg" || a.Ext == ".jpeg" { + g.CoverIndex = i + break + } + } + return g +} + +func (h *HeicJpgFlag) Set(value string) error { + switch strings.ToLower(value) { + case "": + *h = HeicJpgNothing + case "keepheic": + *h = HeicJpgKeepHeic + case "keepjpg": + *h = HeicJpgKeepJPG + case "stackcoverheic": + *h = HeicJpgStackHeic + case "stackcoverjpg": + *h = HeicJpgStackJPG + default: + return fmt.Errorf("invalid value %q for HeicJpgFlag", value) + } + return nil +} + +func (h HeicJpgFlag) String() string { + switch h { + case HeicJpgNothing: + return "" + case HeicJpgKeepHeic: + return "KeepHeic" + case HeicJpgKeepJPG: + return "KeepJPG" + case HeicJpgStackHeic: + return "StackCoverHeic" + case HeicJpgStackJPG: + return "StackCoverJPG" + default: + return "Unknown" + } +} + +func (h HeicJpgFlag) Type() string { + return "HeicJpgFlag" +} diff --git a/internal/filters/filterHeicJPEG_test.go b/internal/filters/filterHeicJPEG_test.go new file mode 100644 index 00000000..e293af03 --- /dev/null +++ b/internal/filters/filterHeicJPEG_test.go @@ -0,0 +1,255 @@ +package filters + +import ( + "testing" + + "github.com/simulot/immich-go/internal/assets" +) + +func Test_unGroupHeicJpeg(t *testing.T) { + tests := []struct { + 
name string + group *assets.Group + expected *assets.Group + }{ + { + name: "GroupByHeicJpg", + group: &assets.Group{ + Grouping: assets.GroupByHeicJpg, + Assets: []*assets.Asset{ + mockAsset("photo1.heic"), + mockAsset("photo2.jpg"), + }, + }, + expected: &assets.Group{ + Grouping: assets.GroupByNone, + Assets: []*assets.Asset{ + mockAsset("photo1.heic"), + mockAsset("photo2.jpg"), + }, + }, + }, + { + name: "NotGroupByHeicJpg", + group: &assets.Group{ + Grouping: assets.GroupByOther, + Assets: []*assets.Asset{ + mockAsset("photo1.heic"), + mockAsset("photo2.jpg"), + }, + }, + expected: &assets.Group{ + Grouping: assets.GroupByOther, + Assets: []*assets.Asset{ + mockAsset("photo1.heic"), + mockAsset("photo2.jpg"), + }, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := unGroupHeicJpeg(tt.group) + if result.Grouping != tt.expected.Grouping { + t.Errorf("expected %v, got %v", tt.expected.Grouping, result.Grouping) + } + if len(result.Assets) != len(tt.expected.Assets) { + t.Errorf("expected %v assets, got %v", len(tt.expected.Assets), len(result.Assets)) + } + for i, asset := range result.Assets { + if asset.File.Name() != tt.expected.Assets[i].File.Name() { + t.Errorf("expected asset %v, got %v", tt.expected.Assets[i].File.Name(), asset.File.Name()) + } + } + }) + } +} + +func Test_groupHeicJpgKeepHeic(t *testing.T) { + tests := []struct { + name string + group *assets.Group + expected *assets.Group + }{ + { + name: "GroupByHeicJpgWithMixedAssets", + group: &assets.Group{ + Grouping: assets.GroupByHeicJpg, + Assets: []*assets.Asset{ + mockAsset("photo1.heic"), + mockAsset("photo2.jpg"), + }, + }, + expected: &assets.Group{ + Grouping: assets.GroupByNone, + Assets: []*assets.Asset{ + mockAsset("photo1.heic"), + }, + }, + }, + { + name: "GroupByHeicJpgWithMixedAssets2", + group: &assets.Group{ + Grouping: assets.GroupByHeicJpg, + Assets: []*assets.Asset{ + mockAsset("photo1.jpg"), + mockAsset("photo2.heic"), + }, + }, + 
expected: &assets.Group{ + Grouping: assets.GroupByNone, + Assets: []*assets.Asset{ + mockAsset("photo2.heic"), + }, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := groupHeicJpgKeepHeic(tt.group) + if result.Grouping != tt.expected.Grouping { + t.Errorf("expected %v, got %v", tt.expected.Grouping, result.Grouping) + } + if len(result.Assets) != len(tt.expected.Assets) { + t.Errorf("expected %v assets, got %v", len(tt.expected.Assets), len(result.Assets)) + } + for i, asset := range result.Assets { + if asset.File.Name() != tt.expected.Assets[i].File.Name() { + t.Errorf("expected asset %v, got %v", tt.expected.Assets[i].File.Name(), asset.File.Name()) + } + } + }) + } +} + +func Test_groupHeicJpgStackHeic(t *testing.T) { + tests := []struct { + name string + group *assets.Group + expected *assets.Group + }{ + { + name: "GroupByHeicJpgWithHeicFirst", + group: &assets.Group{ + Grouping: assets.GroupByHeicJpg, + Assets: []*assets.Asset{ + mockAsset("photo1.heic"), + mockAsset("photo2.jpg"), + }, + }, + expected: &assets.Group{ + Grouping: assets.GroupByHeicJpg, + Assets: []*assets.Asset{ + mockAsset("photo1.heic"), + mockAsset("photo2.jpg"), + }, + CoverIndex: 0, + }, + }, + { + name: "GroupByHeicJpgWithHeicSecond", + group: &assets.Group{ + Grouping: assets.GroupByHeicJpg, + Assets: []*assets.Asset{ + mockAsset("photo1.jpg"), + mockAsset("photo2.heic"), + }, + }, + expected: &assets.Group{ + Grouping: assets.GroupByHeicJpg, + Assets: []*assets.Asset{ + mockAsset("photo1.jpg"), + mockAsset("photo2.heic"), + }, + CoverIndex: 1, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := groupHeicJpgStackHeic(tt.group) + if result.Grouping != tt.expected.Grouping { + t.Errorf("expected %v, got %v", tt.expected.Grouping, result.Grouping) + } + if len(result.Assets) != len(tt.expected.Assets) { + t.Errorf("expected %v assets, got %v", len(tt.expected.Assets), len(result.Assets)) + } + for i, 
asset := range result.Assets { + if asset.File.Name() != tt.expected.Assets[i].File.Name() { + t.Errorf("expected asset %v, got %v", tt.expected.Assets[i].File.Name(), asset.File.Name()) + } + } + if result.CoverIndex != tt.expected.CoverIndex { + t.Errorf("expected cover index %v, got %v", tt.expected.CoverIndex, result.CoverIndex) + } + }) + } +} + +func Test_groupHeicJpgStackJPG(t *testing.T) { + tests := []struct { + name string + group *assets.Group + expected *assets.Group + }{ + { + name: "GroupByHeicJpgWithJPGFirst", + group: &assets.Group{ + Grouping: assets.GroupByHeicJpg, + Assets: []*assets.Asset{ + mockAsset("photo1.jpg"), + mockAsset("photo2.heic"), + }, + }, + expected: &assets.Group{ + Grouping: assets.GroupByHeicJpg, + Assets: []*assets.Asset{ + mockAsset("photo1.jpg"), + mockAsset("photo2.heic"), + }, + CoverIndex: 0, + }, + }, + { + name: "GroupByHeicJpgWithJPGSecond", + group: &assets.Group{ + Grouping: assets.GroupByHeicJpg, + Assets: []*assets.Asset{ + mockAsset("photo1.heic"), + mockAsset("photo2.jpg"), + }, + }, + expected: &assets.Group{ + Grouping: assets.GroupByHeicJpg, + Assets: []*assets.Asset{ + mockAsset("photo1.heic"), + mockAsset("photo2.jpg"), + }, + CoverIndex: 1, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := groupHeicJpgStackJPG(tt.group) + if result.Grouping != tt.expected.Grouping { + t.Errorf("expected %v, got %v", tt.expected.Grouping, result.Grouping) + } + if len(result.Assets) != len(tt.expected.Assets) { + t.Errorf("expected %v assets, got %v", len(tt.expected.Assets), len(result.Assets)) + } + for i, asset := range result.Assets { + if asset.File.Name() != tt.expected.Assets[i].File.Name() { + t.Errorf("expected asset %v, got %v", tt.expected.Assets[i].File.Name(), asset.File.Name()) + } + } + if result.CoverIndex != tt.expected.CoverIndex { + t.Errorf("expected cover index %v, got %v", tt.expected.CoverIndex, result.CoverIndex) + } + }) + } +} diff --git 
a/internal/filters/filterRawJPEG.go b/internal/filters/filterRawJPEG.go new file mode 100644 index 00000000..083c9e8b --- /dev/null +++ b/internal/filters/filterRawJPEG.go @@ -0,0 +1,161 @@ +package filters + +import ( + "fmt" + "strings" + + "github.com/simulot/immich-go/internal/assets" + "github.com/simulot/immich-go/internal/filetypes" +) + +type RawJPGFlag int + +const ( + RawJPGNothing RawJPGFlag = iota + RawJPGKeepRaw // Keep only raw files + RawJPGKeepJPG // Keep only JPEG files + RawJPGStackRaw // Stack raw and JPEG files, with the raw file as the cover + RawJPGStackJPG // Stack raw and JPEG files, with the JPEG file as the cover +) + +func (r RawJPGFlag) GroupFilter() Filter { + switch r { + case RawJPGNothing: + return unGroupRawJPGNothing + case RawJPGKeepRaw: + return groupRawJPGKeepRaw + case RawJPGKeepJPG: + return groupRawJPGKeepJPG + case RawJPGStackRaw: + return groupRawJPGStackRaw + case RawJPGStackJPG: + return groupRawJPGStackJPG + default: + return nil + } +} + +func unGroupRawJPGNothing(g *assets.Group) *assets.Group { + if g.Grouping != assets.GroupByRawJpg { + return g + } + g.Grouping = assets.GroupByNone + return g +} + +func groupRawJPGKeepRaw(g *assets.Group) *assets.Group { + if g.Grouping != assets.GroupByRawJpg { + return g + } + // Keep only raw files + removedAssets := []*assets.Asset{} + keep := 0 + for _, a := range g.Assets { + if filetypes.IsRawFile(a.Ext) { + keep++ + } else { + removedAssets = append(removedAssets, a) + } + } + if keep > 0 { + for _, a := range removedAssets { + g.RemoveAsset(a, "Keep only RAW files in RAW/JPEG group") + } + } + if len(g.Assets) < 2 { + g.Grouping = assets.GroupByNone + } + return g +} + +func groupRawJPGKeepJPG(g *assets.Group) *assets.Group { + if g.Grouping != assets.GroupByRawJpg { + return g + } + // Keep only JPEG files + removedAssets := []*assets.Asset{} + keep := 0 + for _, a := range g.Assets { + if a.Ext == ".jpg" || a.Ext == ".jpeg" { + keep++ + } else { + removedAssets = 
append(removedAssets, a) + } + } + if keep > 0 { + for _, a := range removedAssets { + g.RemoveAsset(a, "Keep only JPEG files in RAW/JPEG group") + } + } + if len(g.Assets) < 2 { + g.Grouping = assets.GroupByNone + } + return g +} + +func groupRawJPGStackRaw(g *assets.Group) *assets.Group { + if g.Grouping != assets.GroupByRawJpg { + return g + } + // Set the cover index to the first RAW file + for i, a := range g.Assets { + if filetypes.IsRawFile(a.Ext) { + g.CoverIndex = i + break + } + } + return g +} + +func groupRawJPGStackJPG(g *assets.Group) *assets.Group { + if g.Grouping != assets.GroupByRawJpg { + return g + } + // Set the cover index to the first JPEG file + for i, a := range g.Assets { + if a.Ext == ".jpg" || a.Ext == ".jpeg" { + g.CoverIndex = i + break + } + } + return g +} + +func (r *RawJPGFlag) Set(value string) error { + switch strings.ToLower(value) { + case "": + *r = RawJPGNothing + case "keepraw": + *r = RawJPGKeepRaw + case "keepjpg": + *r = RawJPGKeepJPG + case "stackcoverraw": + *r = RawJPGStackRaw + case "stackcoverjpg": + *r = RawJPGStackJPG + default: + return fmt.Errorf("invalid value %q for RawJPGFlag", value) + } + return nil +} + +func (r RawJPGFlag) String() string { + switch r { + case RawJPGNothing: + return "" + case RawJPGKeepRaw: + return "KeepRaw" + case RawJPGKeepJPG: + return "KeepJPG" + case RawJPGStackRaw: + return "StackCoverRaw" + case RawJPGStackJPG: + return "StackCoverJPG" + default: + return "Unknown" + } +} + +func (r RawJPGFlag) Type() string { + return "RawJPGFlag" +} diff --git a/internal/filters/filterRawJPEG_test.go b/internal/filters/filterRawJPEG_test.go new file mode 100644 index 00000000..46078965 --- /dev/null +++ b/internal/filters/filterRawJPEG_test.go @@ -0,0 +1,119 @@ +package filters + +import ( + "testing" + + "github.com/simulot/immich-go/internal/assets" +) + +func TestUnGroupRawJPGNothing(t *testing.T) { + tests := []struct { + name string + group *assets.Group + expected *assets.Group + }{ + { + 
name: "GroupByRawJpg", + group: assets.NewGroup(assets.GroupByRawJpg, + mockAsset("a.jpg"), + mockAsset("a.raw"), + ), + expected: assets.NewGroup(assets.GroupByNone, + mockAsset("a.jpg"), + mockAsset("a.raw"), + ), + }, + { + name: "NotGroupByRawJpg", + group: assets.NewGroup(assets.GroupByBurst, + mockAsset("a.jpg"), + mockAsset("a.raw"), + ), + expected: assets.NewGroup(assets.GroupByBurst, + mockAsset("a.jpg"), + mockAsset("a.raw"), + ), + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := unGroupRawJPGNothing(tt.group) + if result.Grouping != tt.expected.Grouping { + t.Errorf("expected %v, got %v", tt.expected.Grouping, result.Grouping) + } + }) + } +} + +func TestGroupRawJPGKeepRaw(t *testing.T) { + tests := []struct { + name string + group *assets.Group + expected *assets.Group + }{ + { + name: "GroupByRawJpgWithMixedFiles", + group: assets.NewGroup(assets.GroupByRawJpg, + mockAsset("a.jpg"), + mockAsset("a.raw"), + ), + expected: assets.NewGroup(assets.GroupByNone, + mockAsset("a.raw"), + ), + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := groupRawJPGKeepRaw(tt.group) + if result.Grouping != tt.expected.Grouping { + t.Errorf("expected grouping %v, got %v", tt.expected.Grouping, result.Grouping) + } + if len(result.Assets) != len(tt.expected.Assets) { + t.Errorf("expected %d assets, got %d", len(tt.expected.Assets), len(result.Assets)) + } + for i, asset := range result.Assets { + if asset.File.Name() != tt.expected.Assets[i].File.Name() { + t.Errorf("expected asset %v, got %v", tt.expected.Assets[i].File.Name(), asset.File.Name()) + } + } + }) + } +} + +func TestGroupRawJPGKeepJPG(t *testing.T) { + tests := []struct { + name string + group *assets.Group + expected *assets.Group + }{ + { + name: "GroupByRawJpgWithMixedFiles", + group: assets.NewGroup(assets.GroupByRawJpg, + mockAsset("a.jpg"), + mockAsset("a.raw"), + ), + expected: assets.NewGroup(assets.GroupByNone, + 
mockAsset("a.jpg"), + ), + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := groupRawJPGKeepJPG(tt.group) + if result.Grouping != tt.expected.Grouping { + t.Errorf("expected grouping %v, got %v", tt.expected.Grouping, result.Grouping) + } + if len(result.Assets) != len(tt.expected.Assets) { + t.Errorf("expected %d assets, got %d", len(tt.expected.Assets), len(result.Assets)) + } + for i, asset := range result.Assets { + if asset.File.Name() != tt.expected.Assets[i].File.Name() { + t.Errorf("expected asset %v, got %v", tt.expected.Assets[i].File.Name(), asset.File.Name()) + } + } + }) + } +} diff --git a/internal/filters/filters.go b/internal/filters/filters.go new file mode 100644 index 00000000..7615738d --- /dev/null +++ b/internal/filters/filters.go @@ -0,0 +1,20 @@ +package filters + +import ( + "github.com/simulot/immich-go/internal/assets" +) + +/* +Applies filters to a group of assets. +*/ + +type Filter func(g *assets.Group) *assets.Group + +func ApplyFilters(g *assets.Group, filters ...Filter) *assets.Group { + if g.Grouping != assets.GroupByNone { + for _, f := range filters { + g = f(g) + } + } + return g +} diff --git a/helpers/fshelper/TESTDATA/A/1.jpg b/internal/fshelper/TESTDATA/A/1.jpg similarity index 100% rename from helpers/fshelper/TESTDATA/A/1.jpg rename to internal/fshelper/TESTDATA/A/1.jpg diff --git a/helpers/fshelper/TESTDATA/A/1.json b/internal/fshelper/TESTDATA/A/1.json similarity index 100% rename from helpers/fshelper/TESTDATA/A/1.json rename to internal/fshelper/TESTDATA/A/1.json diff --git a/helpers/fshelper/TESTDATA/A/2.jpg b/internal/fshelper/TESTDATA/A/2.jpg similarity index 100% rename from helpers/fshelper/TESTDATA/A/2.jpg rename to internal/fshelper/TESTDATA/A/2.jpg diff --git a/helpers/fshelper/TESTDATA/A/2.json b/internal/fshelper/TESTDATA/A/2.json similarity index 100% rename from helpers/fshelper/TESTDATA/A/2.json rename to internal/fshelper/TESTDATA/A/2.json diff --git 
a/helpers/fshelper/TESTDATA/A/T/10.jpg b/internal/fshelper/TESTDATA/A/T/10.jpg similarity index 100% rename from helpers/fshelper/TESTDATA/A/T/10.jpg rename to internal/fshelper/TESTDATA/A/T/10.jpg diff --git a/helpers/fshelper/TESTDATA/A/T/10.json b/internal/fshelper/TESTDATA/A/T/10.json similarity index 100% rename from helpers/fshelper/TESTDATA/A/T/10.json rename to internal/fshelper/TESTDATA/A/T/10.json diff --git a/helpers/fshelper/TESTDATA/B/4.jpg b/internal/fshelper/TESTDATA/B/4.jpg similarity index 100% rename from helpers/fshelper/TESTDATA/B/4.jpg rename to internal/fshelper/TESTDATA/B/4.jpg diff --git a/helpers/fshelper/TESTDATA/B/4.json b/internal/fshelper/TESTDATA/B/4.json similarity index 100% rename from helpers/fshelper/TESTDATA/B/4.json rename to internal/fshelper/TESTDATA/B/4.json diff --git a/helpers/fshelper/TESTDATA/B/T/20.jpg b/internal/fshelper/TESTDATA/B/T/20.jpg similarity index 100% rename from helpers/fshelper/TESTDATA/B/T/20.jpg rename to internal/fshelper/TESTDATA/B/T/20.jpg diff --git a/helpers/fshelper/TESTDATA/B/T/20.json b/internal/fshelper/TESTDATA/B/T/20.json similarity index 100% rename from helpers/fshelper/TESTDATA/B/T/20.json rename to internal/fshelper/TESTDATA/B/T/20.json diff --git a/helpers/fshelper/TESTDATA/C.JPG b/internal/fshelper/TESTDATA/C.JPG similarity index 100% rename from helpers/fshelper/TESTDATA/C.JPG rename to internal/fshelper/TESTDATA/C.JPG diff --git a/internal/fshelper/extendedFS.go b/internal/fshelper/extendedFS.go new file mode 100644 index 00000000..f3e482dd --- /dev/null +++ b/internal/fshelper/extendedFS.go @@ -0,0 +1,119 @@ +package fshelper + +import ( + "errors" + "io" + "io/fs" + "os" + "path/filepath" + "strings" +) + +type FSCanWrite interface { + OpenFile(name string, flag int, perm fs.FileMode) (WFile, error) + Mkdir(name string, perm fs.FileMode) error +} + +type FSCanMkdirAll interface { + MkdirAll(path string, perm fs.FileMode) error +} +type FSCanRemove interface { + Remove(name string) 
error +} + +type FSCanStat interface { + Stat(name string) (fs.FileInfo, error) +} + +type FSCanLink interface { + Lstat(name string) (fs.FileInfo, error) + Readlink(name string) (string, error) + MkSymlink(name, target string) error +} + +type FileCanWrite interface { + Write(b []byte) (ret int, err error) +} + +type WFile interface { + fs.File + Write(b []byte) (ret int, err error) +} + +func OpenFile(fsys fs.FS, name string, flag int, perm fs.FileMode) (WFile, error) { + if fsys, ok := fsys.(FSCanWrite); ok { + return fsys.OpenFile(name, flag, perm) + } + return nil, errors.New("openFile not supported") +} + +func Mkdir(fsys fs.FS, name string, perm fs.FileMode) error { + if fsys, ok := fsys.(FSCanWrite); ok { + return fsys.Mkdir(name, perm) + } + return errors.New("mkdir not supported") +} + +func MkdirAll(fsys fs.FS, path string, perm fs.FileMode) error { + if fsys, ok := fsys.(FSCanMkdirAll); ok { + return fsys.MkdirAll(path, perm) + } + if fsys, ok := fsys.(FSCanWrite); ok { + parts := strings.Split(path, "/") + + // parts := strings.Split(path, string(filepath.Separator)) + path = "" + for i := 0; i < len(parts); i++ { + path = filepath.Join(path, parts[i]) + if err := fsys.Mkdir(path, perm); err != nil && !errors.Is(err, os.ErrExist) { + return err + } + } + return nil + } else { + return errors.New("mkdirAll not supported") + } +} + +func Remove(fsys fs.FS, name string) error { + if fsys, ok := fsys.(FSCanRemove); ok { + return fsys.Remove(name) + } + return errors.New("remove not supported") +} + +func Stat(fsys fs.FS, name string) (fs.FileInfo, error) { + if fsys, ok := fsys.(FSCanStat); ok { + return fsys.Stat(name) + } + return nil, errors.New("stat not supported") +} + +func Lstat(fsys fs.FS, name string) (fs.FileInfo, error) { + if fsys, ok := fsys.(FSCanLink); ok { + return fsys.Lstat(name) + } + return nil, errors.New("lstat not supported") +} + +func Readlink(fsys fs.FS, name string) (string, error) { + if fsys, ok := fsys.(FSCanLink); ok { + 
return fsys.Readlink(name) + } + return "", errors.New("readlink not supported") +} + +func WriteFile(fsys fs.FS, name string, r io.Reader) error { + if fsys, ok := fsys.(FSCanWrite); ok { + f, err := fsys.OpenFile(name, os.O_CREATE|os.O_WRONLY, 0o644) + if err != nil { + return err + } + defer f.Close() + if f, ok := f.(FileCanWrite); ok { + _, err = io.Copy(f, r) + return err + } + } + return errors.New("write not supported") +} diff --git a/internal/fshelper/filename.go b/internal/fshelper/filename.go new file mode 100644 index 00000000..d5c94fe9 --- /dev/null +++ b/internal/fshelper/filename.go @@ -0,0 +1,43 @@ +package fshelper + +import ( + "io/fs" + "log/slog" +) + +type FSAndName struct { + fsys fs.FS + name string +} + +func FSName(fsys fs.FS, name string) FSAndName { + return FSAndName{fsys: fsys, name: name} +} + +func (fn FSAndName) LogValue() slog.Value { + return slog.StringValue(fn.FullName()) +} + +func (fn FSAndName) FS() fs.FS { + return fn.fsys +} + +func (fn FSAndName) Name() string { + return fn.name +} + +func (fn FSAndName) FullName() string { + fsys := fn.fsys + if fsys, ok := fsys.(NameFS); ok { + return fsys.Name() + ":" + fn.name + } + return fn.name +} + +func (fn FSAndName) Open() (fs.File, error) { + return fn.fsys.Open(fn.name) +} + +func (fn FSAndName) Stat() (fs.FileInfo, error) { + return fs.Stat(fn.fsys, fn.name) +} diff --git a/helpers/fshelper/globwalkfs.go b/internal/fshelper/globwalkfs.go similarity index 96% rename from helpers/fshelper/globwalkfs.go rename to internal/fshelper/globwalkfs.go index 1a841fde..effd21a3 100644 --- a/helpers/fshelper/globwalkfs.go +++ b/internal/fshelper/globwalkfs.go @@ -42,8 +42,14 @@ func NewGlobWalkFS(pattern string) (fs.FS, error) { parts: []string{magic}, }, nil } else { + name := filepath.Base(dir) + if name == "." 
{ + name, _ = os.Getwd() + name = filepath.Base(name) + } + return &GlobWalkFS{ - rootFS: NewFSWithName(os.DirFS(dir), filepath.Base(dir)), + rootFS: NewFSWithName(os.DirFS(dir), name), dir: dir, }, nil } diff --git a/helpers/fshelper/globwalkfs_test.go b/internal/fshelper/globwalkfs_test.go similarity index 100% rename from helpers/fshelper/globwalkfs_test.go rename to internal/fshelper/globwalkfs_test.go diff --git a/helpers/fshelper/hasmeta.go b/internal/fshelper/hasmeta.go similarity index 100% rename from helpers/fshelper/hasmeta.go rename to internal/fshelper/hasmeta.go diff --git a/internal/fshelper/osfs/osfs.go b/internal/fshelper/osfs/osfs.go new file mode 100644 index 00000000..fb26b7e7 --- /dev/null +++ b/internal/fshelper/osfs/osfs.go @@ -0,0 +1,60 @@ +package osfs + +import ( + "io/fs" + "os" + "path/filepath" + + "github.com/simulot/immich-go/internal/fshelper" +) + +/* + Define a file system that can write, remove, stats,etc... +*/ + +func DirFS(name string) fs.FS { + return dirFS(name) +} + +// check that dirFS implements the interfaces +var ( + _ fshelper.FSCanWrite = dirFS("") + // _ fshelper.FSCanMkdirAll = dirFS("") + _ fshelper.FSCanRemove = dirFS("") + _ fshelper.FSCanStat = dirFS("") + _ fshelper.FSCanLink = dirFS("") +) + +type dirFS string + +func (dir dirFS) Open(name string) (fs.File, error) { + return os.Open(filepath.Join(string(dir), name)) +} + +func (dir dirFS) Stat(name string) (fs.FileInfo, error) { + return os.Stat(filepath.Join(string(dir), name)) +} + +func (dir dirFS) OpenFile(name string, flag int, perm fs.FileMode) (fshelper.WFile, error) { + return os.OpenFile(filepath.Join(string(dir), name), flag, perm) +} + +func (dir dirFS) Mkdir(name string, perm fs.FileMode) error { + return os.Mkdir(filepath.Join(string(dir), name), perm) +} + +func (dir dirFS) Readlink(name string) (string, error) { + return os.Readlink(filepath.Join(string(dir), name)) +} + +func (dir dirFS) Lstat(name string) (fs.FileInfo, error) { + return 
os.Lstat(filepath.Join(string(dir), name)) +} + +func (dir dirFS) MkSymlink(name, target string) error { + return os.Symlink(filepath.Join(string(dir), name), filepath.Join(string(dir), target)) +} + +func (dir dirFS) Remove(name string) error { + return os.Remove(filepath.Join(string(dir), name)) +} diff --git a/helpers/fshelper/parseArgs.go b/internal/fshelper/parseArgs.go similarity index 87% rename from helpers/fshelper/parseArgs.go rename to internal/fshelper/parseArgs.go index e6358073..40ecfed2 100644 --- a/helpers/fshelper/parseArgs.go +++ b/internal/fshelper/parseArgs.go @@ -1,12 +1,13 @@ package fshelper import ( - "archive/zip" "errors" "fmt" "io/fs" "path/filepath" "strings" + + zipname "github.com/simulot/immich-go/internal/zipName" ) // ParsePath return a list of FS bases on args @@ -31,9 +32,9 @@ func ParsePath(args []string) ([]fs.FS, error) { lowF := strings.ToLower(f) switch { case strings.HasSuffix(lowF, ".tgz") || strings.HasSuffix(lowF, ".tar.gz"): - errs = errors.Join(fmt.Errorf("immich-go cant use tgz archives: %s", filepath.Base(a))) + errs = errors.Join(fmt.Errorf("immich-go can't use tgz archives: %s", filepath.Base(a))) case strings.HasSuffix(lowF, ".zip"): - fsys, err := zip.OpenReader(f) + fsys, err := zipname.OpenReader(f) // zip.OpenReader(f) if err != nil { errs = errors.Join(errs, fmt.Errorf("%s: %w", a, err)) continue diff --git a/helpers/fshelper/readjson.go b/internal/fshelper/readjson.go similarity index 72% rename from helpers/fshelper/readjson.go rename to internal/fshelper/readjson.go index 52c1397f..0a9576e8 100644 --- a/helpers/fshelper/readjson.go +++ b/internal/fshelper/readjson.go @@ -22,3 +22,13 @@ func ReadJSON[T any](fsys fs.FS, name string) (*T, error) { return &object, nil } + +func UnmarshalJSON[T any](b []byte) (*T, error) { + var object T + err := json.Unmarshal(b, &object) + if err != nil { + return nil, err + } + + return &object, nil +} diff --git a/helpers/fshelper/removefs.go 
b/internal/fshelper/removefs.go similarity index 100% rename from helpers/fshelper/removefs.go rename to internal/fshelper/removefs.go diff --git a/helpers/fshelper/teereadercloser.go b/internal/fshelper/teereadercloser.go similarity index 100% rename from helpers/fshelper/teereadercloser.go rename to internal/fshelper/teereadercloser.go diff --git a/helpers/gen/maps.go b/internal/gen/maps.go similarity index 54% rename from helpers/gen/maps.go rename to internal/gen/maps.go index 249a9810..3bf6e138 100644 --- a/helpers/gen/maps.go +++ b/internal/gen/maps.go @@ -1,5 +1,11 @@ package gen +import ( + "sort" + + "golang.org/x/exp/constraints" +) + func MapKeys[K comparable, T any](m map[K]T) []K { r := make([]K, len(m)) i := 0 @@ -10,6 +16,19 @@ func MapKeys[K comparable, T any](m map[K]T) []K { return r } +func MapKeysSorted[K constraints.Ordered, T any](m map[K]T) []K { + r := make([]K, len(m)) + i := 0 + for k := range m { + r[i] = k + i++ + } + sort.Slice(r, func(i, j int) bool { + return r[i] < r[j] + }) + return r +} + func MapFilterKeys[K comparable, T any](m map[K]T, f func(i T) bool) []K { r := make([]K, 0, len(m)) for k, v := range m { diff --git a/helpers/gen/slices.go b/internal/gen/slices.go similarity index 100% rename from helpers/gen/slices.go rename to internal/gen/slices.go diff --git a/internal/groups/burst/burst.go b/internal/groups/burst/burst.go new file mode 100644 index 00000000..c4ec6f11 --- /dev/null +++ b/internal/groups/burst/burst.go @@ -0,0 +1,98 @@ +package burst + +import ( + "context" + "time" + + "github.com/simulot/immich-go/internal/assets" + "github.com/simulot/immich-go/internal/filetypes" + "golang.org/x/exp/constraints" +) + +const frameInterval = 500 * time.Millisecond + +// Group groups photos taken within a period of less than 1 second with a digital camera. +// This addresses photos taken with a digital camera when there isn't any burst indication in the file name +// +// Ex: IMG_0001.JPG, IMG_0002.JPG, etc. 
and the date taken is different by a fraction of second +// Ex: IMG_0001.JPG, IMG_0001.RAW, IMG_0002.JPG, IMG_0002.RAW, etc. +// +// Edited images, images identified as as burst already are not considered. +// The in channel receives assets sorted by date taken. +func Group(ctx context.Context, in <-chan *assets.Asset, out chan<- *assets.Asset, gOut chan<- *assets.Group) { + var currentGroup []*assets.Asset + var lastTaken time.Time + + for { + select { + case <-ctx.Done(): + return + case a, ok := <-in: + if !ok { + if len(currentGroup) > 0 { + sendBurstGroup(ctx, out, gOut, currentGroup) + } + return + } + if a.CaptureDate.IsZero() { + // No date taken, no change to group them + select { + case out <- a: + case <-ctx.Done(): + } + continue + } + + // exclude movies, edited or burst images + // exclude images without a date taken + // exclude images taken more than 500ms apart + ni := a.NameInfo + dontGroupMe := ni.Type != filetypes.TypeImage || + a.CaptureDate.IsZero() || + ni.Kind == assets.KindBurst || + ni.Kind == assets.KindEdited || + abs(a.CaptureDate.Sub(lastTaken)) > frameInterval + + if dontGroupMe { + if len(currentGroup) > 0 { + sendBurstGroup(ctx, out, gOut, currentGroup) + } + currentGroup = []*assets.Asset{a} + lastTaken = a.CaptureDate + } else { + currentGroup = append(currentGroup, a) + lastTaken = a.CaptureDate + } + } + } +} + +// abs returns the absolute value of a given integer. +func abs[T constraints.Integer](x T) T { + if x < 0 { + return -x + } + return x +} + +func sendBurstGroup(ctx context.Context, out chan<- *assets.Asset, outg chan<- *assets.Group, as []*assets.Asset) { + if len(as) == 0 { + return + } + if len(as) < 2 { + select { + case out <- as[0]: + case <-ctx.Done(): + } + return + } + + g := assets.NewGroup(assets.GroupByBurst, as...) 
+ g.CoverIndex = 0 // Assuming the first asset is the cover + + select { + case <-ctx.Done(): + return + case outg <- g: + } +} diff --git a/internal/groups/burst/burst_test.go b/internal/groups/burst/burst_test.go new file mode 100644 index 00000000..5d7a455f --- /dev/null +++ b/internal/groups/burst/burst_test.go @@ -0,0 +1,141 @@ +package burst + +import ( + "context" + "reflect" + "testing" + "time" + + "github.com/simulot/immich-go/internal/assets" + "github.com/simulot/immich-go/internal/filenames" + "github.com/simulot/immich-go/internal/filetypes" + "github.com/simulot/immich-go/internal/fshelper" +) + +func mockAsset(ic *filenames.InfoCollector, name string, dateTaken time.Time) *assets.Asset { + a := assets.Asset{ + File: fshelper.FSName(nil, name), + FileDate: dateTaken, + CaptureDate: dateTaken, + } + a.SetNameInfo(ic.GetInfo(name)) + return &a +} + +func TestGroup(t *testing.T) { + ctx := context.Background() + ic := filenames.NewInfoCollector(time.Local, filetypes.DefaultSupportedMedia) + + baseTime := time.Date(2021, 1, 1, 0, 0, 0, 0, time.Local) + // Create assets with a DateTaken interval of 200 milliseconds + testAssets := []*assets.Asset{ + mockAsset(ic, "IMG_001.jpg", baseTime), + mockAsset(ic, "IMG_002.jpg", baseTime.Add(200*time.Millisecond)), // group 1 + mockAsset(ic, "IMG_003.jpg", baseTime.Add(400*time.Millisecond)), // group 1 + mockAsset(ic, "IMG_004.jpg", baseTime.Add(600*time.Millisecond)), // group 1 + mockAsset(ic, "IMG_005.jpg", baseTime.Add(800*time.Millisecond)), // group 1 + mockAsset(ic, "IMG_006.jpg", baseTime.Add(1000*time.Millisecond)), // group 1 + mockAsset(ic, "IMG_007.jpg", baseTime.Add(1200*time.Millisecond)), // group 1 + mockAsset(ic, "IMG_008.jpg", baseTime.Add(1400*time.Millisecond)), // group 1 + mockAsset(ic, "IMG_009.jpg", baseTime.Add(1600*time.Millisecond)), + mockAsset(ic, "IMG_010.jpg", baseTime.Add(5*time.Second)), + mockAsset(ic, "IMG_011.jpg", baseTime.Add(10*time.Second)), + mockAsset(ic, "IMG_012.jpg", 
baseTime.Add(10*time.Second+200*time.Millisecond)), // group 2 + mockAsset(ic, "IMG_013.jpg", baseTime.Add(10*time.Second+400*time.Millisecond)), // group 2 + mockAsset(ic, "IMG_014.jpg", baseTime.Add(15*time.Second)), + mockAsset(ic, "IMG_015.jpg", baseTime.Add(20*time.Second)), + mockAsset(ic, "IMG_016.jpg", baseTime.Add(30*time.Second)), + mockAsset(ic, "IMG_017.jpg", baseTime.Add(30*time.Second+200*time.Millisecond)), // group 3 + mockAsset(ic, "IMG_018.jpg", baseTime.Add(30*time.Second+400*time.Millisecond)), // group 3 + } + + expectedAssets := []*assets.Asset{ + mockAsset(ic, "IMG_010.jpg", baseTime.Add(5*time.Second)), + mockAsset(ic, "IMG_014.jpg", baseTime.Add(15*time.Second)), + mockAsset(ic, "IMG_015.jpg", baseTime.Add(20*time.Second)), + } + + expectedGroup := []*assets.Group{ + assets.NewGroup(assets.GroupByBurst, + mockAsset(ic, "IMG_001.jpg", baseTime), + mockAsset(ic, "IMG_002.jpg", baseTime.Add(200*time.Millisecond)), + mockAsset(ic, "IMG_003.jpg", baseTime.Add(400*time.Millisecond)), + mockAsset(ic, "IMG_004.jpg", baseTime.Add(600*time.Millisecond)), + mockAsset(ic, "IMG_005.jpg", baseTime.Add(800*time.Millisecond)), + mockAsset(ic, "IMG_006.jpg", baseTime.Add(1000*time.Millisecond)), + mockAsset(ic, "IMG_007.jpg", baseTime.Add(1200*time.Millisecond)), + mockAsset(ic, "IMG_008.jpg", baseTime.Add(1400*time.Millisecond)), + mockAsset(ic, "IMG_009.jpg", baseTime.Add(1600*time.Millisecond)), + ), + assets.NewGroup(assets.GroupByBurst, + mockAsset(ic, "IMG_011.jpg", baseTime.Add(10*time.Second)), + mockAsset(ic, "IMG_012.jpg", baseTime.Add(10*time.Second+200*time.Millisecond)), + mockAsset(ic, "IMG_013.jpg", baseTime.Add(10*time.Second+400*time.Millisecond)), + ), + assets.NewGroup(assets.GroupByBurst, + mockAsset(ic, "IMG_016.jpg", baseTime.Add(30*time.Second)), + mockAsset(ic, "IMG_017.jpg", baseTime.Add(30*time.Second+200*time.Millisecond)), + mockAsset(ic, "IMG_018.jpg", baseTime.Add(30*time.Second+400*time.Millisecond)), + ), + } + + in := 
make(chan *assets.Asset, len(testAssets)) + out := make(chan *assets.Asset) + gOut := make(chan *assets.Group) + + go func() { + Group(ctx, in, out, gOut) + close(out) + close(gOut) + }() + + for _, a := range testAssets { + in <- a + } + close(in) + + gotGroups := []*assets.Group{} + gotAssets := []*assets.Asset{} + + doneGroup := false + doneAsset := false + for !doneGroup || !doneAsset { + select { + case group, ok := <-gOut: + if !ok { + doneGroup = true + continue + } + gotGroups = append(gotGroups, group) + case asset, ok := <-out: + if !ok { + doneAsset = true + continue + } + gotAssets = append(gotAssets, asset) + } + } + + if len(gotGroups) != len(expectedGroup) { + t.Errorf("Expected %d groups, got %d", len(expectedGroup), len(gotGroups)) + } else { + for i := range gotGroups { + for j := range gotGroups[i].Assets { + got := gotGroups[i].Assets[j] + expected := expectedGroup[i].Assets[j] + if !reflect.DeepEqual(got, expected) { + t.Errorf("Expected group %d asset %d \n%#v got\n%#v", i, j, expected, got) + } + } + } + } + if len(gotAssets) != len(expectedAssets) { + t.Errorf("Expected %d assets, got %d", len(expectedAssets), len(gotAssets)) + } else { + for i := range gotAssets { + if !reflect.DeepEqual(gotAssets[i], expectedAssets[i]) { + t.Errorf("Expected asset \n%#v got asset \n%#v", expectedAssets[i], gotAssets[i]) + } + } + } +} diff --git a/internal/groups/epsonfastfoto/epsonfastfoto.go b/internal/groups/epsonfastfoto/epsonfastfoto.go new file mode 100644 index 00000000..87a2aafd --- /dev/null +++ b/internal/groups/epsonfastfoto/epsonfastfoto.go @@ -0,0 +1,86 @@ +package epsonfastfoto + +import ( + "context" + "regexp" + + "github.com/simulot/immich-go/internal/assets" + "github.com/simulot/immich-go/internal/filetypes" +) + +var epsonFastFotoRegex = regexp.MustCompile(`^(.*_\d+)(_[ab])?(\.[a-z]+)$`) + +type Group struct { + lastRadical string + coverIndex int + group []*assets.Asset +} + +func (g *Group) Group(ctx context.Context, in <-chan 
*assets.Asset, out chan<- *assets.Asset, gOut chan<- *assets.Group) { + for { + select { + case <-ctx.Done(): + return + case a, ok := <-in: + if !ok { + g.sendGroup(ctx, out, gOut) + return + } + ni := a.NameInfo + matches := epsonFastFotoRegex.FindStringSubmatch(a.File.Name()) + if matches == nil { + g.sendGroup(ctx, out, gOut) + select { + case out <- a: + case <-ctx.Done(): + } + continue + } + + radical := matches[1] + // exclude movies, burst images + dontGroupMe := ni.Type != filetypes.TypeImage || + ni.Kind == assets.KindBurst + + if dontGroupMe { + g.sendGroup(ctx, out, gOut) + continue + } + if g.lastRadical != radical { + g.sendGroup(ctx, out, gOut) + } + g.group = append(g.group, a) + g.lastRadical = radical + if matches[2] == "_a" { + g.coverIndex = len(g.group) - 1 + } + } + } +} + +func (g *Group) sendGroup(ctx context.Context, out chan<- *assets.Asset, outg chan<- *assets.Group) { + defer func() { + g.group = nil + g.lastRadical = "" + g.coverIndex = 0 + }() + if len(g.group) == 0 { + return + } + if len(g.group) < 2 { + select { + case out <- g.group[0]: + case <-ctx.Done(): + } + return + } + + gr := assets.NewGroup(assets.GroupByOther, g.group...) 
+ gr.CoverIndex = g.coverIndex + + select { + case <-ctx.Done(): + return + case outg <- gr: + } +} diff --git a/internal/groups/epsonfastfoto/epsonfastfoto_test.go b/internal/groups/epsonfastfoto/epsonfastfoto_test.go new file mode 100644 index 00000000..6b3eaeb1 --- /dev/null +++ b/internal/groups/epsonfastfoto/epsonfastfoto_test.go @@ -0,0 +1,141 @@ +package epsonfastfoto + +import ( + "context" + "reflect" + "testing" + "time" + + "github.com/simulot/immich-go/internal/assets" + "github.com/simulot/immich-go/internal/filenames" + "github.com/simulot/immich-go/internal/filetypes" + "github.com/simulot/immich-go/internal/fshelper" +) + +func mockAsset(ic *filenames.InfoCollector, name string, dateTaken time.Time) *assets.Asset { + a := assets.Asset{ + File: fshelper.FSName(nil, name), + FileDate: dateTaken, + CaptureDate: dateTaken, + } + a.SetNameInfo(ic.GetInfo(name)) + return &a +} + +func TestGroup(t *testing.T) { + ctx := context.Background() + ic := filenames.NewInfoCollector(time.Local, filetypes.DefaultSupportedMedia) + + baseTime := time.Date(2021, 1, 1, 0, 0, 0, 0, time.Local) + testAssets := []*assets.Asset{ + mockAsset(ic, "SceneryAndWildlife_0001_a.jpg", baseTime), + mockAsset(ic, "SceneryAndWildlife_0001_b.jpg", baseTime.Add(200*time.Millisecond)), + mockAsset(ic, "SceneryAndWildlife_0001.jpg", baseTime.Add(400*time.Millisecond)), + mockAsset(ic, "SceneryAndWildlife_0002_a.jpg", baseTime.Add(600*time.Millisecond)), + mockAsset(ic, "SceneryAndWildlife_0002_b.jpg", baseTime.Add(800*time.Millisecond)), + mockAsset(ic, "SceneryAndWildlife_0002.jpg", baseTime.Add(1000*time.Millisecond)), + mockAsset(ic, "img_0001.jpg", baseTime.Add(1200*time.Millisecond)), + mockAsset(ic, "img_0002.jpg", baseTime.Add(1200*time.Millisecond)), + mockAsset(ic, "SceneryAndWildlife_0003_a.jpg", baseTime.Add(1200*time.Millisecond)), + mockAsset(ic, "SceneryAndWildlife_0003.jpg", baseTime.Add(1400*time.Millisecond)), + mockAsset(ic, "SceneryAndWildlife_0004_a.jpg", 
baseTime.Add(1600*time.Millisecond)), + mockAsset(ic, "SceneryAndWildlife_0004.jpg", baseTime.Add(1800*time.Millisecond)), + mockAsset(ic, "SceneryAndWildlife_0005_a.jpg", baseTime.Add(2000*time.Millisecond)), + mockAsset(ic, "SceneryAndWildlife_0005_b.jpg", baseTime.Add(2200*time.Millisecond)), + mockAsset(ic, "SceneryAndWildlife_0005.jpg", baseTime.Add(2400*time.Millisecond)), + mockAsset(ic, "img_0005.jpg", baseTime.Add(1200*time.Millisecond)), + } + + expectedAssets := []*assets.Asset{ + mockAsset(ic, "img_0001.jpg", baseTime.Add(1200*time.Millisecond)), + mockAsset(ic, "img_0002.jpg", baseTime.Add(1200*time.Millisecond)), + mockAsset(ic, "img_0005.jpg", baseTime.Add(1200*time.Millisecond)), + } + + expectedGroup := []*assets.Group{ + assets.NewGroup(assets.GroupByOther, + mockAsset(ic, "SceneryAndWildlife_0001_a.jpg", baseTime), + mockAsset(ic, "SceneryAndWildlife_0001_b.jpg", baseTime.Add(200*time.Millisecond)), + mockAsset(ic, "SceneryAndWildlife_0001.jpg", baseTime.Add(400*time.Millisecond)), + ).SetCover(0), + assets.NewGroup(assets.GroupByOther, + mockAsset(ic, "SceneryAndWildlife_0002_a.jpg", baseTime.Add(600*time.Millisecond)), + mockAsset(ic, "SceneryAndWildlife_0002_b.jpg", baseTime.Add(800*time.Millisecond)), + mockAsset(ic, "SceneryAndWildlife_0002.jpg", baseTime.Add(1000*time.Millisecond)), + ).SetCover(0), + assets.NewGroup(assets.GroupByOther, + mockAsset(ic, "SceneryAndWildlife_0003_a.jpg", baseTime.Add(1200*time.Millisecond)), + mockAsset(ic, "SceneryAndWildlife_0003.jpg", baseTime.Add(1400*time.Millisecond)), + ).SetCover(0), + assets.NewGroup(assets.GroupByOther, + mockAsset(ic, "SceneryAndWildlife_0004_a.jpg", baseTime.Add(1600*time.Millisecond)), + mockAsset(ic, "SceneryAndWildlife_0004.jpg", baseTime.Add(1800*time.Millisecond)), + ).SetCover(0), + assets.NewGroup(assets.GroupByOther, + mockAsset(ic, "SceneryAndWildlife_0005_a.jpg", baseTime.Add(2000*time.Millisecond)), + mockAsset(ic, "SceneryAndWildlife_0005_b.jpg", 
baseTime.Add(2200*time.Millisecond)), + mockAsset(ic, "SceneryAndWildlife_0005.jpg", baseTime.Add(2400*time.Millisecond)), + ).SetCover(0), + } + + in := make(chan *assets.Asset, len(testAssets)) + out := make(chan *assets.Asset) + gOut := make(chan *assets.Group) + + go func() { + g := &Group{} + g.Group(ctx, in, out, gOut) + close(out) + close(gOut) + }() + + for _, a := range testAssets { + in <- a + } + close(in) + + gotGroups := []*assets.Group{} + gotAssets := []*assets.Asset{} + + doneGroup := false + doneAsset := false + for !doneGroup || !doneAsset { + select { + case group, ok := <-gOut: + if !ok { + doneGroup = true + continue + } + gotGroups = append(gotGroups, group) + case asset, ok := <-out: + if !ok { + doneAsset = true + continue + } + gotAssets = append(gotAssets, asset) + } + } + + if len(gotGroups) != len(expectedGroup) { + t.Errorf("Expected %d groups, got %d", len(expectedGroup), len(gotGroups)) + } else { + for i := range gotGroups { + for j := range gotGroups[i].Assets { + got := gotGroups[i].Assets[j] + expected := expectedGroup[i].Assets[j] + if !reflect.DeepEqual(got, expected) { + t.Errorf("Expected group %d asset %d \n%#v got\n%#v", i, j, expected, got) + } + } + } + } + if len(gotAssets) != len(expectedAssets) { + t.Errorf("Expected 0 assets, got %d", len(gotAssets)) + } else { + for i := range gotAssets { + if !reflect.DeepEqual(gotAssets[i], expectedAssets[i]) { + t.Errorf("Expected asset \n%#v got asset \n%#v", expectedAssets[i], gotAssets[i]) + } + } + } +} diff --git a/internal/groups/groups.go b/internal/groups/groups.go new file mode 100644 index 00000000..6cb81327 --- /dev/null +++ b/internal/groups/groups.go @@ -0,0 +1,109 @@ +package groups + +import ( + "context" + "sync" + + "github.com/simulot/immich-go/internal/assets" +) + +// A group of assets link assets that are linked together. This +// allows a specific treatment of the group. 
+// +// Groups can be: +// - A photo and a movie as for motion picture or live photo +// - A couple of RAW and JPG image +// - A burst of photos +// - A photo and its edited version +// +// A group has an asset that represents the group: +// - for Raw/JPG --> the JPG +// - for Bursts: the photo identified as the cover +// - not relevant for live photo +// +// All group's assets can be added to 0 or more albums + +// Grouper is an interface for a type that can group assets. +type Grouper func(ctx context.Context, in <-chan *assets.Asset, out chan<- *assets.Asset, gOut chan<- *assets.Group) + +/* +A grouper pipeline is a chain of groupers that process assets in sequence. +The 1st grouper should be the one that detects the most specific groups, and the last one should detect the most generic ones. +This way, the most specific groups are detected first, and the most generic ones are detected last. +*/ + +type GrouperPipeline struct { + groupers []Grouper +} + +func NewGrouperPipeline(ctx context.Context, gs ...Grouper) *GrouperPipeline { + g := &GrouperPipeline{ + groupers: gs, + } + return g +} + +// PipeGrouper groups assets in a pipeline of groupers. +// Group opens and closes intermediate channels as required. 
+func (p *GrouperPipeline) PipeGrouper(ctx context.Context, in chan *assets.Asset) chan *assets.Group { + // Create channels + gOut := make(chan *assets.Group) // output channel for groups + out := make(chan *assets.Asset) // output channel for the last grouper + + inChans := make([]chan *assets.Asset, len(p.groupers)) + outChans := make([]chan *assets.Asset, len(p.groupers)) + + // initialize channels for each grouper + for i := range p.groupers { + if i == 0 { + inChans[i] = in + } else { + inChans[i] = outChans[i-1] + } + if i < len(p.groupers)-1 { + outChans[i] = make(chan *assets.Asset) // intermediate channels between groupers + } else { + outChans[i] = out + } + } + + // call groupers with the appropriate channels + wg := sync.WaitGroup{} + for i := range p.groupers { + wg.Add(1) + go func(i int) { + defer wg.Done() + p.groupers[i](ctx, inChans[i], outChans[i], gOut) + if i < len(p.groupers)-1 { + close(outChans[i]) // close intermediate channels + } + }(i) + } + + // wait for all groupers to finish and close the output channel + go func() { + wg.Wait() + close(out) + }() + + // groups standalone assets + go func() { + defer close(gOut) + for { + select { + case <-ctx.Done(): + return + default: + a, ok := <-out + if !ok { + return + } + if a != nil { + gOut <- assets.NewGroup(assets.GroupByNone, a) + } + } + } + }() + + return gOut +} diff --git a/internal/groups/groups_test.go b/internal/groups/groups_test.go new file mode 100644 index 00000000..efd2b9d8 --- /dev/null +++ b/internal/groups/groups_test.go @@ -0,0 +1,180 @@ +package groups_test + +import ( + "context" + "reflect" + "sort" + "testing" + "time" + + "github.com/simulot/immich-go/internal/assets" + "github.com/simulot/immich-go/internal/filenames" + "github.com/simulot/immich-go/internal/filetypes" + "github.com/simulot/immich-go/internal/fshelper" + "github.com/simulot/immich-go/internal/groups" + "github.com/simulot/immich-go/internal/groups/burst" + 
"github.com/simulot/immich-go/internal/groups/series" +) + +func mockAsset(ic *filenames.InfoCollector, name string, dateTaken time.Time) *assets.Asset { + a := assets.Asset{ + File: fshelper.FSName(nil, name), + FileDate: dateTaken, + CaptureDate: dateTaken, + } + a.SetNameInfo(ic.GetInfo(name)) + return &a +} + +func TestGroup(t *testing.T) { + ic := filenames.NewInfoCollector(time.Local, filetypes.DefaultSupportedMedia) + t0 := time.Date(2021, 1, 1, 0, 0, 0, 0, time.Local) + + testAssets := []*assets.Asset{ + mockAsset(ic, "photo1.jpg", t0.Add(50*time.Hour)), + mockAsset(ic, "photo2.jpg", t0.Add(55*time.Hour)), + mockAsset(ic, "IMG_001.jpg", t0), // Group 1 + mockAsset(ic, "IMG_002.jpg", t0.Add(200*time.Millisecond)), // Group 1 + mockAsset(ic, "IMG_003.jpg", t0.Add(400*time.Millisecond)), // Group 1 + mockAsset(ic, "IMG_004.jpg", t0.Add(600*time.Millisecond)), // Group 1 + mockAsset(ic, "IMG_005.jpg", t0.Add(800*time.Millisecond)), // Group 1 + mockAsset(ic, "IMG_006.jpg", t0.Add(1000*time.Millisecond)), // Group 1 + mockAsset(ic, "IMG_007.jpg", t0.Add(1200*time.Millisecond)), // Group 1 + mockAsset(ic, "IMG_008.jpg", t0.Add(1400*time.Millisecond)), // Group 1 + mockAsset(ic, "IMG_009.jpg", t0.Add(1600*time.Millisecond)), // Group 1 + mockAsset(ic, "photo3.jpg", t0.Add(5*time.Hour)), + mockAsset(ic, "photo4.jpg", t0.Add(6*time.Hour)), + mockAsset(ic, "IMG_001.jpg", t0.Add(7*time.Hour)), + mockAsset(ic, "IMG_20231014_183246_BURST001_COVER.jpg", time.Date(2023, 10, 14, 18, 32, 46, 0, time.Local)), // Group 2 + mockAsset(ic, "IMG_20231014_183246_BURST002.jpg", time.Date(2023, 10, 14, 18, 32, 46, 0, time.Local)), // Group 2 + mockAsset(ic, "IMG_003.jpg", t0.Add(9*time.Hour)), // Group 3 + mockAsset(ic, "IMG_003.raw", t0.Add(9*time.Hour)), // Group 3 + mockAsset(ic, "IMG_004.heic", t0.Add(10*time.Hour)), // Group 4 + mockAsset(ic, "IMG_004.jpg", t0.Add(10*time.Hour+100*time.Millisecond)), // Group 4 + mockAsset(ic, "IMG_005.raw", t0.Add(100*time.Hour)), + 
mockAsset(ic, "IMG_005.jpg", t0.Add(101*time.Hour)), + mockAsset(ic, "00001IMG_00001_BURST20210101153000.jpg", time.Date(2021, 1, 1, 15, 30, 0, 0, time.Local)), // Group 5 + mockAsset(ic, "00002IMG_00002_BURST20210101153000_COVER.jpg", time.Date(2021, 1, 1, 15, 30, 0, 0, time.Local)), // Group 5 + mockAsset(ic, "00003IMG_00003_BURST20210101153000.jpg", time.Date(2021, 1, 1, 15, 30, 0, 0, time.Local)), // Group 5 + mockAsset(ic, "IMG_006.heic", t0.Add(110*time.Hour)), + mockAsset(ic, "photo5.jpg", t0.Add(120*time.Hour)), + mockAsset(ic, "photo6.jpg", t0.Add(130*time.Hour)), + } + + expectedGroup := []*assets.Group{ + assets.NewGroup(assets.GroupByBurst, + mockAsset(ic, "00001IMG_00001_BURST20210101153000.jpg", time.Date(2021, 1, 1, 15, 30, 0, 0, time.Local)), + mockAsset(ic, "00002IMG_00002_BURST20210101153000_COVER.jpg", time.Date(2021, 1, 1, 15, 30, 0, 0, time.Local)), + mockAsset(ic, "00003IMG_00003_BURST20210101153000.jpg", time.Date(2021, 1, 1, 15, 30, 0, 0, time.Local)), + ).SetCover(1), + assets.NewGroup(assets.GroupByBurst, + mockAsset(ic, "IMG_001.jpg", t0), + mockAsset(ic, "IMG_002.jpg", t0.Add(200*time.Millisecond)), + mockAsset(ic, "IMG_003.jpg", t0.Add(400*time.Millisecond)), + mockAsset(ic, "IMG_004.jpg", t0.Add(600*time.Millisecond)), + mockAsset(ic, "IMG_005.jpg", t0.Add(800*time.Millisecond)), + mockAsset(ic, "IMG_006.jpg", t0.Add(1000*time.Millisecond)), + mockAsset(ic, "IMG_007.jpg", t0.Add(1200*time.Millisecond)), + mockAsset(ic, "IMG_008.jpg", t0.Add(1400*time.Millisecond)), + mockAsset(ic, "IMG_009.jpg", t0.Add(1600*time.Millisecond)), + ).SetCover(0), + assets.NewGroup(assets.GroupByBurst, + mockAsset(ic, "IMG_20231014_183246_BURST001_COVER.jpg", time.Date(2023, 10, 14, 18, 32, 46, 0, time.Local)), + mockAsset(ic, "IMG_20231014_183246_BURST002.jpg", time.Date(2023, 10, 14, 18, 32, 46, 0, time.Local)), + ).SetCover(0), + assets.NewGroup(assets.GroupByHeicJpg, + mockAsset(ic, "IMG_004.heic", t0.Add(10*time.Hour)), + mockAsset(ic, "IMG_004.jpg", 
t0.Add(10*time.Hour+100*time.Millisecond)), + ), + assets.NewGroup(assets.GroupByRawJpg, + mockAsset(ic, "IMG_003.jpg", t0.Add(9*time.Hour)), + mockAsset(ic, "IMG_003.raw", t0.Add(9*time.Hour)), + ), + } + + expectedAssets := []*assets.Asset{ + mockAsset(ic, "photo1.jpg", t0.Add(50*time.Hour)), + mockAsset(ic, "photo2.jpg", t0.Add(55*time.Hour)), + mockAsset(ic, "photo3.jpg", t0.Add(5*time.Hour)), + mockAsset(ic, "photo4.jpg", t0.Add(6*time.Hour)), + mockAsset(ic, "IMG_001.jpg", t0.Add(7*time.Hour)), + mockAsset(ic, "IMG_005.raw", t0.Add(100*time.Hour)), + mockAsset(ic, "IMG_005.jpg", t0.Add(101*time.Hour)), + mockAsset(ic, "IMG_006.heic", t0.Add(110*time.Hour)), + mockAsset(ic, "photo5.jpg", t0.Add(120*time.Hour)), + mockAsset(ic, "photo6.jpg", t0.Add(130*time.Hour)), + } + + // inject assets in the input channel + in := make(chan *assets.Asset) + go func() { + for _, a := range testAssets { + in <- a + } + close(in) + }() + + // collect the outputs in gotGroups and gotAssets + var gotGroups []*assets.Group + var gotAssets []*assets.Asset + ctx := context.Background() + + gOut := groups.NewGrouperPipeline(ctx, burst.Group, series.Group).PipeGrouper(ctx, in) + for g := range gOut { + switch g.Grouping { + case assets.GroupByNone: + gotAssets = append(gotAssets, g.Assets...) 
+ default: + gotGroups = append(gotGroups, g) + } + } + + sortGroupFn := func(s []*assets.Group) func(i, j int) bool { + return func(i, j int) bool { + if s[i].Assets[0].Radical == s[j].Assets[0].Radical { + return s[i].Assets[0].CaptureDate.Before(s[j].Assets[0].CaptureDate) + } + return s[i].Assets[0].Radical < s[j].Assets[0].Radical + } + } + + sort.Slice(expectedGroup, sortGroupFn(expectedGroup)) + sort.Slice(gotGroups, sortGroupFn(gotGroups)) + if len(gotGroups) != len(expectedGroup) { + t.Errorf("Expected %d group, got %d", len(expectedGroup), len(gotGroups)) + } else { + for i := range gotGroups { + for j := range gotGroups[i].Assets { + got := gotGroups[i].Assets[j] + expected := expectedGroup[i].Assets[j] + if !reflect.DeepEqual(got, expected) { + t.Errorf("Expected group %d asset %d \n%#v got\n%#v", i, j, expected, got) + } + } + } + } + + sortAssetFn := func(s []*assets.Asset) func(i, j int) bool { + return func(i, j int) bool { + if s[i].Radical == s[j].Radical { + if s[i].Index == s[j].Index { + return s[i].CaptureDate.Before(s[j].CaptureDate) + } + return s[i].Index < s[j].Index + } + return s[i].Radical < s[j].Radical + } + } + + sort.Slice(expectedAssets, sortAssetFn(expectedAssets)) + sort.Slice(gotAssets, sortAssetFn(gotAssets)) + if len(gotAssets) != len(expectedAssets) { + t.Errorf("Expected %d assets, got %d", len(expectedAssets), len(gotAssets)) + } else { + for i := range gotAssets { + if !reflect.DeepEqual(gotAssets[i], expectedAssets[i]) { + t.Errorf("Expected asset \n%#v got asset \n%#v", expectedAssets[i], gotAssets[i]) + } + } + } +} diff --git a/internal/groups/series/series.go b/internal/groups/series/series.go new file mode 100644 index 00000000..998ad3ed --- /dev/null +++ b/internal/groups/series/series.go @@ -0,0 +1,121 @@ +package series + +/* This package implements a group builder for series of images. +A series is a group of images with the same radical part in their name. 
+*/ + +import ( + "context" + "time" + + "github.com/simulot/immich-go/internal/assets" + "github.com/simulot/immich-go/internal/filetypes" + "golang.org/x/exp/constraints" +) + +// Group groups assets by series, based on the radical part of the name. +// the in channel receives assets sorted by radical, then by date taken. +func Group(ctx context.Context, in <-chan *assets.Asset, out chan<- *assets.Asset, gOut chan<- *assets.Group) { + currentRadical := "" + currentGroup := []*assets.Asset{} + + for { + select { + case <-ctx.Done(): + return + case a, ok := <-in: + if !ok { + if len(currentGroup) > 0 { + sendGroup(ctx, out, gOut, currentGroup) + } + return + } + + if r := a.Radical; r != currentRadical { + if len(currentGroup) > 0 { + sendGroup(ctx, out, gOut, currentGroup) + currentGroup = []*assets.Asset{} + } + currentRadical = r + } + currentGroup = append(currentGroup, a) + } + } +} + +func sendGroup(ctx context.Context, out chan<- *assets.Asset, outg chan<- *assets.Group, as []*assets.Asset) { + if len(as) < 2 { + // Not a series + sendAsset(ctx, out, as) + return + } + grouping := assets.GroupByOther + + gotJPG := false + gotRAW := false + gotHEIC := false + + cover := 0 + // determine if the group is a burst + for i, a := range as { + gotJPG = gotJPG || a.Ext == ".jpg" + gotRAW = gotRAW || filetypes.IsRawFile(a.Ext) + gotHEIC = gotHEIC || a.Ext == ".heic" || a.Ext == ".heif" + if grouping == assets.GroupByOther { + switch a.Kind { + case assets.KindBurst: + grouping = assets.GroupByBurst + } + } + if a.IsCover { + cover = i + } + } + + // If we have only two assets, we can try to group them as raw/jpg or heic/jpg + if len(as) == 2 { + if grouping == assets.GroupByOther { + if gotJPG && gotRAW && !gotHEIC { + grouping = assets.GroupByRawJpg + } else if gotJPG && !gotRAW && gotHEIC { + grouping = assets.GroupByHeicJpg + } + } + // check the delay between the two assets, if it's too long, we don't group them + if grouping == assets.GroupByRawJpg || grouping 
== assets.GroupByHeicJpg { + d := as[0].CaptureDate + if abs(d.Sub(as[1].CaptureDate)) > 1*time.Second { + sendAsset(ctx, out, as) + return + } + } + } + + // good to go + g := assets.NewGroup(grouping, as...) + g.CoverIndex = cover + + select { + case <-ctx.Done(): + return + case outg <- g: + } +} + +// sendAsset sends assets of the group as individual assets to the output channel +func sendAsset(ctx context.Context, out chan<- *assets.Asset, assets []*assets.Asset) { + for _, a := range assets { + select { + case out <- a: + case <-ctx.Done(): + return + } + } +} + +func abs[T constraints.Integer](x T) T { + if x < 0 { + return -x + } + return x +} diff --git a/internal/groups/series/series_test.go b/internal/groups/series/series_test.go new file mode 100644 index 00000000..2801ca8e --- /dev/null +++ b/internal/groups/series/series_test.go @@ -0,0 +1,156 @@ +package series + +import ( + "context" + "reflect" + "sort" + "testing" + "time" + + "github.com/simulot/immich-go/internal/assets" + "github.com/simulot/immich-go/internal/filenames" + "github.com/simulot/immich-go/internal/filetypes" + "github.com/simulot/immich-go/internal/fshelper" +) + +func mockAsset(ic *filenames.InfoCollector, name string, dateTaken time.Time) *assets.Asset { + a := assets.Asset{ + File: fshelper.FSName(nil, name), + FileDate: dateTaken, + CaptureDate: dateTaken, + } + a.SetNameInfo(ic.GetInfo(name)) + return &a +} + +func sortAssetFn(s []*assets.Asset) func(i, j int) bool { + return func(i, j int) bool { + if s[i].Radical == s[j].Radical { + if s[i].Index == s[j].Index { + return s[i].CaptureDate.Before(s[j].CaptureDate) + } + return s[i].Index < s[j].Index + } + return s[i].Radical < s[j].Radical + } +} + +func sortGroupFn(s []*assets.Group) func(i, j int) bool { + return func(i, j int) bool { + if s[i].Assets[0].Radical == s[j].Assets[0].Radical { + return s[i].Assets[0].CaptureDate.Before(s[j].Assets[0].CaptureDate) + } + return s[i].Assets[0].Radical < s[j].Assets[0].Radical + } 
+} + +func TestGroup(t *testing.T) { + ctx := context.Background() + ic := filenames.NewInfoCollector(time.Local, filetypes.DefaultSupportedMedia) + baseTime := time.Date(2021, 1, 1, 0, 0, 0, 0, time.Local) + + as := []*assets.Asset{ + mockAsset(ic, "IMG_0001.jpg", baseTime), + mockAsset(ic, "IMG_20231014_183246_BURST001_COVER.jpg", baseTime.Add(1*time.Hour)), // group 1 + mockAsset(ic, "IMG_20231014_183246_BURST002.jpg", baseTime.Add(1*time.Hour)), // group 1 + mockAsset(ic, "IMG_20231014_183246_BURST003.jpg", baseTime.Add(1*time.Hour)), // group 1 + mockAsset(ic, "IMG_0003.jpg", baseTime.Add(2*time.Hour)), // group 2 + mockAsset(ic, "IMG_0003.raw", baseTime.Add(2*time.Hour)), // group 2 + mockAsset(ic, "IMG_0004.heic", baseTime.Add(3*time.Hour)), // group 3 + mockAsset(ic, "IMG_0004.jpg", baseTime.Add(3*time.Hour)), // group 3 + mockAsset(ic, "IMG_0005.raw", baseTime.Add(4*time.Hour)), + mockAsset(ic, "IMG_0006.heic", baseTime.Add(4*time.Hour)), + mockAsset(ic, "IMG_0007.raw", baseTime.Add(5*time.Hour)), + mockAsset(ic, "IMG_0007.jpg", baseTime.Add(6*time.Hour)), + } + + expectedAssets := []*assets.Asset{ + mockAsset(ic, "IMG_0001.jpg", baseTime), + mockAsset(ic, "IMG_0005.raw", baseTime.Add(4*time.Hour)), + mockAsset(ic, "IMG_0006.heic", baseTime.Add(4*time.Hour)), + mockAsset(ic, "IMG_0007.raw", baseTime.Add(5*time.Hour)), + mockAsset(ic, "IMG_0007.jpg", baseTime.Add(6*time.Hour)), + } + + expectedGroup := []*assets.Group{ + assets.NewGroup(assets.GroupByBurst, + mockAsset(ic, "IMG_20231014_183246_BURST001_COVER.jpg", baseTime.Add(1*time.Hour)), // group 1 + mockAsset(ic, "IMG_20231014_183246_BURST002.jpg", baseTime.Add(1*time.Hour)), // group 1 + mockAsset(ic, "IMG_20231014_183246_BURST003.jpg", baseTime.Add(1*time.Hour)), // group 1 + ), + assets.NewGroup(assets.GroupByRawJpg, + mockAsset(ic, "IMG_0003.jpg", baseTime.Add(2*time.Hour)), + mockAsset(ic, "IMG_0003.raw", baseTime.Add(2*time.Hour)), + ), + assets.NewGroup(assets.GroupByHeicJpg, + mockAsset(ic, 
"IMG_0004.heic", baseTime.Add(3*time.Hour)), + mockAsset(ic, "IMG_0004.jpg", baseTime.Add(3*time.Hour)), + ), + } + + sort.Slice(as, sortAssetFn(as)) + sort.Slice(expectedGroup, sortGroupFn(expectedGroup)) + source := make(chan *assets.Asset, len(as)) + out := make(chan *assets.Asset) + gOut := make(chan *assets.Group) + + go func() { + for _, asset := range as { + source <- asset + } + close(source) + }() + + go func() { + Group(ctx, source, out, gOut) + close(out) + close(gOut) + }() + + gotGroups := []*assets.Group{} + gotAssets := []*assets.Asset{} + + doneGroup := false + doneAsset := false + for !doneGroup || !doneAsset { + select { + case group, ok := <-gOut: + if !ok { + doneGroup = true + continue + } + gotGroups = append(gotGroups, group) + case asset, ok := <-out: + if !ok { + doneAsset = true + continue + } + gotAssets = append(gotAssets, asset) + } + } + + sort.Slice(gotGroups, sortGroupFn(gotGroups)) + + if len(gotGroups) != len(expectedGroup) { + t.Errorf("Expected %d groups, got %d", len(expectedGroup), len(gotGroups)) + } else { + for i := range gotGroups { + for j := range gotGroups[i].Assets { + got := gotGroups[i].Assets[j] + expected := expectedGroup[i].Assets[j] + if !reflect.DeepEqual(got, expected) { + t.Errorf("Expected group %d asset %d \n%#v got\n%#v", i, j, expected, got) + } + } + } + } + if len(gotAssets) != len(expectedAssets) { + t.Errorf("Expected %d assets, got %d", len(expectedAssets), len(gotAssets)) + } else { + for i := range gotAssets { + if !reflect.DeepEqual(gotAssets[i], expectedAssets[i]) { + t.Errorf("Expected asset \n%#v got asset \n%#v", expectedAssets[i], gotAssets[i]) + } + } + } +} diff --git a/internal/immichfs/fs_test.go b/internal/immichfs/fs_test.go new file mode 100644 index 00000000..aa46229f --- /dev/null +++ b/internal/immichfs/fs_test.go @@ -0,0 +1,153 @@ +package immichfs + +import ( + "bytes" + "context" + "io" + "net/http" + "net/http/httptest" + "testing" + + "github.com/simulot/immich-go/immich" +) + 
+func newTestImmichServer(_ *testing.T) *immich.ImmichClient { //nolint + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + switch { + case r.URL.Path == "/api/users/me": + w.WriteHeader(http.StatusOK) + w.Write([]byte(`{"id":"1","email":"test@email.com"}`)) // nolint + case r.URL.Path == "/api/server/media-types": + w.WriteHeader(http.StatusOK) + w.Write([]byte(`{"image":[".jpg",".png"],"video":[".mp4"]}`)) // nolint + case r.URL.Path == "/api/server/ping": + w.WriteHeader(http.StatusOK) + w.Write([]byte(`{"res":"pong"}`)) // nolint + + case r.URL.Path == "/api/assets/test-asset-id": + w.WriteHeader(http.StatusOK) + // w.Write([]byte(`{"id":"test-asset-id","name":"test-asset","type":"image","size":1024}`)) // nolint + w.Write([]byte(asssetinfo)) // nolint + + case r.URL.Path == "/api/assets/test-asset-id/original": + w.WriteHeader(http.StatusOK) + w.Write([]byte(`original asset content`)) // nolint + } + })) + client, _ := immich.NewImmichClient(server.URL, "test-key") + return client +} + +func TestImmichfs(t *testing.T) { + ctx := context.Background() + client := newTestImmichServer(t) + ifs := NewImmichFS(ctx, "testclient", client) + + file, err := ifs.Open("test-asset-id") + if err != nil { + t.Fatalf("expected no error, got %v", err) + } + + buf := bytes.NewBuffer(nil) + _, err = io.Copy(buf, file) + if err != nil { + t.Fatalf("expected no error, got %v", err) + } + + if buf.String() != "original asset content" { + t.Fatalf("expected 'original asset content', got %v", buf.String()) + } +} + +var asssetinfo = `{ + "id": "test-asset-id", + "deviceAssetId": "2cdcf6af-d13c-4080-a59e-353818a8cf3a.jpg-120645", + "ownerId": "df17ccde-c94a-4b48-bb51-cf977115a722", + "owner": { + "id": "df17ccde-c94a-4b48-bb51-cf977115a722", + "email": "demo@immich.app", + "name": "demo", + "profileImagePath": "", + "avatarColor": "yellow", + "profileChangedAt": "2024-10-15T19:33:53.081Z" + }, + "deviceId": "gl65", + "libraryId": null, + 
"type": "IMAGE", + "originalPath": "upload/upload/df17ccde-c94a-4b48-bb51-cf977115a722/b8/b1/b8b1cbe1-bd97-4725-962c-62f7cf68b427.jpg", + "originalFileName": "test asset.jpg", + "originalMimeType": "image/jpeg", + "thumbhash": "nPgNDYR5aIiDd4iAh4iHi/yNhwgn", + "fileCreatedAt": "2024-07-07T13:31:46.000Z", + "fileModifiedAt": "2024-07-07T13:31:46.000Z", + "localDateTime": "2024-07-07T13:31:46.000Z", + "updatedAt": "2024-11-07T18:57:19.277Z", + "isFavorite": false, + "isArchived": false, + "isTrashed": false, + "duration": "0:00:00.00000", + "exifInfo": { + "make": null, + "model": null, + "exifImageWidth": 1600, + "exifImageHeight": 1200, + "fileSizeInByte": 120645, + "orientation": null, + "dateTimeOriginal": "2024-07-07T13:31:46.000Z", + "modifyDate": "2024-07-07T13:31:46.000Z", + "timeZone": null, + "lensModel": null, + "fNumber": null, + "focalLength": null, + "iso": null, + "exposureTime": null, + "latitude": null, + "longitude": null, + "city": null, + "state": null, + "country": null, + "description": "", + "projectionType": null, + "rating": null + }, + "livePhotoVideoId": null, + "tags": [ + { + "id": "a694d813-a787-4694-b012-60896e5cb5fd", + "parentId": "85cdba4b-3614-4fac-bf32-d5276de6ab72", + "name": "outdoors", + "value": "activities/outdoors", + "createdAt": "2024-11-07T18:57:08.281Z", + "updatedAt": "2024-11-08T18:11:28.816Z" + } + ], + "people": [ + { + "id": "9d9ca38f-48e1-4e46-9ddf-a12a500a7284", + "name": "", + "birthDate": null, + "thumbnailPath": "upload/thumbs/df17ccde-c94a-4b48-bb51-cf977115a722/9d/9c/9d9ca38f-48e1-4e46-9ddf-a12a500a7284.jpeg", + "isHidden": false, + "updatedAt": "2024-11-07T18:21:26.716Z", + "faces": [ + { + "id": "fe231f45-0069-4ab8-8653-f2b20075957b", + "imageHeight": 1200, + "imageWidth": 1600, + "boundingBoxX1": 822, + "boundingBoxX2": 1043, + "boundingBoxY1": 240, + "boundingBoxY2": 548, + "sourceType": "machine-learning" + } + ] + } + ], + "unassignedFaces": [], + "checksum": "3A/J/6HAHpyOpSQirnn/9NSpWwA=", + "stack": 
null, + "isOffline": false, + "hasMetadata": true, + "duplicateId": null, + "resized": true +}` diff --git a/internal/immichfs/immich.go b/internal/immichfs/immich.go new file mode 100644 index 00000000..41d23b0b --- /dev/null +++ b/internal/immichfs/immich.go @@ -0,0 +1,124 @@ +package immichfs + +import ( + "context" + "io" + "io/fs" + "time" + + "github.com/simulot/immich-go/immich" +) + +/* +Implement the immichfs package let read assets from an immich server + +*/ + +var _ fs.FS = (*ImmichFS)(nil) + +type ImmichFS struct { + ctx context.Context + client immich.ImmichInterface + url string +} + +// NewImmichFS creates a new ImmichFS using the client +func NewImmichFS(ctx context.Context, url string, client immich.ImmichInterface) *ImmichFS { + return &ImmichFS{ + ctx: ctx, + client: client, + url: url, + } +} + +var _ fs.File = (*ImmichFile)(nil) + +type ImmichFile struct { + ctx context.Context + cancel func(err error) + info *fsFileInfo + + rc io.ReadCloser +} + +// Open opens the named file for reading. +// name is the ID of the asset +func (ifs *ImmichFS) Open(name string) (fs.File, error) { + ctx, cancel := context.WithCancelCause(ifs.ctx) + + fi, err := ifs.Stat(name) + if err != nil { + cancel(err) + return nil, err + } + + rc, err := ifs.client.DownloadAsset(ctx, name) + if err != nil { + cancel(err) + return nil, err + } + file := &ImmichFile{ + ctx: ctx, + cancel: cancel, + info: fi, + rc: rc, + } + + return file, nil +} + +func (ifs *ImmichFS) Name() string { + return ifs.url +} + +// Read reads up to len(b) bytes from the file. It returns the number of bytes read and an error, if any. +func (file *ImmichFile) Read(b []byte) (n int, err error) { + return file.rc.Read(b) +} + +// Close closes the file, rendering it unusable for I/O. +func (file *ImmichFile) Close() error { + if file.rc != nil { + file.cancel(file.rc.Close()) + } + return nil +} + +// Stat returns a FileInfo describing the file. 
+// name is the ID of the asset +func (file *ImmichFile) Stat() (fs.FileInfo, error) { + return file.info, nil +} + +// Stat returns a FileInfo describing the file. +// Name is the ID of the asset +func (ifs *ImmichFS) Stat(name string) (*fsFileInfo, error) { + a, err := ifs.client.GetAssetInfo(ifs.ctx, name) + if err != nil { + return nil, err + } + return &fsFileInfo{ + name: a.OriginalFileName, + size: a.ExifInfo.FileSizeInByte, + mode: fs.FileMode(0o444), // read-only mode + modTime: a.ExifInfo.DateTimeOriginal.Unix(), + isDir: false, + }, nil +} + +var _ fs.FileInfo = (*fsFileInfo)(nil) + +type fsFileInfo struct { + name string + size int64 + mode fs.FileMode + modTime int64 + isDir bool +} + +func (fi *fsFileInfo) Name() string { return fi.name } +func (fi *fsFileInfo) Size() int64 { return fi.size } +func (fi *fsFileInfo) Mode() fs.FileMode { return fi.mode } +func (fi *fsFileInfo) ModTime() time.Time { return time.Unix(fi.modTime, 0) } +func (fi *fsFileInfo) IsDir() bool { return fi.isDir } +func (fi *fsFileInfo) Sys() interface{} { return nil } diff --git a/logger/journal.go b/internal/journal/journal.go similarity index 99% rename from logger/journal.go rename to internal/journal/journal.go index 0dbac89f..2c916950 100644 --- a/logger/journal.go +++ b/internal/journal/journal.go @@ -1,4 +1,4 @@ -package logger +package journal import ( "strings" diff --git a/logger/log.go b/internal/journal/log.go similarity index 99% rename from logger/log.go rename to internal/journal/log.go index b7f48fe2..d30a2c2a 100644 --- a/logger/log.go +++ b/internal/journal/log.go @@ -1,4 +1,4 @@ -package logger +package journal import ( "bytes" diff --git a/logger/logger.go b/internal/journal/logger.go similarity index 96% rename from logger/logger.go rename to internal/journal/logger.go index e4421203..ac5d7eea 100644 --- a/logger/logger.go +++ b/internal/journal/logger.go @@ -1,4 +1,4 @@ -package logger +package journal import "io" diff --git a/logger/nologger.go 
b/internal/journal/nologger.go similarity index 98% rename from logger/nologger.go rename to internal/journal/nologger.go index 753c3394..1411f18b 100644 --- a/logger/nologger.go +++ b/internal/journal/nologger.go @@ -1,4 +1,4 @@ -package logger +package journal import "io" diff --git a/helpers/namematcher/list.go b/internal/namematcher/list.go similarity index 93% rename from helpers/namematcher/list.go rename to internal/namematcher/list.go index 216920e3..d661e151 100644 --- a/helpers/namematcher/list.go +++ b/internal/namematcher/list.go @@ -28,6 +28,14 @@ func New(patterns ...string) (List, error) { return l, nil } +func MustList(patterns ...string) List { + l, err := New(patterns...) + if err != nil { + panic(err.Error()) + } + return l +} + func (l List) Match(name string) bool { for _, re := range l.re { if re.MatchString(name) { @@ -136,3 +144,7 @@ func (l List) String() string { func (l *List) Get() any { return *l } + +func (l List) Type() string { + return "FileList" +} diff --git a/helpers/namematcher/list_test.go b/internal/namematcher/list_test.go similarity index 100% rename from helpers/namematcher/list_test.go rename to internal/namematcher/list_test.go diff --git a/helpers/stacking/stack.go b/internal/stacking--nogo/stack.gono similarity index 92% rename from helpers/stacking/stack.go rename to internal/stacking--nogo/stack.gono index 0c978e5c..22d979bd 100644 --- a/helpers/stacking/stack.go +++ b/internal/stacking--nogo/stack.gono @@ -8,8 +8,9 @@ import ( "strings" "time" - "github.com/simulot/immich-go/helpers/gen" - "github.com/simulot/immich-go/immich" + "github.com/simulot/immich-go/internal/gen" + cliflags "github.com/simulot/immich-go/internal/cliFlags" + "github.com/simulot/immich-go/internal/metadata" ) type Key struct { @@ -33,12 +34,12 @@ const ( ) type StackBuilder struct { - dateRange immich.DateRange // Set capture date range + dateRange cliflags.DateRange // Set capture date range stacks map[Key]Stack - supportedMedia 
immich.SupportedMedia + supportedMedia filetypes.SupportedMedia } -func NewStackBuilder(supportedMedia immich.SupportedMedia) *StackBuilder { +func NewStackBuilder(supportedMedia filetypes.SupportedMedia) *StackBuilder { sb := StackBuilder{ supportedMedia: supportedMedia, stacks: map[Key]Stack{}, diff --git a/helpers/stacking/statck_test.go b/internal/stacking--nogo/statck_test.gono similarity index 77% rename from helpers/stacking/statck_test.go rename to internal/stacking--nogo/statck_test.gono index 6c49b505..e95e8a46 100644 --- a/helpers/stacking/statck_test.go +++ b/internal/stacking--nogo/statck_test.gono @@ -7,8 +7,7 @@ import ( "time" "github.com/kr/pretty" - "github.com/simulot/immich-go/immich" - "github.com/simulot/immich-go/immich/metadata" + "github.com/simulot/immich-go/internal/metadata" ) type asset struct { @@ -26,31 +25,31 @@ func Test_Stack(t *testing.T) { { name: "no stack JPG+DNG", input: []asset{ - {ID: "1", FileName: "IMG_1234.JPG", DateTaken: metadata.TakeTimeFromName("2023-10-01 10.15.00")}, - {ID: "2", FileName: "IMG_1234.DNG", DateTaken: metadata.TakeTimeFromName("2023-10-01 10.45.00")}, + {ID: "1", FileName: "IMG_1234.JPG", DateTaken: metadata.TakeTimeFromName("2023-10-01 10.15.00", time.UTC)}, + {ID: "2", FileName: "IMG_1234.DNG", DateTaken: metadata.TakeTimeFromName("2023-10-01 10.45.00", time.UTC)}, }, want: []Stack{}, }, { name: "issue #67", input: []asset{ - {ID: "1", FileName: "IMG_5580.HEIC", DateTaken: metadata.TakeTimeFromName("2023-10-01 10.15.00")}, - {ID: "2", FileName: "IMG_5580.MP4", DateTaken: metadata.TakeTimeFromName("2023-10-01 10.15.00")}, + {ID: "1", FileName: "IMG_5580.HEIC", DateTaken: metadata.TakeTimeFromName("2023-10-01 10.15.00", time.UTC)}, + {ID: "2", FileName: "IMG_5580.MP4", DateTaken: metadata.TakeTimeFromName("2023-10-01 10.15.00", time.UTC)}, }, want: []Stack{}, }, { name: "stack JPG+DNG", input: []asset{ - {ID: "1", FileName: "IMG_1234.JPG", DateTaken: metadata.TakeTimeFromName("2023-10-01 10.15.00")}, - 
{ID: "2", FileName: "IMG_1234.DNG", DateTaken: metadata.TakeTimeFromName("2023-10-01 10.15.00")}, + {ID: "1", FileName: "IMG_1234.JPG", DateTaken: metadata.TakeTimeFromName("2023-10-01 10.15.00", time.UTC)}, + {ID: "2", FileName: "IMG_1234.DNG", DateTaken: metadata.TakeTimeFromName("2023-10-01 10.15.00", time.UTC)}, }, want: []Stack{ { CoverID: "1", IDs: []string{"2"}, - Date: metadata.TakeTimeFromName("2023-10-01 10.15.00"), + Date: metadata.TakeTimeFromName("2023-10-01 10.15.00", time.UTC), Names: []string{"IMG_1234.JPG", "IMG_1234.DNG"}, StackType: StackRawJpg, }, @@ -59,16 +58,16 @@ func Test_Stack(t *testing.T) { { name: "stack BURST", input: []asset{ - {ID: "1", FileName: "IMG_20231014_183244.jpg", DateTaken: metadata.TakeTimeFromName("IMG_20231014_183244.jpg")}, - {ID: "2", FileName: "IMG_20231014_183246_BURST001_COVER.jpg", DateTaken: metadata.TakeTimeFromName("IMG_20231014_183246_BURST001_COVER.jpg")}, - {ID: "3", FileName: "IMG_20231014_183246_BURST002.jpg", DateTaken: metadata.TakeTimeFromName("IMG_20231014_183246_BURST002.jpg")}, - {ID: "4", FileName: "IMG_20231014_183246_BURST003.jpg", DateTaken: metadata.TakeTimeFromName("IMG_20231014_183246_BURST003.jpg")}, + {ID: "1", FileName: "IMG_20231014_183244.jpg", DateTaken: metadata.TakeTimeFromName("IMG_20231014_183244.jpg", time.UTC)}, + {ID: "2", FileName: "IMG_20231014_183246_BURST001_COVER.jpg", DateTaken: metadata.TakeTimeFromName("IMG_20231014_183246_BURST001_COVER.jpg", time.UTC)}, + {ID: "3", FileName: "IMG_20231014_183246_BURST002.jpg", DateTaken: metadata.TakeTimeFromName("IMG_20231014_183246_BURST002.jpg", time.UTC)}, + {ID: "4", FileName: "IMG_20231014_183246_BURST003.jpg", DateTaken: metadata.TakeTimeFromName("IMG_20231014_183246_BURST003.jpg", time.UTC)}, }, want: []Stack{ { CoverID: "2", IDs: []string{"3", "4"}, - Date: metadata.TakeTimeFromName("IMG_20231014_183246_BURST001_COVER.jpg"), + Date: metadata.TakeTimeFromName("IMG_20231014_183246_BURST001_COVER.jpg", time.UTC), Names: 
[]string{"IMG_20231014_183246_BURST001_COVER.jpg", "IMG_20231014_183246_BURST002.jpg", "IMG_20231014_183246_BURST003.jpg"}, StackType: StackBurst, }, @@ -78,23 +77,23 @@ func Test_Stack(t *testing.T) { { name: "stack JPG+CR3", input: []asset{ - {ID: "1", FileName: "3H2A0018.CR3", DateTaken: metadata.TakeTimeFromName("2023-10-01 10.15.00")}, - {ID: "2", FileName: "3H2A0018.JPG", DateTaken: metadata.TakeTimeFromName("2023-10-01 10.15.00")}, - {ID: "3", FileName: "3H2A0019.CR3", DateTaken: metadata.TakeTimeFromName("2023-10-01 10.15.00")}, - {ID: "4", FileName: "3H2A0019.JPG", DateTaken: metadata.TakeTimeFromName("2023-10-01 10.15.00")}, + {ID: "1", FileName: "3H2A0018.CR3", DateTaken: metadata.TakeTimeFromName("2023-10-01 10.15.00", time.UTC)}, + {ID: "2", FileName: "3H2A0018.JPG", DateTaken: metadata.TakeTimeFromName("2023-10-01 10.15.00", time.UTC)}, + {ID: "3", FileName: "3H2A0019.CR3", DateTaken: metadata.TakeTimeFromName("2023-10-01 10.15.00", time.UTC)}, + {ID: "4", FileName: "3H2A0019.JPG", DateTaken: metadata.TakeTimeFromName("2023-10-01 10.15.00", time.UTC)}, }, want: []Stack{ { CoverID: "2", IDs: []string{"1"}, - Date: metadata.TakeTimeFromName("2023-10-01 10.15.00"), + Date: metadata.TakeTimeFromName("2023-10-01 10.15.00", time.UTC), Names: []string{"3H2A0018.CR3", "3H2A0018.JPG"}, StackType: StackRawJpg, }, { CoverID: "4", IDs: []string{"3"}, - Date: metadata.TakeTimeFromName("2023-10-01 10.15.00"), + Date: metadata.TakeTimeFromName("2023-10-01 10.15.00", time.UTC), Names: []string{"3H2A0019.CR3", "3H2A0019.JPG"}, StackType: StackRawJpg, }, @@ -103,14 +102,14 @@ func Test_Stack(t *testing.T) { { name: "issue #12 example1", input: []asset{ - {ID: "1", FileName: "PXL_20231026_210642603.dng", DateTaken: metadata.TakeTimeFromName("PXL_20231026_210642603.dng")}, - {ID: "2", FileName: "PXL_20231026_210642603.jpg", DateTaken: metadata.TakeTimeFromName("PXL_20231026_210642603.jpg")}, + {ID: "1", FileName: "PXL_20231026_210642603.dng", DateTaken: 
metadata.TakeTimeFromName("PXL_20231026_210642603.dng", time.UTC)}, + {ID: "2", FileName: "PXL_20231026_210642603.jpg", DateTaken: metadata.TakeTimeFromName("PXL_20231026_210642603.jpg", time.UTC)}, }, want: []Stack{ { CoverID: "2", IDs: []string{"1"}, - Date: metadata.TakeTimeFromName("PXL_20231026_210642603.dng"), + Date: metadata.TakeTimeFromName("PXL_20231026_210642603.dng", time.UTC), Names: []string{"PXL_20231026_210642603.dng", "PXL_20231026_210642603.jpg"}, StackType: StackRawJpg, }, @@ -119,14 +118,14 @@ func Test_Stack(t *testing.T) { { name: "issue #12 example 2", input: []asset{ - {ID: "3", FileName: "20231026_205755225.dng", DateTaken: metadata.TakeTimeFromName("20231026_205755225.dng")}, - {ID: "4", FileName: "20231026_205755225.MP.jpg", DateTaken: metadata.TakeTimeFromName("20231026_205755225.MP.jpg")}, + {ID: "3", FileName: "20231026_205755225.dng", DateTaken: metadata.TakeTimeFromName("20231026_205755225.dng", time.UTC)}, + {ID: "4", FileName: "20231026_205755225.MP.jpg", DateTaken: metadata.TakeTimeFromName("20231026_205755225.MP.jpg", time.UTC)}, }, want: []Stack{ { CoverID: "4", IDs: []string{"3"}, - Date: metadata.TakeTimeFromName("20231026_205755225.MP.jpg"), + Date: metadata.TakeTimeFromName("20231026_205755225.MP.jpg", time.UTC), Names: []string{"20231026_205755225.dng", "20231026_205755225.MP.jpg"}, StackType: StackRawJpg, }, @@ -135,32 +134,32 @@ func Test_Stack(t *testing.T) { { name: "issue #12 example 3", input: []asset{ - {ID: "3", FileName: "20231026_205755225.dng", DateTaken: metadata.TakeTimeFromName("20231026_205755225.dng")}, - {ID: "4", FileName: "20231026_205755225.MP.jpg", DateTaken: metadata.TakeTimeFromName("20231026_205755225.MP.jpg")}, - {ID: "5", FileName: "PXL_20231207_032111247.RAW-02.ORIGINAL.dng", DateTaken: metadata.TakeTimeFromName("PXL_20231207_032111247.RAW-02.ORIGINAL.dng")}, - {ID: "6", FileName: "PXL_20231207_032111247.RAW-01.COVER.jpg", DateTaken: 
metadata.TakeTimeFromName("PXL_20231207_032111247.RAW-01.COVER.jpg")}, - {ID: "7", FileName: "PXL_20231207_032108788.RAW-02.ORIGINAL.dng", DateTaken: metadata.TakeTimeFromName("PXL_20231207_032108788.RAW-02.ORIGINAL.dng")}, - {ID: "8", FileName: "PXL_20231207_032108788.RAW-01.MP.COVER.jpg", DateTaken: metadata.TakeTimeFromName("PXL_20231207_032108788.RAW-01.MP.COVER.jpg")}, + {ID: "3", FileName: "20231026_205755225.dng", DateTaken: metadata.TakeTimeFromName("20231026_205755225.dng", time.UTC)}, + {ID: "4", FileName: "20231026_205755225.MP.jpg", DateTaken: metadata.TakeTimeFromName("20231026_205755225.MP.jpg", time.UTC)}, + {ID: "5", FileName: "PXL_20231207_032111247.RAW-02.ORIGINAL.dng", DateTaken: metadata.TakeTimeFromName("PXL_20231207_032111247.RAW-02.ORIGINAL.dng", time.UTC)}, + {ID: "6", FileName: "PXL_20231207_032111247.RAW-01.COVER.jpg", DateTaken: metadata.TakeTimeFromName("PXL_20231207_032111247.RAW-01.COVER.jpg", time.UTC)}, + {ID: "7", FileName: "PXL_20231207_032108788.RAW-02.ORIGINAL.dng", DateTaken: metadata.TakeTimeFromName("PXL_20231207_032108788.RAW-02.ORIGINAL.dng", time.UTC)}, + {ID: "8", FileName: "PXL_20231207_032108788.RAW-01.MP.COVER.jpg", DateTaken: metadata.TakeTimeFromName("PXL_20231207_032108788.RAW-01.MP.COVER.jpg", time.UTC)}, }, want: []Stack{ { CoverID: "4", IDs: []string{"3"}, - Date: metadata.TakeTimeFromName("20231026_205755225.dng"), + Date: metadata.TakeTimeFromName("20231026_205755225.dng", time.UTC), Names: []string{"20231026_205755225.dng", "20231026_205755225.MP.jpg"}, StackType: StackRawJpg, }, { CoverID: "6", IDs: []string{"5"}, - Date: metadata.TakeTimeFromName("PXL_20231207_032111247.RAW-02.ORIGINAL.dng"), + Date: metadata.TakeTimeFromName("PXL_20231207_032111247.RAW-02.ORIGINAL.dng", time.UTC), Names: []string{"PXL_20231207_032111247.RAW-02.ORIGINAL.dng", "PXL_20231207_032111247.RAW-01.COVER.jpg"}, StackType: StackBurst, }, { CoverID: "8", IDs: []string{"7"}, - Date: 
metadata.TakeTimeFromName("PXL_20231207_032108788.RAW-02.ORIGINAL.dng"), + Date: metadata.TakeTimeFromName("PXL_20231207_032108788.RAW-02.ORIGINAL.dng", time.UTC), Names: []string{"PXL_20231207_032108788.RAW-02.ORIGINAL.dng", "PXL_20231207_032108788.RAW-01.MP.COVER.jpg"}, StackType: StackBurst, }, @@ -169,16 +168,16 @@ func Test_Stack(t *testing.T) { { name: "stack: Samsung #99", input: []asset{ - {ID: "1", FileName: "20231207_101605_001.jpg", DateTaken: metadata.TakeTimeFromName("20231207_101605_001.jpg")}, - {ID: "2", FileName: "20231207_101605_002.jpg", DateTaken: metadata.TakeTimeFromName("20231207_101605_002.jpg")}, - {ID: "3", FileName: "20231207_101605_003.jpg", DateTaken: metadata.TakeTimeFromName("20231207_101605_003.jpg")}, - {ID: "4", FileName: "20231207_101605_004.jpg", DateTaken: metadata.TakeTimeFromName("20231207_101605_004.jpg")}, + {ID: "1", FileName: "20231207_101605_001.jpg", DateTaken: metadata.TakeTimeFromName("20231207_101605_001.jpg", time.UTC)}, + {ID: "2", FileName: "20231207_101605_002.jpg", DateTaken: metadata.TakeTimeFromName("20231207_101605_002.jpg", time.UTC)}, + {ID: "3", FileName: "20231207_101605_003.jpg", DateTaken: metadata.TakeTimeFromName("20231207_101605_003.jpg", time.UTC)}, + {ID: "4", FileName: "20231207_101605_004.jpg", DateTaken: metadata.TakeTimeFromName("20231207_101605_004.jpg", time.UTC)}, }, want: []Stack{ { CoverID: "1", IDs: []string{"2", "3", "4"}, - Date: metadata.TakeTimeFromName("20231207_101605_001.jpg"), + Date: metadata.TakeTimeFromName("20231207_101605_001.jpg", time.UTC), Names: []string{"20231207_101605_001.jpg", "20231207_101605_002.jpg", "20231207_101605_003.jpg", "20231207_101605_004.jpg"}, StackType: StackBurst, }, @@ -187,15 +186,15 @@ func Test_Stack(t *testing.T) { { name: " stack: Huawei Nexus 6P #100 ", input: []asset{ - {ID: "1", FileName: "00001IMG_00001_BURST20171111030039.jpg", DateTaken: metadata.TakeTimeFromName("00001IMG_00001_BURST20171111030039.jpg")}, - {ID: "2", FileName: 
"00002IMG_00002_BURST20171111030039.jpg", DateTaken: metadata.TakeTimeFromName("00002IMG_00002_BURST20171111030039.jpg")}, - {ID: "3", FileName: "00003IMG_00003_BURST20171111030039_COVER.jpg", DateTaken: metadata.TakeTimeFromName("00003IMG_00003_BURST20171111030039_COVER.jpg")}, + {ID: "1", FileName: "00001IMG_00001_BURST20171111030039.jpg", DateTaken: metadata.TakeTimeFromName("00001IMG_00001_BURST20171111030039.jpg", time.UTC)}, + {ID: "2", FileName: "00002IMG_00002_BURST20171111030039.jpg", DateTaken: metadata.TakeTimeFromName("00002IMG_00002_BURST20171111030039.jpg", time.UTC)}, + {ID: "3", FileName: "00003IMG_00003_BURST20171111030039_COVER.jpg", DateTaken: metadata.TakeTimeFromName("00003IMG_00003_BURST20171111030039_COVER.jpg", time.UTC)}, }, want: []Stack{ { CoverID: "1", IDs: []string{"2", "3"}, - Date: metadata.TakeTimeFromName("00001IMG_00001_BURST20171111030039.jpg"), + Date: metadata.TakeTimeFromName("00001IMG_00001_BURST20171111030039.jpg", time.UTC), Names: []string{"00001IMG_00001_BURST20171111030039.jpg", "00002IMG_00002_BURST20171111030039.jpg", "00003IMG_00003_BURST20171111030039_COVER.jpg"}, StackType: StackBurst, }, @@ -205,7 +204,7 @@ func Test_Stack(t *testing.T) { for _, tt := range tc { t.Run(tt.name, func(t *testing.T) { - sb := NewStackBuilder(immich.DefaultSupportedMedia) + sb := NewStackBuilder(filetypes.DefaultSupportedMedia) for _, a := range tt.input { sb.ProcessAsset(a.ID, a.FileName, a.DateTaken) } diff --git a/internal/tzone/flags.go b/internal/tzone/flags.go new file mode 100644 index 00000000..bf648029 --- /dev/null +++ b/internal/tzone/flags.go @@ -0,0 +1,45 @@ +package tzone + +import ( + "strings" + "time" + + "github.com/thlib/go-timezone-local/tzlocal" +) + +type Timezone struct { + name string + TZ *time.Location +} + +func (tz *Timezone) Set(tzName string) error { + var err error + + tzName = strings.TrimSpace(tzName) + switch strings.ToUpper(tzName) { + case "LOCAL": + tzName, err = tzlocal.RuntimeTZ() + if err != nil { 
// Task represents a unit of work to be processed by the worker pool.
type Task func()

// Pool runs submitted tasks on a fixed set of worker goroutines.
// Create one with NewPool; the zero value is not usable.
type Pool struct {
	tasks chan Task      // queue of pending tasks; closed by Stop
	wg    sync.WaitGroup // tracks running worker goroutines
}

// NewPool creates a new Pool with a specified number of workers, which
// start consuming tasks immediately.
func NewPool(numWorkers int) *Pool {
	pool := &Pool{
		tasks: make(chan Task),
	}

	pool.wg.Add(numWorkers)
	for i := 0; i < numWorkers; i++ {
		go pool.worker()
	}

	return pool
}

// worker consumes tasks until the task channel is closed by Stop.
func (p *Pool) worker() {
	defer p.wg.Done()
	for task := range p.tasks {
		task()
	}
}

// Submit adds a task to the worker pool. It blocks until a worker is
// available. Submitting after Stop panics (send on a closed channel),
// as before.
func (p *Pool) Submit(task Task) {
	p.tasks <- task
}

// Stop closes the task queue and waits for every submitted task to
// finish.
//
// Fix: the previous implementation closed a separate quit channel
// first, which allowed workers to exit while tasks were still queued —
// submitted tasks could be silently dropped (making TestPool flaky).
// Closing the task channel instead guarantees each submitted task runs
// exactly once before Stop returns.
func (p *Pool) Stop() {
	close(p.tasks)
	p.wg.Wait()
}
"github.com/simulot/immich-go/cmd/stack" - "github.com/simulot/immich-go/cmd/tool" - "github.com/simulot/immich-go/cmd/upload" - "github.com/simulot/immich-go/ui" - "github.com/telemachus/humane" + "github.com/simulot/immich-go/app" + "github.com/simulot/immich-go/app/cmd" + "github.com/spf13/cobra" + "github.com/spf13/viper" ) -var ( - version = "dev" - commit = "none" - date = "unknown" -) - -func getCommitInfo() string { - dirty := false - buildvcs := false - - buildinfo, _ := debug.ReadBuildInfo() - for _, s := range buildinfo.Settings { - switch s.Key { - case "vcs.revision": - buildvcs = true - commit = s.Value - case "vcs.modified": - if s.Value == "true" { - dirty = true - } - case "vcs.time": - date = s.Value +// immich-go entry point +func main() { + ctx := context.Background() + err := immichGoMain(ctx) + if err != nil { + if e := context.Cause(ctx); e != nil { + err = e } + _, _ = fmt.Fprintln(os.Stderr, err) + os.Exit(1) } - if buildvcs && dirty { - commit += "-dirty" - } - return commit -} - -func printVersion() { - fmt.Printf("immich-go %s, commit %s, built at %s\n", version, getCommitInfo(), date) } -func main() { - var err error - +// makes immich-go breakable with ^C and run it +func immichGoMain(ctx context.Context) error { // Create a context with cancel function to gracefully handle Ctrl+C events - ctx, cancel := context.WithCancelCause(context.Background()) + ctx, cancel := context.WithCancelCause(ctx) // Handle Ctrl+C signal (SIGINT) signalChannel := make(chan os.Signal, 1) signal.Notify(signalChannel, os.Interrupt) + // Watch for ^C to be pressed go func() { <-signalChannel fmt.Println("\nCtrl+C received. 
Shutting down...") cancel(errors.New("Ctrl+C received")) // Cancel the context when Ctrl+C is received }() - select { - case <-ctx.Done(): - err = ctx.Err() - default: - err = Run(ctx) - } - if err != nil { - if e := context.Cause(ctx); e != nil { - err = e - } - fmt.Println(err.Error()) - os.Exit(1) - } + return runImmichGo(ctx) } -func Run(ctx context.Context) error { - app := cmd.SharedFlags{ - Log: slog.New(humane.NewHandler(os.Stdout, &humane.Options{Level: slog.LevelInfo})), - Banner: ui.NewBanner(version, commit, date), - } - fs := flag.NewFlagSet("main", flag.ExitOnError) - fs.BoolFunc("version", "Get immich-go version", func(s string) error { - printVersion() - os.Exit(0) - return nil - }) +// Run immich-go +func runImmichGo(ctx context.Context) error { + viper.SetEnvPrefix("IMMICHGO") - app.InitSharedFlags() - app.SetFlags(fs) + // Create the application context - err := fs.Parse(os.Args[1:]) - if err != nil { - app.Log.Error(err.Error()) - return err + // Add the root command + c := &cobra.Command{ + Use: "immich-go", + Short: "Immich-go is a command line application to interact with the Immich application using its API", + Long: `An alternative to the immich-CLI command that doesn't depend on nodejs installation. 
It tries its best for importing google photos takeout archives.`, + Version: app.Version, } + cobra.EnableTraverseRunHooks = true // doc: cobra/site/content/user_guide.md + a := app.New(ctx, c) - printVersion() - fmt.Println(app.Banner.String()) + // add immich-go commands + c.AddCommand(app.NewVersionCommand(ctx, a)) + cmd.AddCommands(c, ctx, a) - if len(fs.Args()) == 0 { - err = errors.New("missing command upload|duplicate|stack|tool") + // let's start + err := c.ExecuteContext(ctx) + if err != nil && a.Log().GetSLog() != nil { + a.Log().Error(err.Error()) } - if err != nil { - app.Log.Error(err.Error()) - return err - } - - cmd := fs.Args()[0] - switch cmd { - case "upload": - err = upload.UploadCommand(ctx, &app, fs.Args()[1:]) - case "duplicate": - err = duplicate.DuplicateCommand(ctx, &app, fs.Args()[1:]) - case "metadata": - err = metadata.MetadataCommand(ctx, &app, fs.Args()[1:]) - case "stack": - err = stack.NewStackCommand(ctx, &app, fs.Args()[1:]) - case "tool": - err = tool.CommandTool(ctx, &app, fs.Args()[1:]) - default: - err = fmt.Errorf("unknown command: %q", cmd) - } - - if err != nil { - app.Log.Error(err.Error()) - } - fmt.Println("Check the log file: ", app.LogFile) - if app.APITraceWriter != nil { - fmt.Println("Check the trace file: ", app.APITraceWriterName) - } return err } diff --git a/readme.md b/readme.md index 755640a0..6f3781d5 100644 --- a/readme.md +++ b/readme.md @@ -2,333 +2,563 @@ **Immich-Go** is an open-source tool designed to streamline uploading large photo collections to your self-hosted Immich server. -![screen](/docs/v.20.gif) +> ⚠️ This is an early version, not yet extensively tested
+> ⚠️ Keep a backup copy of your files for safety
+## Support the project `Immich-go` -## You can now support my work on `Immich-go`: +- [GitHub Sponsor](https://github.com/sponsors/simulot) +- [PayPal Donation](https://www.paypal.com/donate/?hosted_button_id=VGU2SQE88T2T4) -- [Github Sponsor](https://github.com/sponsors/simulot) -- [Paypal Donation](https://www.paypal.com/donate/?hosted_button_id=VGU2SQE88T2T4) +## What Makes Immich-Go Special? +### Simple Installation: + * Immich-Go doesn't require NodeJS or Docker for installation. This makes it easy to get started, even for those less familiar with technical environments. + * Immich-Go can run on either your workstation or a NAS. -## Key Features: +### Handles Large Photo Collections: + * **Upload Large Google Photos Takeouts:** Immich-Go excels at handling massive archives downloaded from Google Photos using Google Takeout. It efficiently processes these archives while preserving valuable metadata like GPS location, capture date, and album information. Read the [best practices](#google-photos-best-practices) below for more information. + * **Upload Collections:** You can upload photos directly from your computer folders, folder trees, and compressed ZIP archives. + * **Is Duplicate-aware:** Immich-Go identifies and discards duplicate photos, keeping only the highest-quality version on the server. + * **Archive Your Immich Server:** Write the content of your Immich server to a folder tree, ready to be archived or migrated to another server. -* **Effortlessly Upload Large Google Photos Takeouts:** Immich-Go excels at handling the massive archives you download from Google Photos using Google Takeout. It efficiently processes these archives while preserving valuable metadata like GPS location, capture date, and album information. -* **Flexible Uploads:** Immich-Go isn't limited to Google Photos. You can upload photos directly from your computer folders, folders tree and ZIP archives. 
-* **Simple Installation:** Immich-Go doesn't require NodeJS or Docker for installation. This makes it easy to get started, even for those less familiar with technical environments. -* **Prioritize Quality:** Immich-Go discards any lower-resolution versions that might be included in Google Photos Takeout, ensuring you have the best possible copies on your Immich server. -* **Stack burst and raw/jpg photos**: Group together related photos in Immich. +### Has Many Options: +* Stack burst photos +* Manage coupled RAW and JPEG files, HEIC and JPEG files +* Use tags +* ... and much more +### Runs on Any Platform: + * Immich-Go is available for Windows, MacOS, Linux, and FreeBSD. It can run on any platform where the Go language is ported. -## Google Photos Best Practices: +## Requirements -* **Taking Out Your Photos:** - * Choose the ZIP format when creating your takeout for easier import. - * Select the largest file size available (50GB) to minimize the number of archive parts. - * Download all parts to your computer. +* **Immich Server:** You need a running Immich server to use Immich-Go. + * Prepare the server's URL (http://your-ip:2283 or https://your-domain.tld) + * Collect an API key for each Immich user. +* **Basic Knowledge of Command Line:** Immich-Go is a command-line tool, so you should be comfortable using a terminal. -* **Importing Your Photos:** - * If your takeout is in ZIP format, you can import it directly without needing to unzip the files first. - * It's important to import all the parts of the takeout together, since some data might be spread across multiple files. -
Use `/path/to/your/files/takeout-*.zip` as file name. - * For **.tgz** files (compressed tar archives), you'll need to decompress all the files into a single folder before importing. When using the import tool, don't forget the `-google-photos` option. - * You can remove any unwanted files or folders from your takeout before importing. - * Restarting an interrupted import won't cause any problems and it will resume the work where it was left. +## Upgrading from the Original `immich-go`, Version 0.22 and Earlier -* **Lot of files are not imported**: What are the options? - * Verify if all takeout parts have been included in the processing. - * Request another takeout, either for an entire year or in smaller increments. - * Force the import of files despite the missing JSON. Use the option `-upload-when-missing-JSON` +This version is a complete rewrite of the original `immich-go` project. It is designed to be more efficient, more reliable, and easier to use. It is also more flexible, with more options and features. As a consequence, the command line options have changed. Please refer to the documentation for the new options. -For insights into the reasoning behind this alternative to `immich-cli`, please read the motivation [here](docs/motivation.md). +The visible changes are: +- **Adoption of the Linux Convention** for the command line options: use 2 dashes for long options. +- Complete restructuring of the CLI logic: + - The `upload` command accepts 3 sub-commands: `from-google-photos`, `from-folder`, `from-immich`. This removes all ambiguity from the options. + - The new `archive` command takes advantage of this sub-command logic. It is possible to archive from a Google Photos takeout, a folder tree, or an Immich server. +The upgrade process consists of installing the new version over the previous one. You can check the version of the installed `immich-go` by running `immich-go --version`. -> ⚠️ This an early version, not yet extensively tested
-> ⚠️ Keep a backup copy of your files for safety
+# Installation +## Prerequisites + +- For pre-built binaries: No prerequisites needed +- For building from source: + - Go 1.23 or higher + - Git +- Optional dependencies: + - ExifTool: Enhanced metadata extraction capabilities + - On Linux: `sudo apt-get install exiftool` + - On MacOS: `brew install exiftool` + - On Windows: Download from [ExifTool website](https://exiftool.org/) + +## Pre-built Binaries + +The easiest way to install Immich-Go is to download the pre-built binary for your system from the [GitHub releases page](https://github.com/simulot/immich-go/releases). + +### Supported Platforms: +- **Operating Systems** + - MacOS + - Windows + - Linux + - FreeBSD + +- **Architectures** + - AMD64 (x86_64) + - ARM + +### Installation Steps + +1. Visit the [GitHub latest release page](https://github.com/simulot/immich-go/releases/latest) +2. Download the archive for your operating system and architecture: + - Windows: `immich-go_Windows_amd64.zip` + - MacOS: `immich-go_Darwin_amd64.tar.gz` + - Linux: `immich-go_Linux_amd64.tar.gz` + - FreeBSD: `immich-go_Freebsd_amd64.tar.gz` + - and more... + +3. Extract the archive: + ```bash + # For Linux/MacOS/FreeBSD + tar -xzf immich-go_*_amd64.tar.gz + + # For Windows + # Use your preferred zip tool to extract the archive + ``` + +4. (Optional) Move the binary to a directory in your PATH: + ```bash + # Linux/MacOS/FreeBSD + sudo mv immich-go /usr/local/bin/ + + # Windows + # Move immich-go.exe to a directory in your PATH + ``` + +## Building from Source +If pre-built binaries are not available, you can build Immich-Go from source. + +### Prerequisites +- Go 1.23 or higher +- Git + +### Build Steps +```bash +# Clone the repository +git clone https://github.com/simulot/immich-go.git -# Executing `immich-go` -The `immich-go` is a command line tool that must be run from a terminal window. -The `immich-go` program uses the Immich API. Hence it needs the server address and a valid API key. 
+# Change to the project directory +cd immich-go -**On Linux, macOS and FreeBSD friends:** +# Build the binary +go build -```sh -./immich-go -server=URL -key=KEY -general_options COMMAND -command_options... {path/to/files} +# (Optional) Install to GOPATH/bin +go install ``` -> [!IMPORTANT] -> macOS users have to explicitly allow immich-go to run. Open Privacy & Security > Security Settings to allow +## Installation with Nix -**On Windows systems:** -```powershell -.\immich-go -server=URL -key=KEY -general_options COMMAND -command_options... {path/to/files} +`immich-go` is packaged with [nix](https://nixos.org/) and distributed via [nixpkgs](https://search.nixos.org/packages?channel=unstable&type=packages&query=immich-go). +You can try `immich-go` without installing it with: + +```bash +nix-shell -I "nixpkgs=https://github.com/NixOS/nixpkgs/archive/nixos-unstable-small.tar.gz" -p immich-go +# Or with flakes enabled +nix run "github:nixos/nixpkgs?ref=nixos-unstable-small#immich-go" -- -help ``` -## How boolean options are handled - -Boolean options have a default value indicated below. Mentioning any option on the common line changes the option to TRUE. -To force an option to FALSE, use the following syntax: `-option=FALSE`. - -Example: Immich-go check the server's SSL certificate. you can disable this behavior by turning on the `skip-verify-ssl` option. Just add `-skip-verify-ssl`. -`-skip-verify-ssl` is equivalent to `-skip-verify-ssl=TRUE`. 
To turn off the feature (which is the default behavior), use `-skip-verify-ssl=FALSE` - -## Shared options with all commands - -| **Parameter** | **Description** | **Default value** | -| ---------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `-use-configuration=path/to/config/file` | Specifies the configuration file to use.
Server URL and the API key are stored into the immich-go configuration file. They can be omitted for the next runs. | Linux `$HOME/.config/immich-go/immich-go.json`
Windows `%AppData%\immich-go\immich-go.json`
macOS `$HOME/Library/Application Support/immich-go/immich-go.json` | -| `-server=URL` | URL of the Immich service, example http://:2283 or https://your-domain.tld | | -| `-api=URL` | URL of the Immich api endpoint (http://container_ip:3301) | | -| `-device-uuid=VALUE` | Force the device identification | `$HOSTNAME` | -| `-client-timeout=duration` | Set the timeout for server calls. The duration is a decimal number with a unit suffix, such as "300ms", "1.5m" or "45m". Valid time units are "ms", "s", "m", "h". | `5m` | -| `-skip-verify-ssl` | Skip SSL verification for use with self-signed certificates | `false` | -| `-key=KEY` | A key generated by the user. Uploaded photos will belong to the key's owner. | | -| `-log-level=LEVEL` | Adjust the log verbosity as follows:
- `ERROR`: Display only errors
- `WARNING`: Same as previous one plus non-blocking error
- `INFO`: Information messages | `INFO` | -| `-log-file=/path/to/log/file` | Write all messages to a file | Linux `$HOME/.cache/immich-go/immich-go_YYYY-MM-DD_HH-MI-SS.log`
Windows `%LocalAppData%\immich-go\immich-go_YYYY-MM-DD_HH-MI-SS.log`
macOS `$HOME/Library/Caches/immich-go/immich-go_YYYY-MM-DD_HH-MI-SS.log` | -| `-log-json` | Output the log as line-delimited JSON file | `false` | -| `-time-zone=time_zone_name` | Set the time zone for dates without time zone information | The system's time zone | -| `-no-ui` | Disable the user interface | `false` | -| `-debug-counters` | Enable the generation a CSV beside the log file | `false` | -| `-api-trace` | Enable trace of API calls | `false` | - -## Command `upload` - -Use this command for uploading photos and videos from a local directory, a zipped folder or all zip files that the Google Photos takeout procedure has generated. - -### Switches and options: - -| **Parameter** | **Description** | **Default value** | -|--------------------------------------|-------------------------------------------------------------------------------------------------|-------------------------------------------------------------------------------------------| -| `-album="ALBUM NAME"` | Import assets into the Immich album `ALBUM NAME`. | | -| `-dry-run` | Preview all actions as they would be done. | `FALSE` | -| `-create-album-folder` | Generate immich albums after folder names. | `FALSE` | -| `-use-full-path-album-name` | Use the full path to the file to determine the album name. | `FALSE` | -| `-album-name-path-separator` | Determines how multiple (sub) folders, if any, will be joined | ` ` | -| `-create-stacks` | Stack jpg/raw or bursts. | `FALSE` | -| `-stack-jpg-raw` | Control the stacking of jpg/raw photos. | `FALSE` | -| `-stack-burst` | Control the stacking bursts. | `FALSE` | -| `-select-types=".ext,.ext,.ext..."` | List of accepted extensions. | | -| `-exclude-types=".ext,.ext,.ext..."` | List of excluded extensions. | | -| `-when-no-date=FILE\|NOW` | When the date of take can't be determined, use the FILE's date or the current time NOW. | `FILE` | -| `-exclude-files=pattern` | Ignore files based on a pattern. Case insensitive. 
Repeat the option for each pattern do you need. | `@eaDir/`
`@__thumb/`
`SYNOFILE_THUMB_*.*`
`Lightroom Catalog/`
`thumbnails/` | - -### Date selection: -Fine-tune import based on specific dates: - -| **Parameter** | **Description** | -| ------------------ | ---------------------------------------------- | -| `-date=YYYY-MM-DD` | import photos taken on a particular day. | -| `-date=YYYY-MM` | select photos taken during a particular month. | -| `-date=YYYY` | select photos taken during a particular year. | - -### Exclude files based on a pattern - -Use the `-exclude-files=PATTERN` to exclude certain files or directories from the upload. Repeat the option for each pattern do you need. The following directories are excluded automatically: -- @eaDir/ -- @__thumb/ -- SYNOFILE_THUMB_\*.\* -- Lightroom Catalog/ -- thumbnails/ -- .DS_Store/ - - -Example, the following command excludes any files in directories called backup or draft and any file with name finishing with "copy)" as PXL_20231006_063121958 (another copy).jpg: -```sh -immich-go -server=xxxxx -key=yyyyy upload -exclude-files=backup/ -exclude-files=draft/ -exclude=copy).* /path/to/your/files +Or you can add `immich-go` to your `configuration.nix` in the `environment.systemPackages` section. + +## Verifying the Installation + +After installation, verify that immich-go is working correctly: + +```bash +immich-go --version ``` -### Google Photos options: -Specialized options for Google Photos management: - -| **Parameter** | **Description** | **Default value** | -|-------------------------------------|----------------------------------------------------------------------------------|-------------------| -| `-google-photos` | import from a Google Photos structured archive, recreating corresponding albums. | | -| `-from-album="GP Album"` | Create the album in `immich` and import album's assets. | | -| `-create-albums` | Controls creation of Google Photos albums in Immich. | `TRUE` | -| `-keep-untitled-albums` | Untitled albums are imported into `immich` with the name of the folder as title. 
| `FALSE` | -| `-use-album-folder-as-name` | Use the folder's name instead of the album title. | `FALSE` | -| `-keep-partner` | Specifies inclusion or exclusion of partner-taken photos. | `TRUE` | -| `-partner-album="partner's album"` | import assets from partner into given album. | | -| `-discard-archived` | don't import archived assets. | `FALSE` | -| `-auto-archive` | Automatically archive photos that are also archived in Google Photos | `TRUE` | -| `-upload-when-missing-JSON` | Upload photos not associated with a JSON metadata file | `FALSE` | - -Read [here](docs/google-takeout.md) to understand why Google Photos takeout isn't easy to handle. - -### Burst detection -Currently the bursts following this schema are detected: -- xxxxx_BURSTnnn.* -- xxxxx_BURSTnnn_COVER.* -- xxxxx.RAW-01.COVER.jpg and xxxxx.RAW-02.ORIGINAL.dng -- xxxxx.RAW-01.MP.COVER.jpg and xxxxx.RAW-02.ORIGINAL.dng -- xxxxxIMG_xxxxx_BURSTyyyymmddhhmmss.jpg and xxxxxIMG_xxxxx_BURSTyyyymmddhhmmss_COVER.jpg (Huawei Nexus 6P) -- yyyymmdd_hhmmss_xxx.jpg (Samsung) - -All images must be taken during the same minute. -The COVER image will be the parent image of the stack - -### Couple jpg/raw detection -Both images should be taken in the same minute. -The JPG image will be the cover. - -Please open an issue to cover more possibilities. - -### Example Usage: uploading a Google Photos takeout archive - -To illustrate, here's a command importing photos from a Google Photos takeout archive captured between June 1st and June 30th, 2019, while auto-generating albums: - -```sh -./immich-go -server=http://mynas:2283 -key=zzV6k65KGLNB9mpGeri9n8Jk1VaNGHSCdoH1dY8jQ upload --create-albums -google-photos -date=2019-06 ~/Download/takeout-*.zip +This should display the version number of immich-go. +# Running Immich-Go + +Immich-Go is a command-line tool. You need to run it from a terminal or command prompt. 
+ +## Commands and Sub-Commands Logic + +The general syntax for running Immich-Go is: + +```bash +immich-go command sub-command options path/to/files ``` - -### Metadata determination +Commands must be combined with sub-commands and options to perform the required action. +* immich-go + * [upload](#the-upload-command) + * from-folder + * from-google-photos + * from-immich + * [archive](#the-archive-command) + * from-folder + * from-google-photos + * from-immich + * version + +Examples: +```bash +## Upload photos from a local folder to your Immich server +immich-go upload from-folder --server=http://your-ip:2283 --api-key=your-api-key /path/to/your/photos -Immich-go get the first available data in the following order. +## Archive photos from your Immich server to a local folder +immich-go archive from-immich --from-server=http://your-ip:2283 --from-api-key=your-api-key --write-to-folder=/path/to/archive -#### Date of capture: +## Upload a Google Photos takeout to your Immich server +immich-go upload from-google-photos --server=http://your-ip:2283 --api-key=your-api-key /path/to/your/takeout-*.zip +``` -* Google Photos takeout - 1. Google Photos JSON field `photoTakenTime` - 1. Photo's file name: ex `PXL_20220909_154515546.jpg` - 1. Photo's exif data -* Folder import - 1. XMP file - 1. Photo's file name: ex `PXL_20220909_154515546.jpg` - 1. Photo's file path: ex `/photos/2022/11/09/IMG_1234.HEIC` - 1. Photo's exif data +> **Note:** Depending on your OS, you may need to invoke the program differently when Immich-Go is in the current directory: +> - Linux, MacOS, FreeBSD: `./immich-go` +> - Windows: `.\immich-go` -#### GPS location: +### Global Options +The following options are shared by all commands: -* Google Photos takeout - 1. Google Photos JSON field `geoDataExif` - 1. Google Photos JSON field `geoData` - 1. Google Photos album JSON field `locationEnrichment` - 1. Photo's exif data -* Folder import - 1. XMP file - 1. 
Photo's exif data +| **Parameter** | **Description** | +| -------------- | ---------------------------------------------------- | +| -h, --help | Help for Immich-Go | +| -l, --log-file | Write log messages to a file | +| --log-level | Log level (DEBUG\|INFO\|WARN\|ERROR) (default "INFO") | +| --log-type | Log format (TEXT\|JSON) (default "TEXT") | +| -v, --version | Version for Immich-Go | +# The **upload** command: +The **upload** command loads photos and videos from the source designated by the sub-command to the Immich server. +**Upload** accepts three sub-commands: + * [from-folder](#from-folder-sub-command) to upload photos from a local folder or a zipped archive + * [from-google-photos](#from-google-photos-sub-command) to upload photos from a Google Photos takeout archive + * [from-immich](#from-immich-sub-command) to upload photos from an Immich server to another Immich server +Examples: +```bash +immich-go upload from-folder --server=http://your-ip:2283 --api-key=your-api-key /path/to/your/photos +immich-go upload from-google-photos --server=http://your-ip:2283 --api-key=your-api-key /path/to/your/takeout-*.zip +``` -#### When importing a Google Photos takeout archive: - `immich-go` takes the photo's date from the associated JSON file. -> The server ignores the date provided by immich-go and takes the MP4's date even when it is incorrect. ->
See [#322 Creation timestamp from metadata is wrong](https://github.com/simulot/immich-go/issues/332) +The **upload** command need the following options to manage the connection with the Immich server: -#### When importing photos from a directory: -Immich-go tries to determine the date of capture with the file name, or the file path. +| **Parameter** | **Default value** | **Description** | +| -------------------- | :---------------: | ---------------------------------------------------------------------------------------------------------------------------------- | +| -s, --server | | Immich server address (example http://your-ip:2283 or https://your-domain) (**MANDATORY**) | +| -k, --api-key | | API Key (**MANDATORY**) | +| --api-trace | `FALSE` | Enable trace of api calls | +| --client-timeout | `5m0s` | Set server calls timeout | +| --device-uuid string | `$LOCALHOST` | Set a device UUID | +| --dry-run | | Simulate all server actions | +| --skip-verify-ssl | `FALSE` | Skip SSL verification | +| --time-zone | | Override the system time zone (example: Europe/Paris) | +| --session-tag | `FALSE` | Tag uploaded photos with a tag "{immich-go}/YYYY-MM-DD HH-MM-SS" | +| --tag strings | | Add tags to the imported assets. Can be specified multiple times. Hierarchy is supported using a / separator (e.g. 'tag1/subtag1') | -Ex: -| Path | Photo's capture date | -| --------------------------------------- | -------------------- | -| photos/album/PXL_20220909_154515546.jpg | 2022-09-09 15:51:55 | -| photos/scanned/19991231.jpg | 1999-12-31 00:00:00 | -| photos/20221109/IMG_1234.HEIC | 2022-11-19 00:00:00 | -| photos/2022.11.09T20.30/IMG_1234.HEIC | 2022-11-19 20:30:00 | -| photos/2022/11/09/IMG_1234.HEIC | 2022-11-19 00:00:00 | -If the path can't be used to determine the capture date, immich-go read the file's `metadata` or `exif`. +## **--client-timeout** +Increase the **--client-timeout** when you have some timeout issues with the server, specialy when uploading large files. 
+## **--session-tag** +Thanks to the **--session-tag** option, it's easy to identify all photos uploaded during a session, and remove them if needed. +This tag is formatted as `{immich-go}/YYYY-MM-DD HH-MM-SS`. The tag can be deleted without removing the photos. +# The **archive** command: -## Command `duplicate` +The **archive** command writes the content taken from the source given by the sub-command to a folder tree. -Use this command for analyzing the content of your `immich` server to find any files that share the same file name, the date of capture, but having different size. -Before deleting the inferior copies, the system gets all albums they belong to, and add the superior copy to them. +The command accepts three sub-commands: + * [from-folder](#from-folder-sub-command) to create a folder archive from a local folder or a zipped archive + * [from-google-photos](#from-google-photos-sub-command) to create a folder archive from a Google Photos takeout archive + * [from-immich](#from-immich-sub-command) to create a folder archive from an Immich server -### Switches and options: -| **Parameter** | **Description** | **Default value** | -| ------------------- | ----------------------------------------------------------- | ----------------------- | -| `-yes` | Assume Yes to all questions | `FALSE` | -| `-date` | Check only assets have a date of capture in the given range | `1850-01-04,2030-01-01` | -| `-ignore-tz-errors` | Ignore timezone difference when searching for duplicates | `FALSE` | -| `-ignore-extension` | Ignore filetype extensions when searching for duplicates | `FALSE` | +All photos and videos are sorted by date of capture, following this schema: `Folder/YYYY/YYYY-MM/photo.jpg`. -### Example Usage: clean the `immich` server after having merged a Google Photos archive and the original files +Here is an example of how your folder structure might look: -This command examines the immich server content, remove less quality images, and preserve albums. 
+``` +Folder/ +├── 2022/ +│ ├── 2022-01/ +│ │ ├── photo01.jpg +│ │ └── photo01.jpg.JSON +│ ├── 2022-02/ +│ │ ├── photo02.jpg +│ │ └── photo02.jpg.JSON +│ └── ... +├── 2023/ +│ ├── 2023-03/ +│ │ ├── photo03.jpg +│ │ └── photo03.jpg.JSON +│ ├── 2023-04/ +│ │ ├── photo04.jpg +│ │ └── photo04.jpg.JSON +│ └── ... +├── 2024/ +│ ├── 2024-05/ +│ │ ├── photo05.jpg +│ │ └── photo05.jpg.JSON +│ ├── 2024-06/ +│ │ ├── photo06.jpg +│ │ └── photo06.jpg.JSON +│ └── ... +``` + +This structure ensures that photos are neatly organized by year and month within the specified folder, making it easy to locate and manage them. +This folder tree is ready to be archived or migrated to another server. + +The general syntax is: -```sh -./immich-go -server=http://mynas:2283 -key=zzV6k65KGLNB9mpGeri9n8Jk1VaNGHSCdoH1dY8jQ duplicate -yes +```bash +immich-go archive from-sub-command --write-to-folder=folder options ``` -## Command `stack` +# **from-folder** sub command: + +The **from-folder** sub-command processes a folder tree to upload photos to the Immich server. + +| **Parameter** | **Default value** | **Description** | +| ----------------------- | :-----------------------------------: | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| --album-path-joiner | `" / "` | Specify a string to use when joining multiple folder names to create an album name (e.g. ' ',' - ') | +| --ban-file | [See banned files](#banned-file-list) | Exclude a file based on a pattern (case-insensitive). Can be specified multiple times. | +| --date-from-name | `TRUE` | Use the date from the filename if the date isn't available in the metadata (Only for .jpg,mp4,.heic,.dng,cr2,.cr3,). | +| --date-range | | Only import photos taken within the specified date range. [See date range possibilities](#date-range) | +| --exclude-extensions | | Comma-separated list of extension to exclude. (e.g. 
.gif,.PM) | +| --folder-as-album | `NONE` | Import all files in albums defined by the folder structure. Can be set to 'FOLDER' to use the folder name as the album name, or 'PATH' to use the full path as the album name | +| --folder-as-tags | `FALSE` | Use the folder structure as tags, (ex: the file holiday/summer 2024/file.jpg will have the tag holiday/summer 2024) | +| --ignore-sidecar-files | `FALSE` | Don't upload sidecar with the photo. | +| --include-extensions | all | Comma-separated list of extension to include. (e.g. .jpg,.heic) | +| --into-album | | Specify an album to import all files into | +| --manage-burst | | Manage burst photos. Possible values: Stack, StackKeepRaw tackKeepJPEG | +| --manage-epson-fastfoto | `FALSE` | Manage Epson FastFoto file | +| --manage-heic-jpeg | | Manage coupled HEIC and JPEG files. Possible values: KeepHeic, KeepJPG, StackCoverHeic, StackCoverJPG | +| --manage-raw-jpeg | | Manage coupled RAW and JPEG files. Possible values: KeepRaw, KeepJPG, StackCoverRaw, StackCoverJPG | +| --recursive | `TRUE` | Explore the folder and all its sub-folders | +| --session-tag | | Tag uploaded photos with a tag "{immich-go}/YYYY-MM-DD HH-MM-SS" | +| --tag | | Add tags to the imported assets. Can be specified multiple times. Hierarchy is supported using a / separator (e.g. 'tag1/subtag1') | -The possibility to stack images has been introduced with `immich` version 1.83. -Let use it to group burst and jpg/raw images together. -### Switches and options: -| **Parameter** | **Description** | **Default value** | -| ------------------ | ----------------------------------------------------------- | ----------------------- | -| `-yes` | Assume Yes to all questions | `FALSE` | -| `-date=date_range` | Check only assets have a date of capture in the given range | `1850-01-04,2030-01-01` | +## Date of capture +The Immich server takes the date of capture from the metadata of the photo, or in the XMP sidecar file if present. 
+However, some photos may not have this information. In this case, Immich-go can infer the date of capture from the filename. -## Command `tool` +The option `--date-from-name` instructs Immich-go to extract the date of capture from the filename if the date isn't available in the metadata. -This command introduces command line tools to manipulate your `immich` server +Immich-go can extract the date of capture without the help of an external tool such as ExifTool. It supports basic formats .heic, .heif, .jpg, +.jpeg, .dng, .cr2, .mp4, .mov, .cr3. -### Sub command `album delete [regexp]` +> note: `--date-from-name` slows down the process because immich-go needs to parse files to check if the capture date is present in the file. -This command deletes albums that match with the given pattern -#### Switches -`-yes` Assume Yes to all questions (default: FALSE).
+# **From-google-photos** sub command: -#### Example -```sh -./immich-go -server=http://mynas:2283 -key=zzV6k65KGLNB9mpGeri9n8Jk1VaNGHSCdoH1dY8jQ tool album delete \d{4}-\d{2}-\d{2} +The **from-google-photos** sub-command processes a Google Photos takeout archive to upload photos to the Immich server. + +| **Parameter** | **Default value** | **Description** | +| ---------------------------- | :-----------------------------------: | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| --ban-file FileList | [See banned files](#banned-file-list) | Exclude a file based on a pattern (case-insensitive). Can be specified multiple times. | +| --date-range | | Only import photos taken within the specified date range [See date range possibilities](#date-range) | +| --exclude-extensions | | Comma-separated list of extension to exclude. (e.g. .gif,.PM) | +| --from-album-name string | | Only import photos from the specified Google Photos album | +| -a, --include-archived | `TRUE` | Import archived Google Photos | +| --include-extensions | `all` | Comma-separated list of extension to include. (e.g. .jpg,.heic) | +| -p, --include-partner | `TRUE` | Import photos from your partner's Google Photos account | +| -t, --include-trashed | `FALSE` | Import photos that are marked as trashed in Google Photos | +| -u, --include-unmatched | `FALSE` | Import photos that do not have a matching JSON file in the takeout | +| --include-untitled-albums | `FALSE` | Include photos from albums without a title in the import process | +| --manage-burst | | Manage burst photos. Possible values: Stack, StackKeepRaw, StackKeepJPEG. 
[See option's details](#burst-detection-and-management) | +| --manage-epson-fastfoto | `FALSE` | Manage Epson FastFoto file (default: false) | +| --manage-heic-jpeg | | Manage coupled HEIC and JPEG files. Possible values: KeepHeic, KeepJPG, StackCoverHeic, StackCoverJPG [See option's details](#management-of-coupled-heic-and-jpeg-files) | +| --manage-raw-jpeg | | Manage coupled RAW and JPEG files. Possible values: KeepRaw, KeepJPG, StackCoverRaw, StackCoverJPG. [See options's details](#management-of-coupled-raw-and-jpeg-files) | +| --partner-shared-album | | Add partner's photo to the specified album name | +| --session-tag | `FALSE` | Tag uploaded photos with a tag "{immich-go}/YYYY-MM-DD HH-MM-SS" | +| --sync-albums | `TRUE` | Automatically create albums in Immich that match the albums in your Google Photos takeout (default true) | +| --tag strings | | Add tags to the imported assets. Can be specified multiple times. Hierarchy is supported using a / separator (e.g. 'tag1/subtag1') | +| --takeout-tag | `FALSE` | Tag uploaded photos with a tag "{takeout}/takeout-YYYYMMDDTHHMMSSZ" (default true) | + + +## Google Photos Best Practices: + +* **Taking Out Your Photos:** + * Choose the ZIP format when creating your takeout for easier import. + * Select the largest file size available (50GB) to minimize the number of archive parts. + * Download all parts to your computer. + +* **Importing Your Photos:** + * If your takeout is in ZIP format, you can import it directly without needing to unzip the files first. + * It's important to import all parts of the takeout together, as some data might be spread across multiple files. Use `/path/to/your/files/takeout-*.zip` as the file name. + * For **.tgz** files (compressed tar archives), you'll need to decompress all the files into a single folder before importing. + * You can remove any unwanted files or folders from your takeout before importing. 
+ * Restarting an interrupted import won't cause any problems and will resume where it left off. + +* **If Many Files Are Not Imported:** + * Verify if all takeout parts have been included in the processing. Have you used the `takeout-*.zip` file name pattern? + * Sometimes, the takeout result is incomplete. Request another takeout, either for an entire year or in smaller increments. + * Force the import of files despite missing JSON files using the option `--upload-when-missing-JSON`. + +## Takeout Tag + +Immich-Go can tag all imported photos with a takeout tag. The tag is formatted as `{takeout}/takeout-YYYYMMDDTHHMMSSZ`. This tag can be used to identify all photos imported from a Google Photos takeout, making it easy to remove them if needed. + +## Burst Detection and Management + +The system detects burst photos in the following cases: + +| Case | Description | +| ------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| Series of photos | When the time difference between two photos is less than 500 ms | +| Huawei smartphones | Based on file names:
- IMG_20231014_183246_BURST001_COVER.jpg
- IMG_20231014_183246_BURST002.jpg
- IMG_20231014_183246_BURST003.jpg | +| Nexus smartphones | Based on file names:
- 00001IMG_00001_BURST20171111030039.jpg
- ...
- 00014IMG_00014_BURST20171111030039.jpg
- 00015IMG_00015_BURST20171111030039_COVER.jpg
- PXL_20230330_184138390.MOTION-01.COVER.jpg
- PXL_20230330_184138390.MOTION-02.ORIGINAL.jpg | +| Samsung smartphones | Based on file names:
- 20231207_101605_001.jpg
- 20231207_101605_002.jpg
- 20231207_101605_xxx.jpg | +| Sony Xperia | Based on file names:
- DSC_0001_BURST20230709220904977.JPG
- ...
- DSC_0035_BURST20230709220904977_COVER.JPG | + +The option `--manage-burst` instructs Immich-Go on how to manage burst photos. The following options are available: + +| Option | Description | +| --------------- | -------------------------------------------------------------------------------------------------------------------------------------------- | +| `stack` | Stack all burst photos together. When the cover photo can't be identified with the file name, the first photo of the burst is used as the cover. | +| `stackKeepRaw` | Stack all burst photos together. Keep only the RAW photos. | +| `stackKeepJPEG` | Stack all burst photos together. Keep only the JPEG photos. | + +## Management of Coupled HEIC and JPEG Files + +The option `--manage-heic-jpeg` instructs Immich-Go on how to manage coupled HEIC and JPEG files. The following options are available: + +| Option | Description | +| ---------------- | ------------------------------------------------------------------- | +| `KeepHeic` | Keep only the HEIC file. | +| `KeepJPG` | Keep only the JPEG file. | +| `StackCoverHeic` | Stack the HEIC and JPEG files together. The HEIC file is the cover. | +| `StackCoverJPG` | Stack the HEIC and JPEG files together. The JPEG file is the cover. | + +## Management of Coupled RAW and JPEG Files + +The option `--manage-raw-jpeg` instructs Immich-Go on how to manage coupled RAW and JPEG files. The following options are available: + +| Option | Description | +| --------------- | ------------------------------------------------------------------ | +| `KeepRaw` | Keep only the RAW file. | +| `KeepJPG` | Keep only the JPEG file. | +| `StackCoverRaw` | Stack the RAW and JPEG files together. The RAW file is the cover. | +| `StackCoverJPG` | Stack the RAW and JPEG files together. The JPEG file is the cover. 
| + +## Management of Epson FastFoto Scanned Photos + +This device outputs three files for each scanned photo: the original scan, a "corrected" scan, and the backside of the photo if it has writing on it. The structure looks like this: +- specified-image-name.jpg Original +- specified-image-name_a.jpg Corrected +- specified-image-name_b.jpg Back of Photo + +The option `--manage-epson-fastfoto=TRUE` instructs Immich-Go to stack related photos, with the corrected scan as the cover. + + +# **from-immich** sub-command: + +The sub-command **from-immich** processes an Immich server to upload photos to another Immich server. + +| **Parameter** | **Default value** | **Description** | +| ------------------------------ | :---------------: | ------------------------------------------------------------------------------------ | +| --from-server | | Immich server address (example http://your-ip:2283 or https://your-domain) | +| --from-api-key string | | API Key | +| --from-album | | Get assets only from those albums, can be used multiple times | +| --from-api-trace | `FALSE` | Enable trace of api calls | +| --from-client-timeout duration | `5m0s` | Set server calls timeout (default 5m0s) | +| --from-date-range | | Get assets only within this date range. [See date range possibilities](#date-range) | +| --from-skip-verify-ssl | `FALSE` | Skip SSL verification | + + +# Additional information and best practices + +## **XMP** files process + +**XMP** files found in source folder are passed to Immich server without any modification. Immich uses them to collect photo's date of capture, tags, description and GPS location. + +## Google photos **JSON** files process + +Google photos **JSON** files found in source folders are opened by Immich-go to get the album belonging, the date of capture, the GPS location, the favorite status, the partner status, the archive status and the trashed status. This information is used to trigger Immich features. 
+
+## Folder archive **JSON** files process
+
+Those files are generated by the **archive** command. They are used to restore Immich features like album, date of capture, GPS location, rating, tags and archive status.
+
+```json
+{
+  "fileName": "example.jpg",
+  "latitude": 37.7749,
+  "longitude": -122.4194,
+  "dateTaken": "2023-10-01T12:34:56Z",
+  "description": "A beautiful view of the Golden Gate Bridge.",
+  "albums": [
+    {
+      "title": "San Francisco Trip",
+      "description": "Photos from my trip to San Francisco"
+    }
+  ],
+  "tags": [
+    {
+      "value": "USA/California/San Francisco"
+    }
+
+  ],
+  "rating": 5,
+  "trashed": false,
+  "archived": false,
+  "favorited": true,
+  "fromPartner": false
+}
 ```
-This command deletes all albums created with de pattern YYYY-MM-DD
+## Session tags
+Immich-go can tag all imported photos with a session tag. The tag is formatted as `{immich-go}/YYYY-MM-DD HH-MM-SS`. This tag can be used to identify all photos imported during a session. This makes it easy to remove them if needed.
-# Installation
-## Installation from the Github release:
+## Banned file list
+The following files are excluded automatically:
+- `@eaDir/`
+- `@__thumb/`
+- `SYNOFILE_THUMB_*.*`
+- `Lightroom Catalog/`
+- `thumbnails/`
+- `.DS_Store/`
+- `._*.*`
-Installing `immich-go` is a straightforward process. Visit the [latest release page](https://github.com/simulot/immich-go/releases/latest) and select the binary file compatible with your system:
+## Date range
-- Darwin arm-64, x86-64
-- Linux arm-64, armv6-64, x86-64
-- Windows arm-64, x86-64
-- Freebsd arm-64, x86-64
+The `--date-range` option allows you to process photos taken within a specific date range. The following date range formats are supported:
-Download the archive corresponding to your OS/Architecture on your machine and decompress it. 
+| **Parameter** | **Description** | +| ----------------------------------- | ---------------------------------------------- | +| `-date-range=YYYY-MM-DD` | import photos taken on a particular day. | +| `-date-range=YYYY-MM` | select photos taken during a particular month. | +| `-date-range=YYYY` | select photos taken during a particular year. | +| `-date-range=YYYY-MM-DD,YYYY-MM-DD` | select photos taken between two dates. | -Open a command windows, go to the directory where immich-go resides, and type the command `immich-go` with mandatory parameters and command. -⚠️ Please note that the linux x86-64 version is the only one tested. +# Examples -## Installation from sources +#### Importing a Google Takeout with Stacking JPEG and RAW -For a source-based installation, ensure you have the necessary Go language development tools (https://go.dev/doc/install) in place. -Download the source files or clone the repository. +To import a Google Photos takeout and stack JPEG and RAW files together, with the RAW file as the cover, use the following command: ```bash -go build -ldflags "-X 'main.version=$(git describe --tag)' -X 'main.date=$(date)'" +immich-go upload from-google-photos --server=http://your-ip:2283 --api-key=your-api-key --manage-raw-jpeg=StackCoverRaw /path/to/your/takeout-*.zip ``` -## Installation with Nix +#### Uploading Photos from a Local Folder -`immich-go` is packaged with [nix](https://nixos.org/) and distributed via [nixpkgs](https://search.nixos.org/packages?channel=unstable&type=packages&query=immich-go). 
-You can try `immich-go` without installing it with: +To upload photos from a local folder to your Immich server, use the following command: ```bash -nix-shell -I "nixpkgs=https://github.com/NixOS/nixpkgs/archive/nixos-unstable-small.tar.gz" -p immich-go -# Or with flakes enabled -nix run "github:nixos/nixpkgs?ref=nixos-unstable-small#immich-go" -- -help +immich-go upload from-folder --server=http://your-ip:2283 --api-key=your-api-key /path/to/your/photos +``` + +#### Archiving Photos from Immich Server + +To archive photos from your Immich server to a local folder, use the following command: + +```bash +immich-go archive from-immich --server=http://your-ip:2283 --api-key=your-api-key --write-to-folder=/path/to/archive +``` + +#### Transferring Photos Between Immich Servers + +To transfer photos from one Immich server to another, use the following command: + +```bash +immich-go upload from-immich --from-server=http://source-ip:2283 --from-api-key=source-api-key --server=http://destination-ip:2283 --api-key=destination-api-key +``` + +#### Importing Photos with Specific Date Range + +To import photos taken within a specific date range from a local folder, use the following command: + +```bash +immich-go upload from-folder --server=http://your-ip:2283 --api-key=your-api-key --exiftool-enabled --date-range=2022-01-01,2022-12-31 /path/to/your/photos ``` -Or you can add `immich-go` to your `configuration.nix` in the `environment.systemPackages` section. # Acknowledgments @@ -336,12 +566,11 @@ Kudos to the Immich team for their stunning project! 
🤩 This program use following 3rd party libraries: - [https://github.com/rivo/tview](https://github.com/rivo/tview) the terminal user interface -- [github.com/rwcarlsen/goexif](github.com/rwcarlsen/goexif) to get date of capture from JPEG files -- [github.com/thlib/go-timezone-local](github.com/thlib/go-timezone-local) for its windows timezone management A big thank you to the project contributors: - [rodneyosodo](https://github.com/rodneyosodo) gitub CI, go linter, and advice - [sigmahour](https://github.com/sigmahour) SSL management - [mrwulf](https://github.com/mrwulf) Partner sharing album - [erkexzcx](https://github.com/erkexzcx) Date determination based on file path and file name +- [benjamonnguyen](https://github.com/benjamonnguyen) Tag API calls diff --git a/ui/banner.go b/ui/banner.go deleted file mode 100644 index 70088ee2..00000000 --- a/ui/banner.go +++ /dev/null @@ -1,50 +0,0 @@ -package ui - -import ( - "fmt" - "strings" -) - -type Banner struct { - b []string - version, commit, date string -} - -// Banner Ascii art -// Generator : http://patorjk.com/software/taag-v1/ -// Font: Three point - -func NewBanner(version, commit, date string) Banner { - return Banner{ - b: []string{ - ". _ _ _ _ . _|_ _ _ ", - "|| | || | ||(_| | ─ (_|(_)", - " _) ", - }, - version: version, - commit: commit, - date: date, - } -} - -// String generate a string with new lines and place the given text on the latest line -func (b Banner) String() string { - const lenVersion = 20 - var text string - if b.version != "" { - text = fmt.Sprintf("v %s", b.version) - } - sb := strings.Builder{} - for i := range b.b { - if i == len(b.b)-1 && text != "" { - if len(text) >= lenVersion { - text = text[:lenVersion] - } - sb.WriteString(b.b[i][:lenVersion-len(text)] + text + b.b[i][lenVersion:]) - } else { - sb.WriteString(b.b[i]) - } - sb.WriteRune('\n') - } - return sb.String() -}