-rw-r--r--  .cirrus.yml | 8
-rw-r--r--  .clang-format | 27
-rw-r--r--  .github/workflows/main.yml | 2
-rw-r--r--  .gitlab-ci.yml | 2
-rw-r--r--  Documentation/BreakingChanges.adoc | 55
-rw-r--r--  Documentation/CodingGuidelines | 17
-rw-r--r--  Documentation/Makefile | 7
-rw-r--r--  Documentation/MyFirstObjectWalk.adoc | 39
-rw-r--r--  Documentation/RelNotes/2.43.7.adoc | 73
-rw-r--r--  Documentation/RelNotes/2.44.4.adoc | 7
-rw-r--r--  Documentation/RelNotes/2.45.4.adoc | 7
-rw-r--r--  Documentation/RelNotes/2.46.4.adoc | 7
-rw-r--r--  Documentation/RelNotes/2.47.3.adoc | 8
-rw-r--r--  Documentation/RelNotes/2.48.2.adoc | 8
-rw-r--r--  Documentation/RelNotes/2.49.1.adoc | 12
-rw-r--r--  Documentation/RelNotes/2.50.1.adoc | 8
-rw-r--r--  Documentation/RelNotes/2.51.0.adoc | 262
-rw-r--r--  Documentation/SubmittingPatches | 11
-rw-r--r--  Documentation/asciidoc.conf.in | 2
-rw-r--r--  Documentation/asciidoctor-extensions.rb.in | 4
-rw-r--r--  Documentation/config/branch.adoc | 4
-rw-r--r--  Documentation/config/feature.adoc | 10
-rw-r--r--  Documentation/config/format.adoc | 6
-rw-r--r--  Documentation/config/gpg.adoc | 2
-rw-r--r--  Documentation/config/imap.adoc | 11
-rw-r--r--  Documentation/config/log.adoc | 56
-rw-r--r--  Documentation/config/merge.adoc | 14
-rw-r--r--  Documentation/config/pack.adoc | 4
-rw-r--r--  Documentation/config/repack.adoc | 7
-rw-r--r--  Documentation/config/sendemail.adoc | 29
-rw-r--r--  Documentation/diff-generate-patch.adoc | 2
-rw-r--r--  Documentation/diff-options.adoc | 40
-rw-r--r--  Documentation/git-apply.adoc | 9
-rw-r--r--  Documentation/git-cat-file.adoc | 13
-rw-r--r--  Documentation/git-config.adoc | 28
-rw-r--r--  Documentation/git-diff.adoc | 10
-rw-r--r--  Documentation/git-fast-export.adoc | 17
-rw-r--r--  Documentation/git-fast-import.adoc | 38
-rw-r--r--  Documentation/git-imap-send.adoc | 68
-rw-r--r--  Documentation/git-log.adoc | 86
-rw-r--r--  Documentation/git-merge.adoc | 12
-rw-r--r--  Documentation/git-pack-objects.adoc | 35
-rw-r--r--  Documentation/git-pack-refs.adoc | 5
-rw-r--r--  Documentation/git-repack.adoc | 5
-rw-r--r--  Documentation/git-send-email.adoc | 311
-rw-r--r--  Documentation/git-stash.adoc | 29
-rw-r--r--  Documentation/git-whatchanged.adoc | 10
-rw-r--r--  Documentation/gitcredentials.adoc | 4
-rw-r--r--  Documentation/gitprotocol-v2.adoc | 4
-rw-r--r--  Documentation/gitremote-helpers.adoc | 2
-rw-r--r--  Documentation/glossary-content.adoc | 5
-rw-r--r--  Documentation/line-range-format.adoc | 26
-rw-r--r--  Documentation/line-range-options.adoc | 10
-rw-r--r--  Documentation/merge-options.adoc | 3
-rw-r--r--  Documentation/meson.build | 7
-rw-r--r--  Documentation/pretty-formats.adoc | 283
-rw-r--r--  Documentation/pretty-options.adoc | 76
-rw-r--r--  Documentation/rev-list-description.adoc | 6
-rw-r--r--  Documentation/rev-list-options.adoc | 399
-rw-r--r--  Documentation/technical/api-path-walk.adoc | 9
-rw-r--r--  Documentation/technical/build-systems.adoc | 5
-rw-r--r--  Documentation/technical/sparse-checkout.adoc | 2
-rw-r--r--  Documentation/user-manual.adoc | 6
-rwxr-xr-x  GIT-VERSION-GEN | 2
-rw-r--r--  Makefile | 15
l---------  RelNotes | 2
-rw-r--r--  apply.c | 18
-rw-r--r--  archive-tar.c | 2
-rw-r--r--  archive-zip.c | 2
-rw-r--r--  archive.c | 6
-rw-r--r--  attr.c | 4
-rw-r--r--  bisect.c | 8
-rw-r--r--  bisect.h | 8
-rw-r--r--  blame.c | 24
-rw-r--r--  bloom.c | 84
-rw-r--r--  bloom.h | 54
-rw-r--r--  branch.c | 4
-rw-r--r--  builtin/am.c | 5
-rw-r--r--  builtin/apply.c | 2
-rw-r--r--  builtin/backfill.c | 6
-rw-r--r--  builtin/blame.c | 6
-rw-r--r--  builtin/cat-file.c | 76
-rw-r--r--  builtin/checkout.c | 6
-rw-r--r--  builtin/clone.c | 14
-rw-r--r--  builtin/commit-graph.c | 21
-rw-r--r--  builtin/commit-tree.c | 4
-rw-r--r--  builtin/commit.c | 6
-rw-r--r--  builtin/config.c | 12
-rw-r--r--  builtin/count-objects.c | 6
-rw-r--r--  builtin/describe.c | 5
-rw-r--r--  builtin/diff.c | 4
-rw-r--r--  builtin/difftool.c | 4
-rw-r--r--  builtin/fast-export.c | 72
-rw-r--r--  builtin/fast-import.c | 162
-rw-r--r--  builtin/fetch-pack.c | 7
-rw-r--r--  builtin/fetch.c | 169
-rw-r--r--  builtin/fsck.c | 31
-rw-r--r--  builtin/gc.c | 442
-rw-r--r--  builtin/grep.c | 26
-rw-r--r--  builtin/hash-object.c | 4
-rw-r--r--  builtin/index-pack.c | 31
-rw-r--r--  builtin/log.c | 152
-rw-r--r--  builtin/ls-files.c | 4
-rw-r--r--  builtin/ls-remote.c | 2
-rw-r--r--  builtin/ls-tree.c | 6
-rw-r--r--  builtin/merge-file.c | 2
-rw-r--r--  builtin/merge-tree.c | 14
-rw-r--r--  builtin/merge.c | 66
-rw-r--r--  builtin/mktag.c | 8
-rw-r--r--  builtin/mktree.c | 10
-rw-r--r--  builtin/multi-pack-index.c | 6
-rw-r--r--  builtin/notes.c | 8
-rw-r--r--  builtin/pack-objects.c | 684
-rw-r--r--  builtin/pack-redundant.c | 12
-rw-r--r--  builtin/patch-id.c | 2
-rw-r--r--  builtin/prune.c | 30
-rw-r--r--  builtin/pull.c | 5
-rw-r--r--  builtin/rebase.c | 1
-rw-r--r--  builtin/receive-pack.c | 109
-rw-r--r--  builtin/reflog.c | 3
-rw-r--r--  builtin/remote.c | 28
-rw-r--r--  builtin/repack.c | 203
-rw-r--r--  builtin/replace.c | 12
-rw-r--r--  builtin/replay.c | 1
-rw-r--r--  builtin/rev-list.c | 38
-rw-r--r--  builtin/send-pack.c | 9
-rw-r--r--  builtin/shortlog.c | 5
-rw-r--r--  builtin/show-index.c | 2
-rw-r--r--  builtin/show-ref.c | 6
-rw-r--r--  builtin/stash.c | 480
-rw-r--r--  builtin/submodule--helper.c | 133
-rw-r--r--  builtin/tag.c | 10
-rw-r--r--  builtin/unpack-file.c | 4
-rw-r--r--  builtin/unpack-objects.c | 12
-rw-r--r--  builtin/update-index.c | 6
-rw-r--r--  builtin/update-ref.c | 25
-rw-r--r--  builtin/worktree.c | 2
-rw-r--r--  builtin/write-tree.c | 1
-rw-r--r--  bulk-checkin.c | 6
-rw-r--r--  bundle-uri.c | 29
-rw-r--r--  bundle.c | 10
-rw-r--r--  cache-tree.c | 17
-rwxr-xr-x  ci/print-test-failures.sh | 2
-rwxr-xr-x  ci/run-style-check.sh | 18
-rw-r--r--  combine-diff.c | 4
-rw-r--r--  commit-graph.c | 106
-rw-r--r--  commit-graph.h | 20
-rw-r--r--  commit.c | 29
-rw-r--r--  commit.h | 8
-rw-r--r--  compat/bswap.h | 110
-rw-r--r--  compat/mingw-posix.h | 1
-rw-r--r--  compat/mingw.c | 4
-rw-r--r--  config.c | 45
-rw-r--r--  config.h | 2
-rw-r--r--  config.mak.uname | 37
-rw-r--r--  configure.ac | 69
-rw-r--r--  connect.c | 6
-rw-r--r--  connected.c | 2
-rw-r--r--  contrib/coccinelle/commit.cocci | 3
-rw-r--r--  contrib/coccinelle/the_repository.cocci | 2
-rwxr-xr-x  contrib/credential/netrc/git-credential-netrc.perl | 14
-rw-r--r--  contrib/credential/netrc/meson.build | 2
-rwxr-xr-x  contrib/credential/netrc/test.pl | 8
-rw-r--r--  contrib/credential/wincred/git-credential-wincred.c | 22
-rw-r--r--  contrib/emacs/README | 33
-rw-r--r--  contrib/emacs/git-blame.el | 6
-rw-r--r--  contrib/emacs/git.el | 6
-rw-r--r--  contrib/examples/README | 20
-rwxr-xr-x  contrib/git-resurrect.sh | 181
-rw-r--r--  contrib/hooks/multimail/README.Git | 7
-rwxr-xr-x  contrib/hooks/post-receive-email | 759
-rwxr-xr-x  contrib/hooks/pre-auto-gc-battery | 42
-rwxr-xr-x  contrib/hooks/setgitperms.perl | 214
-rwxr-xr-x  contrib/hooks/update-paranoid | 421
-rw-r--r--  contrib/mw-to-git/.gitignore | 2
-rw-r--r--  contrib/mw-to-git/.perlcriticrc | 28
-rw-r--r--  contrib/mw-to-git/Git/Mediawiki.pm | 101
-rw-r--r--  contrib/mw-to-git/Makefile | 61
-rwxr-xr-x  contrib/mw-to-git/bin-wrapper/git | 14
-rwxr-xr-x  contrib/mw-to-git/git-mw.perl | 368
-rwxr-xr-x  contrib/mw-to-git/git-remote-mediawiki.perl | 1390
-rw-r--r--  contrib/mw-to-git/git-remote-mediawiki.txt | 7
-rw-r--r--  contrib/mw-to-git/t/.gitignore | 4
-rw-r--r--  contrib/mw-to-git/t/Makefile | 32
-rw-r--r--  contrib/mw-to-git/t/README | 124
-rwxr-xr-x  contrib/mw-to-git/t/install-wiki.sh | 55
-rw-r--r--  contrib/mw-to-git/t/push-pull-tests.sh | 144
-rwxr-xr-x  contrib/mw-to-git/t/t9360-mw-to-git-clone.sh | 257
-rwxr-xr-x  contrib/mw-to-git/t/t9361-mw-to-git-push-pull.sh | 24
-rwxr-xr-x  contrib/mw-to-git/t/t9362-mw-to-git-utf8.sh | 347
-rwxr-xr-x  contrib/mw-to-git/t/t9363-mw-to-git-export-import.sh | 218
-rwxr-xr-x  contrib/mw-to-git/t/t9364-pull-by-rev.sh | 17
-rwxr-xr-x  contrib/mw-to-git/t/t9365-continuing-queries.sh | 23
-rwxr-xr-x  contrib/mw-to-git/t/test-gitmw-lib.sh | 432
-rwxr-xr-x  contrib/mw-to-git/t/test-gitmw.pl | 223
-rw-r--r--  contrib/mw-to-git/t/test.config | 40
-rw-r--r--  contrib/persistent-https/LICENSE | 202
-rw-r--r--  contrib/persistent-https/Makefile | 43
-rw-r--r--  contrib/persistent-https/README | 72
-rw-r--r--  contrib/persistent-https/client.go | 189
-rw-r--r--  contrib/persistent-https/main.go | 82
-rw-r--r--  contrib/persistent-https/proxy.go | 190
-rw-r--r--  contrib/persistent-https/socket.go | 97
-rw-r--r--  contrib/remote-helpers/README | 15
-rwxr-xr-x  contrib/remote-helpers/git-remote-bzr | 11
-rwxr-xr-x  contrib/remote-helpers/git-remote-hg | 11
-rwxr-xr-x  contrib/remotes2config.sh | 33
-rwxr-xr-x  contrib/stats/git-common-hash | 26
-rwxr-xr-x  contrib/stats/mailmap.pl | 70
-rw-r--r--  contrib/subtree/README | 2
-rw-r--r--  contrib/subtree/git-subtree.adoc | 19
-rwxr-xr-x  contrib/subtree/git-subtree.sh | 66
-rw-r--r--  contrib/subtree/meson.build | 2
-rwxr-xr-x  contrib/subtree/t/t7900-subtree.sh | 113
-rw-r--r--  contrib/thunderbird-patch-inline/README | 20
-rwxr-xr-x  contrib/thunderbird-patch-inline/appp.sh | 55
-rw-r--r--  contrib/workdir/.gitattributes | 1
-rwxr-xr-x  contrib/workdir/git-new-workdir | 105
-rw-r--r--  daemon.c | 29
-rw-r--r--  diagnose.c | 12
-rw-r--r--  diff-no-index.c | 95
-rw-r--r--  diff.c | 20
-rw-r--r--  dir.c | 21
-rw-r--r--  dir.h | 23
-rw-r--r--  entry.c | 6
-rw-r--r--  environment.c | 4
-rw-r--r--  environment.h | 3
-rw-r--r--  fetch-pack.c | 32
-rw-r--r--  fmt-merge-msg.c | 6
-rw-r--r--  fsck.c | 4
-rw-r--r--  git-compat-util.h | 14
-rwxr-xr-x  git-gui/git-gui.sh | 877
-rw-r--r--  git-gui/lib/about.tcl | 10
-rw-r--r--  git-gui/lib/blame.tcl | 49
-rw-r--r--  git-gui/lib/branch.tcl | 6
-rw-r--r--  git-gui/lib/branch_checkout.tcl | 15
-rw-r--r--  git-gui/lib/branch_create.tcl | 36
-rw-r--r--  git-gui/lib/branch_delete.tcl | 12
-rw-r--r--  git-gui/lib/branch_rename.tcl | 26
-rw-r--r--  git-gui/lib/browser.tcl | 25
-rw-r--r--  git-gui/lib/checkout_op.tcl | 29
-rw-r--r--  git-gui/lib/choose_font.tcl | 25
-rw-r--r--  git-gui/lib/choose_repository.tcl | 565
-rw-r--r--  git-gui/lib/choose_rev.tcl | 52
-rw-r--r--  git-gui/lib/class.tcl | 1
-rw-r--r--  git-gui/lib/commit.tcl | 23
-rw-r--r--  git-gui/lib/console.tcl | 20
-rw-r--r--  git-gui/lib/database.tcl | 15
-rw-r--r--  git-gui/lib/diff.tcl | 39
-rw-r--r--  git-gui/lib/error.tcl | 11
-rw-r--r--  git-gui/lib/index.tcl | 16
-rw-r--r--  git-gui/lib/line.tcl | 7
-rw-r--r--  git-gui/lib/merge.tcl | 27
-rw-r--r--  git-gui/lib/mergetool.tcl | 8
-rw-r--r--  git-gui/lib/option.tcl | 72
-rw-r--r--  git-gui/lib/remote.tcl | 10
-rw-r--r--  git-gui/lib/remote_add.tcl | 28
-rw-r--r--  git-gui/lib/remote_branch_delete.tcl | 46
-rw-r--r--  git-gui/lib/search.tcl | 13
-rw-r--r--  git-gui/lib/shortcut.tcl | 14
-rw-r--r--  git-gui/lib/sshkey.tcl | 28
-rw-r--r--  git-gui/lib/status_bar.tcl | 13
-rw-r--r--  git-gui/lib/themed.tcl | 110
-rw-r--r--  git-gui/lib/tools.tcl | 7
-rw-r--r--  git-gui/lib/tools_dlg.tcl | 66
-rw-r--r--  git-gui/lib/transport.tcl | 40
-rw-r--r--  git-gui/lib/win32.tcl | 9
-rw-r--r--  git-gui/po/bg.po | 3118
-rwxr-xr-x  git-send-email.perl | 24
-rw-r--r--  git.c | 6
-rwxr-xr-x  gitk-git/gitk | 1405
-rw-r--r--  gpg-interface.c | 16
-rw-r--r--  gpg-interface.h | 12
-rw-r--r--  grep.c | 6
-rw-r--r--  hash.h | 11
-rw-r--r--  help.c | 3
-rw-r--r--  http-backend.c | 2
-rw-r--r--  http-push.c | 20
-rw-r--r--  http-walker.c | 12
-rw-r--r--  http.c | 6
-rw-r--r--  ident.c | 4
-rw-r--r--  imap-send.c | 410
-rw-r--r--  line-log.c | 5
-rw-r--r--  list-objects-filter.c | 4
-rw-r--r--  list-objects.c | 6
-rw-r--r--  log-tree.c | 2
-rw-r--r--  loose.c | 46
-rw-r--r--  mailinfo.c | 2
-rw-r--r--  mailmap.c | 4
-rw-r--r--  match-trees.c | 6
-rw-r--r--  merge-blobs.c | 10
-rw-r--r--  merge-ort.c | 8
-rw-r--r--  mergetools/vimdiff | 4
-rw-r--r--  meson.build | 116
-rw-r--r--  midx-write.c | 2
-rw-r--r--  midx.c | 6
-rw-r--r--  notes-cache.c | 4
-rw-r--r--  notes-merge.c | 4
-rw-r--r--  notes.c | 19
-rw-r--r--  object-file.c | 94
-rw-r--r--  object-file.h | 12
-rw-r--r--  object-name.c | 40
-rw-r--r--  object-store.h | 338
-rw-r--r--  object.c | 8
-rw-r--r--  odb.c (renamed from object-store.c) | 413
-rw-r--r--  odb.h | 473
-rw-r--r--  oss-fuzz/fuzz-pack-idx.c | 2
-rw-r--r--  pack-bitmap-write.c | 11
-rw-r--r--  pack-bitmap.c | 102
-rw-r--r--  pack-bitmap.h | 1
-rw-r--r--  pack-check.c | 2
-rw-r--r--  pack-mtimes.c | 2
-rw-r--r--  pack-objects.h | 14
-rw-r--r--  pack-revindex.c | 2
-rw-r--r--  pack-write.c | 10
-rw-r--r--  packfile.c | 29
-rw-r--r--  packfile.h | 8
-rw-r--r--  parse-options.c | 155
-rw-r--r--  parse-options.h | 7
-rw-r--r--  path-walk.c | 6
-rw-r--r--  path-walk.h | 7
-rw-r--r--  path.c | 4
-rw-r--r--  pathspec.c | 6
-rw-r--r--  pathspec.h | 11
-rw-r--r--  perl/Git.pm | 13
-rw-r--r--  pkt-line.c | 2
-rw-r--r--  po/meson.build | 1
-rw-r--r--  preload-index.c | 7
-rw-r--r--  prio-queue.c | 45
-rw-r--r--  prio-queue.h | 8
-rw-r--r--  promisor-remote.c | 6
-rw-r--r--  protocol-caps.c | 4
-rw-r--r--  reachable.c | 2
-rw-r--r--  read-cache.c | 17
-rw-r--r--  ref-filter.c | 6
-rw-r--r--  reflog.c | 22
-rw-r--r--  reflog.h | 2
-rw-r--r--  refs.c | 61
-rw-r--r--  refs.h | 10
-rw-r--r--  refs/files-backend.c | 9
-rw-r--r--  remote-curl.c | 2
-rw-r--r--  remote.c | 150
-rw-r--r--  remote.h | 8
-rw-r--r--  replace-object.c | 2
-rw-r--r--  replace-object.h | 2
-rw-r--r--  repo-settings.c | 3
-rw-r--r--  repo-settings.h | 1
-rw-r--r--  repository.c | 22
-rw-r--r--  repository.h | 11
-rw-r--r--  rerere.c | 7
-rw-r--r--  revision.c | 147
-rw-r--r--  revision.h | 14
-rw-r--r--  sane-ctype.h | 9
-rw-r--r--  scalar.c | 1
-rw-r--r--  send-pack.c | 11
-rw-r--r--  sequencer.c | 13
-rw-r--r--  serve.c | 2
-rw-r--r--  server-info.c | 2
-rw-r--r--  setup.c | 28
-rw-r--r--  setup.h | 2
-rw-r--r--  shallow.c | 14
-rw-r--r--  strbuf.c | 28
-rw-r--r--  strbuf.h | 12
-rw-r--r--  streaming.c | 10
-rw-r--r--  string-list.c | 48
-rw-r--r--  sub-process.h | 2
-rw-r--r--  submodule-config.c | 21
-rw-r--r--  submodule.c | 32
-rw-r--r--  submodule.h | 9
-rw-r--r--  subprojects/expat.wrap | 18
-rw-r--r--  subprojects/pcre2.wrap | 18
-rw-r--r--  t/README | 4
-rw-r--r--  t/helper/test-bitmap.c | 8
-rw-r--r--  t/helper/test-bloom.c | 8
-rw-r--r--  t/helper/test-find-pack.c | 2
-rw-r--r--  t/helper/test-pack-mtimes.c | 2
-rw-r--r--  t/helper/test-parse-options.c | 17
-rw-r--r--  t/helper/test-partial-clone.c | 4
-rw-r--r--  t/helper/test-path-walk.c | 2
-rw-r--r--  t/helper/test-read-graph.c | 8
-rw-r--r--  t/helper/test-read-midx.c | 2
-rw-r--r--  t/helper/test-ref-store.c | 4
-rw-r--r--  t/helper/test-string-list.c | 96
-rw-r--r--  t/meson.build | 10
-rwxr-xr-x  t/perf/p1501-rev-parse-oneline.sh | 71
-rwxr-xr-x  t/perf/p5313-pack-objects.sh | 37
-rwxr-xr-x  t/t0000-basic.sh | 39
-rwxr-xr-x  t/t0001-init.sh | 45
-rwxr-xr-x  t/t0021-conversion.sh | 4
-rwxr-xr-x  t/t0050-filesystem.sh | 30
-rwxr-xr-x  t/t0063-string-list.sh | 142
-rwxr-xr-x  t/t0411-clone-from-partial.sh | 6
-rw-r--r--  t/t0450/adoc-help-mismatches | 1
-rwxr-xr-x  t/t0610-reftable-basics.sh | 6
-rwxr-xr-x  t/t0612-reftable-jgit-compatibility.sh | 13
-rwxr-xr-x  t/t0613-reftable-write-options.sh | 24
-rwxr-xr-x  t/t1006-cat-file.sh | 111
-rwxr-xr-x  t/t1007-hash-object.sh | 6
-rwxr-xr-x  t/t1021-rerere-in-workdir.sh | 58
-rwxr-xr-x  t/t1300-config.sh | 11
-rwxr-xr-x  t/t1400-update-ref.sh | 74
-rwxr-xr-x  t/t1410-reflog.sh | 28
-rwxr-xr-x  t/t1416-ref-transaction-hooks.sh | 2
-rwxr-xr-x  t/t1517-outside-repo.sh | 7
-rwxr-xr-x  t/t2400-worktree-add.sh | 20
-rwxr-xr-x  t/t3000-ls-files-others.sh | 19
-rwxr-xr-x  t/t3418-rebase-continue.sh | 13
-rwxr-xr-x  t/t3600-rm.sh | 5
-rwxr-xr-x  t/t3903-stash.sh | 173
-rwxr-xr-x  t/t4000-diff-format.sh | 2
-rwxr-xr-x  t/t4013-diff-various.sh | 27
-rw-r--r--  t/t4018/r-indent | 6
-rw-r--r--  t/t4018/r-indent-nested | 10
-rw-r--r--  t/t4018/r-noindent | 6
-rwxr-xr-x  t/t4041-diff-submodule-option.sh | 22
-rwxr-xr-x  t/t4042-diff-textconv-caching.sh | 12
-rwxr-xr-x  t/t4053-diff-no-index.sh | 75
-rwxr-xr-x  t/t4060-diff-submodule-option-diff-format.sh | 9
-rwxr-xr-x  t/t4140-apply-ita.sh | 31
-rwxr-xr-x  t/t4150-am.sh | 2
-rwxr-xr-x  t/t4202-log.sh | 40
-rwxr-xr-x  t/t4203-mailmap.sh | 33
-rwxr-xr-x  t/t4216-log-bloom.sh | 23
-rwxr-xr-x  t/t5004-archive-corner-cases.sh | 5
-rwxr-xr-x  t/t5300-pack-object.sh | 25
-rwxr-xr-x  t/t5306-pack-nobase.sh | 5
-rwxr-xr-x  t/t5310-pack-bitmaps.sh | 43
-rwxr-xr-x  t/t5316-pack-delta-depth.sh | 9
-rwxr-xr-x  t/t5323-pack-redundant.sh | 5
-rwxr-xr-x  t/t5331-pack-objects-stdin.sh | 122
-rwxr-xr-x  t/t5332-multi-pack-reuse.sh | 7
-rwxr-xr-x  t/t5333-pseudo-merge-bitmaps.sh | 21
-rwxr-xr-x  t/t5408-send-pack-stdin.sh | 15
-rwxr-xr-x  t/t5505-remote.sh | 14
-rwxr-xr-x  t/t5516-fetch-push.sh | 27
-rwxr-xr-x  t/t5538-push-shallow.sh | 41
-rwxr-xr-x  t/t5558-clone-bundle-uri.sh | 23
-rwxr-xr-x  t/t6422-merge-rename-corner-cases.sh | 10
-rwxr-xr-x  t/t6601-path-walk.sh | 20
-rwxr-xr-x  t/t7007-show.sh | 24
-rwxr-xr-x  t/t7401-submodule-summary.sh | 18
-rwxr-xr-x  t/t7406-submodule-update.sh | 64
-rwxr-xr-x  t/t7422-submodule-output.sh | 9
-rwxr-xr-x  t/t7450-bad-git-dotfiles.sh | 33
-rwxr-xr-x  t/t7528-signed-commit-ssh.sh | 32
-rwxr-xr-x  t/t7600-merge.sh | 74
-rwxr-xr-x  t/t7704-repack-cruft.sh | 145
-rwxr-xr-x  t/t7815-grep-binary.sh | 2
-rwxr-xr-x  t/t7900-maintenance.sh | 19
-rwxr-xr-x  t/t9001-send-email.sh | 7
-rwxr-xr-x  t/t9300-fast-import.sh | 12
-rwxr-xr-x  t/t9301-fast-import-notes.sh | 2
-rwxr-xr-x  t/t9350-fast-export.sh | 102
-rwxr-xr-x  t/t9500-gitweb-standalone-no-errors.sh | 16
-rwxr-xr-x  t/t9822-git-p4-path-encoding.sh | 13
-rwxr-xr-x  t/t9835-git-p4-metadata-encoding-python2.sh | 24
-rwxr-xr-x  t/t9836-git-p4-metadata-encoding-python3.sh | 24
-rwxr-xr-x  t/t9903-bash-prompt.sh | 4
-rw-r--r--  t/test-lib-functions.sh | 21
-rw-r--r--  t/test-lib.sh | 31
-rw-r--r--  t/unit-tests/u-prio-queue.c | 23
-rw-r--r--  t/unit-tests/u-string-list.c | 227
-rw-r--r--  tag.c | 10
-rw-r--r--  tmp-objdir.c | 30
-rw-r--r--  transport.c | 2
-rw-r--r--  tree-walk.c | 18
-rw-r--r--  tree.c | 6
-rw-r--r--  unpack-trees.c | 2
-rw-r--r--  upload-pack.c | 4
-rw-r--r--  usage.c | 14
-rw-r--r--  userdiff.c | 4
-rw-r--r--  walker.c | 17
-rw-r--r--  xdiff-interface.c | 4
473 files changed, 11848 insertions, 14863 deletions
diff --git a/.cirrus.yml b/.cirrus.yml
index 1fbdc2652b..fef04a3840 100644
--- a/.cirrus.yml
+++ b/.cirrus.yml
@@ -5,11 +5,13 @@ freebsd_task:
env:
GIT_PROVE_OPTS: "--timer --jobs 10"
GIT_TEST_OPTS: "--no-chain-lint --no-bin-wrappers"
- MAKEFLAGS: "-j4"
+ GIT_SKIP_TESTS: t7815.12
+ MAKEFLAGS: -j4
DEFAULT_TEST_TARGET: prove
+ DEFAULT_UNIT_TEST_TARGET: unit-tests-prove
DEVELOPER: 1
freebsd_instance:
- image_family: freebsd-13-4
+ image_family: freebsd-14-3
memory: 2G
install_script:
pkg install -y gettext gmake perl5
@@ -19,4 +21,4 @@ freebsd_task:
build_script:
- su git -c gmake
test_script:
- - su git -c 'gmake DEFAULT_UNIT_TEST_TARGET=unit-tests-prove test unit-tests'
+ - su git -c 'gmake test unit-tests'
diff --git a/.clang-format b/.clang-format
index 9547fe1b77..dcfd0aad60 100644
--- a/.clang-format
+++ b/.clang-format
@@ -12,7 +12,15 @@ UseTab: Always
TabWidth: 8
IndentWidth: 8
ContinuationIndentWidth: 8
-ColumnLimit: 80
+
+# While we do want to enforce a character limit of 80 characters, we often
+# allow lines to overflow that limit to prioritize readability. Setting a
+# character limit here with penalties has been finicky and creates too many
+# false positives.
+#
+# NEEDSWORK: It would be nice if we can find optimal settings to ensure we
+# can re-enable the limit here.
+ColumnLimit: 0
# C Language specifics
Language: Cpp
@@ -210,16 +218,11 @@ MaxEmptyLinesToKeep: 1
# No empty line at the start of a block.
KeepEmptyLinesAtTheStartOfBlocks: false
-# Penalties
-# This decides what order things should be done if a line is too long
-PenaltyBreakAssignment: 5
-PenaltyBreakBeforeFirstCallParameter: 5
-PenaltyBreakComment: 5
-PenaltyBreakFirstLessLess: 0
-PenaltyBreakOpenParenthesis: 300
-PenaltyBreakString: 5
-PenaltyExcessCharacter: 10
-PenaltyReturnTypeOnItsOwnLine: 300
-
# Don't sort #include's
SortIncludes: false
+
+# Remove optional braces of control statements (if, else, for, and while)
+# according to the LLVM coding style. This avoids braces on simple
+# single-statement bodies of statements but keeps braces if one side of
+# if/else if/.../else cascade has multi-statement body.
+RemoveBracesLLVM: true
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 7dbf9f7f12..d122e79415 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -298,7 +298,7 @@ jobs:
path: build
- name: Test
shell: pwsh
- run: meson test -C build --list | Select-Object -Skip 1 | Select-String .* | Group-Object -Property { $_.LineNumber % 10 } | Where-Object Name -EQ ${{ matrix.nr }} | ForEach-Object { meson test -C build --no-rebuild --print-errorlogs $_.Group }
+ run: meson test -C build --no-rebuild --print-errorlogs --slice "$(1+${{ matrix.nr }})/10"
regular:
name: ${{matrix.vector.jobname}} (${{matrix.vector.pool}})
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index bb6d5b976c..af10ebb59a 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -178,7 +178,7 @@ test:msvc-meson:
- job: "build:msvc-meson"
artifacts: true
script:
- - meson test -C build --list | Select-Object -Skip 1 | Select-String .* | Group-Object -Property { $_.LineNumber % $Env:CI_NODE_TOTAL + 1 } | Where-Object Name -EQ $Env:CI_NODE_INDEX | ForEach-Object { meson test -C build --no-rebuild --print-errorlogs $_.Group; if (!$?) { exit $LASTEXITCODE } }
+ - meson test -C build --no-rebuild --print-errorlogs --slice $Env:CI_NODE_INDEX/$Env:CI_NODE_TOTAL
parallel: 10
test:fuzz-smoke-tests:
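
Both CI changes above swap the hand-rolled PowerShell grouping of tests for Meson's built-in `--slice` option. As a rough local illustration (not part of the patch; the `build` directory name is just the conventional one used elsewhere in this diff):

----
# Configure and build once.
meson setup build
meson compile -C build

# Run only the third of ten equally sized slices of the test suite,
# mirroring what a single CI node does with CI_NODE_INDEX=3 and
# CI_NODE_TOTAL=10.
meson test -C build --no-rebuild --print-errorlogs --slice 3/10
----
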
diff --git a/Documentation/BreakingChanges.adoc b/Documentation/BreakingChanges.adoc
index 61bdd586b9..f8d2eba061 100644
--- a/Documentation/BreakingChanges.adoc
+++ b/Documentation/BreakingChanges.adoc
@@ -118,6 +118,53 @@ Cf. <2f5de416-04ba-c23d-1e0b-83bb655829a7@zombino.com>,
<20170223155046.e7nxivfwqqoprsqj@LykOS.localdomain>,
<CA+EOSBncr=4a4d8n9xS4FNehyebpmX8JiUwCsXD47EQDE+DiUQ@mail.gmail.com>.
+* The default storage format for references in newly created repositories will
+ be changed from "files" to "reftable". The "reftable" format provides
+ multiple advantages over the "files" format:
++
+ ** It is impossible to store two references that only differ in casing on
+ case-insensitive filesystems with the "files" format. This issue is common
+ on Windows and macOS platforms. As the "reftable" backend does not use
+ filesystem paths to encode reference names this problem goes away.
+ ** Similarly, macOS normalizes path names that contain unicode characters,
+ which has the consequence that you cannot store two names with unicode
+ characters that are encoded differently with the "files" backend. Again,
+ this is not an issue with the "reftable" backend.
+ ** Deleting references with the "files" backend requires Git to rewrite the
+ complete "packed-refs" file. In large repositories with many references
+ this file can easily be dozens of megabytes in size, in extreme cases it
+ may be gigabytes. The "reftable" backend uses tombstone markers for
+ deleted references and thus does not have to rewrite all of its data.
+ ** Repository housekeeping with the "files" backend typically performs
+ all-into-one repacks of references. This can be quite expensive, and
+ consequently housekeeping is a tradeoff between the number of loose
+ references that accumulate and slow down operations that read references,
+ and compressing those loose references into the "packed-refs" file. The
+ "reftable" backend uses geometric compaction after every write, which
+ amortizes costs and ensures that the backend is always in a
+ well-maintained state.
+ ** Operations that write multiple references at once are not atomic with the
+ "files" backend. Consequently, Git may see in-between states when it reads
+ references while a reference transaction is in the process of being
+ committed to disk.
+ ** Writing many references at once is slow with the "files" backend because
+ every reference is created as a separate file. The "reftable" backend
+ significantly outperforms the "files" backend by multiple orders of
+ magnitude.
+ ** The reftable backend uses a binary format with prefix compression for
+ reference names. As a result, the format uses less space compared to the
+ "packed-refs" file.
++
+Users that get immediate benefit from the "reftable" backend can continue to
+opt in to the "reftable" format manually by setting the "init.defaultRefFormat"
+config. But defaults matter, and we think that overall users will have a better
+experience with fewer platform-specific quirks when they use the new backend by
+default.
++
+A prerequisite for this change is that the ecosystem is ready to support the
+"reftable" format. Most importantly, alternative implementations of Git like
+JGit, libgit2 and Gitoxide need to support it.
+
=== Removals
* Support for grafting commits has long been superseded by git-replace(1).
@@ -183,6 +230,14 @@ These features will be removed.
timeframe, in preference to its synonym "--annotate-stdin". Git 3.0
removes the support for "--stdin" altogether.
+* The git-whatchanged(1) command outlived its usefulness more than
+  10 years ago, and takes more keystrokes to type than its rough
+  equivalent `git log --raw`. We have nominated the command for
+  removal, changed it to refuse to work unless the
+  `--i-still-use-this` option is given, and asked users to report
+  when they do so. So far there hasn't been a single complaint.
++
+The command will be removed.
== Superseded features that will not be deprecated
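
For readers who want to try the "reftable" backend before it becomes the default, the opt-in path mentioned in the section above can already be exercised. A minimal sketch, assuming a Git version with reftable support built in:

----
# Make every newly created repository use the reftable backend.
git config --global init.defaultRefFormat reftable

# Or opt in for a single new repository only.
git init --ref-format=reftable my-repo

# Check which backend a repository ended up with.
git -C my-repo rev-parse --show-ref-format
----
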
diff --git a/Documentation/CodingGuidelines b/Documentation/CodingGuidelines
index c1046abfb7..528b42d1dd 100644
--- a/Documentation/CodingGuidelines
+++ b/Documentation/CodingGuidelines
@@ -298,6 +298,9 @@ For C programs:
. since late 2021 with 44ba10d6, we have had variables declared in
the for loop "for (int i = 0; i < 10; i++)".
+ . since late 2023 with 8277dbe987 we have been using the bool type
+ from <stdbool.h>.
+
New C99 features that we cannot use yet:
. %z and %zu as a printf() argument for a size_t (the %z being for
@@ -315,6 +318,9 @@ For C programs:
encouraged to have a blank line between the end of the declarations
and the first statement in the block.
+ - Do not explicitly initialize global variables to 0 or NULL;
+ instead, let BSS take care of the zero initialization.
+
- NULL pointers shall be written as NULL, not as 0.
- When declaring pointers, the star sides with the variable
@@ -877,6 +883,17 @@ Characters are also surrounded by underscores:
As a side effect, backquoted placeholders are correctly typeset, but
this style is not recommended.
+ When documenting multiple related `git config` variables, place them on
+ a separate line instead of separating them by commas. For example, do
+ not write this:
+ `core.var1`, `core.var2`::
+ Description common to `core.var1` and `core.var2`.
+
+Instead write this:
+ `core.var1`::
+ `core.var2`::
+ Description common to `core.var1` and `core.var2`.
+
Synopsis Syntax
The synopsis (a paragraph with [synopsis] attribute) is automatically
diff --git a/Documentation/Makefile b/Documentation/Makefile
index b109d25e9c..df2ce187eb 100644
--- a/Documentation/Makefile
+++ b/Documentation/Makefile
@@ -510,7 +510,12 @@ lint-docs-meson:
awk "/^manpages = {$$/ {flag=1 ; next } /^}$$/ { flag=0 } flag { gsub(/^ \047/, \"\"); gsub(/\047 : [157],\$$/, \"\"); print }" meson.build | \
grep -v -e '#' -e '^$$' | \
sort >tmp-meson-diff/meson.adoc && \
- ls git*.adoc scalar.adoc | grep -v -e git-bisect-lk2009.adoc -e git-pack-redundant.adoc -e git-tools.adoc >tmp-meson-diff/actual.adoc && \
+ ls git*.adoc scalar.adoc | \
+ grep -v -e git-bisect-lk2009.adoc \
+ -e git-pack-redundant.adoc \
+ -e git-tools.adoc \
+ -e git-whatchanged.adoc \
+ >tmp-meson-diff/actual.adoc && \
if ! cmp tmp-meson-diff/meson.adoc tmp-meson-diff/actual.adoc; then \
echo "Meson man pages differ from actual man pages:"; \
diff -u tmp-meson-diff/meson.adoc tmp-meson-diff/actual.adoc; \
diff --git a/Documentation/MyFirstObjectWalk.adoc b/Documentation/MyFirstObjectWalk.adoc
index bfe8f5f561..413a9fdb05 100644
--- a/Documentation/MyFirstObjectWalk.adoc
+++ b/Documentation/MyFirstObjectWalk.adoc
@@ -43,7 +43,7 @@ Open up a new file `builtin/walken.c` and set up the command handler:
#include "builtin.h"
#include "trace.h"
-int cmd_walken(int argc, const char **argv, const char *prefix)
+int cmd_walken(int argc, const char **argv, const char *prefix, struct repository *repo)
{
trace_printf(_("cmd_walken incoming...\n"));
return 0;
@@ -83,23 +83,36 @@ int cmd_walken(int argc, const char **argv, const char *prefix)
}
----
-Also add the relevant line in `builtin.h` near `cmd_whatchanged()`:
+Also add the relevant line in `builtin.h` near `cmd_version()`:
----
-int cmd_walken(int argc, const char **argv, const char *prefix);
+int cmd_walken(int argc, const char **argv, const char *prefix, struct repository *repo);
----
-Include the command in `git.c` in `commands[]` near the entry for `whatchanged`,
+Include the command in `git.c` in `commands[]` near the entry for `version`,
maintaining alphabetical ordering:
----
{ "walken", cmd_walken, RUN_SETUP },
----
-Add it to the `Makefile` near the line for `builtin/worktree.o`:
+Add an entry for the new command in both the Make and Meson build systems,
+before the entry for `worktree`:
+- In the `Makefile`:
----
+...
BUILTIN_OBJS += builtin/walken.o
+...
+----
+
+- In the `meson.build` file:
+----
+builtin_sources = [
+ ...
+ 'builtin/walken.c',
+ ...
+]
----
Build and test out your command, without forgetting to ensure the `DEVELOPER`
@@ -193,7 +206,7 @@ initialization functions.
Next, we should have a look at any relevant configuration settings (i.e.,
settings readable and settable from `git config`). This is done by providing a
-callback to `git_config()`; within that callback, you can also invoke methods
+callback to `repo_config()`; within that callback, you can also invoke methods
from other components you may need that need to intercept these options. Your
callback will be invoked once per each configuration value which Git knows about
(global, local, worktree, etc.).
@@ -221,14 +234,14 @@ static int git_walken_config(const char *var, const char *value,
}
----
-Make sure to invoke `git_config()` with it in your `cmd_walken()`:
+Make sure to invoke `repo_config()` with it in your `cmd_walken()`:
----
-int cmd_walken(int argc, const char **argv, const char *prefix)
+int cmd_walken(int argc, const char **argv, const char *prefix, struct repository *repo)
{
...
- git_config(git_walken_config, NULL);
+ repo_config(repo, git_walken_config, NULL);
...
}
@@ -250,14 +263,14 @@ We'll also need to include the `revision.h` header:
...
-int cmd_walken(int argc, const char **argv, const char *prefix)
+int cmd_walken(int argc, const char **argv, const char *prefix, struct repository *repo)
{
/* This can go wherever you like in your declarations.*/
struct rev_info rev;
...
- /* This should go after the git_config() call. */
- repo_init_revisions(the_repository, &rev, prefix);
+ /* This should go after the repo_config() call. */
+ repo_init_revisions(repo, &rev, prefix);
...
}
@@ -305,7 +318,7 @@ Then let's invoke `final_rev_info_setup()` after the call to
`repo_init_revisions()`:
----
-int cmd_walken(int argc, const char **argv, const char *prefix)
+int cmd_walken(int argc, const char **argv, const char *prefix, struct repository *repo)
{
...
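
The walkthrough above registers `cmd_walken` with both build systems and routes configuration through `repo_config()`. A sketch of the build-and-run loop it describes (the `walken` command itself is the tutorial's example; the rest are the standard build targets):

----
# Build with the Makefile, keeping the DEVELOPER flags on as the guide asks...
make -j8 DEVELOPER=1

# ...or, after adding builtin/walken.c to builtin_sources, with Meson.
meson setup build && meson compile -C build

# Run the new builtin and look for the trace output from cmd_walken().
GIT_TRACE=1 ./git walken
----
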
diff --git a/Documentation/RelNotes/2.43.7.adoc b/Documentation/RelNotes/2.43.7.adoc
new file mode 100644
index 0000000000..95702a036e
--- /dev/null
+++ b/Documentation/RelNotes/2.43.7.adoc
@@ -0,0 +1,73 @@
+Git v2.43.7 Release Notes
+=========================
+
+This release includes fixes for CVE-2025-27613, CVE-2025-27614,
+CVE-2025-46334, CVE-2025-46835, CVE-2025-48384, CVE-2025-48385, and
+CVE-2025-48386.
+
+Fixes since v2.43.6
+-------------------
+
+ * CVE-2025-27613, Gitk:
+
+ When a user clones an untrusted repository and runs Gitk without
+ additional command arguments, any writable file can be created and
+ truncated. The option "Support per-file encoding" must have been
+ enabled. The operation "Show origin of this line" is affected as
+ well, regardless of the option being enabled or not.
+
+ * CVE-2025-27614, Gitk:
+
+ A Git repository can be crafted in such a way that a user who has
+ cloned the repository can be tricked into running any script
+ supplied by the attacker by invoking `gitk filename`, where
+ `filename` has a particular structure.
+
+ * CVE-2025-46334, Git GUI (Windows only):
+
+ A malicious repository can ship versions of sh.exe or typical
+ textconv filter programs such as astextplain. On Windows, path
+ lookup can find such executables in the worktree. These programs
+ are invoked when the user selects "Git Bash" or "Browse Files" from
+ the menu.
+
+ * CVE-2025-46835, Git GUI:
+
+ When a user clones an untrusted repository and is tricked into
+ editing a file located in a maliciously named directory in the
+ repository, then Git GUI can create and overwrite any writable
+ file.
+
+ * CVE-2025-48384, Git:
+
+ When reading a config value, Git strips any trailing carriage
+ return and line feed (CRLF). When writing a config entry, values
+ with a trailing CR are not quoted, causing the CR to be lost when
+ the config is later read. When initializing a submodule, if the
+ submodule path contains a trailing CR, the altered path is read
+ resulting in the submodule being checked out to an incorrect
+ location. If a symlink exists that points the altered path to the
+ submodule hooks directory, and the submodule contains an executable
+ post-checkout hook, the script may be unintentionally executed
+ after checkout.
+
+ * CVE-2025-48385, Git:
+
+ When cloning a repository Git knows to optionally fetch a bundle
+ advertised by the remote server, which allows the server-side to
+ offload parts of the clone to a CDN. The Git client does not
+ perform sufficient validation of the advertised bundles, which
+ allows the remote side to perform protocol injection.
+
+ This protocol injection can cause the client to write the fetched
+ bundle to a location controlled by the adversary. The fetched
+ content is fully controlled by the server, which can in the worst
+ case lead to arbitrary code execution.
+
+ * CVE-2025-48386, Git:
+
+ The wincred credential helper uses a static buffer (`target`) as a
+ unique key for storing and comparing against internal storage. This
+ credential helper does not properly bounds check the available
+ space remaining in the buffer before appending to it with
+ `wcsncat()`, leading to potential buffer overflows.
diff --git a/Documentation/RelNotes/2.44.4.adoc b/Documentation/RelNotes/2.44.4.adoc
new file mode 100644
index 0000000000..8db4d5b537
--- /dev/null
+++ b/Documentation/RelNotes/2.44.4.adoc
@@ -0,0 +1,7 @@
+Git v2.44.4 Release Notes
+=========================
+
+This release merges up the fixes that appear in v2.43.7 to address
+the following CVEs: CVE-2025-27613, CVE-2025-27614, CVE-2025-46334,
+CVE-2025-46835, CVE-2025-48384, CVE-2025-48385, and CVE-2025-48386.
+See the release notes for v2.43.7 for details.
diff --git a/Documentation/RelNotes/2.45.4.adoc b/Documentation/RelNotes/2.45.4.adoc
new file mode 100644
index 0000000000..5b50d8daf0
--- /dev/null
+++ b/Documentation/RelNotes/2.45.4.adoc
@@ -0,0 +1,7 @@
+Git v2.45.4 Release Notes
+=========================
+
+This release merges up the fixes that appear in v2.43.7 and v2.44.4
+to address the following CVEs: CVE-2025-27613, CVE-2025-27614,
+CVE-2025-46334, CVE-2025-46835, CVE-2025-48384, CVE-2025-48385, and
+CVE-2025-48386. See the release notes for v2.43.7 for details.
diff --git a/Documentation/RelNotes/2.46.4.adoc b/Documentation/RelNotes/2.46.4.adoc
new file mode 100644
index 0000000000..622f4c752f
--- /dev/null
+++ b/Documentation/RelNotes/2.46.4.adoc
@@ -0,0 +1,7 @@
+Git v2.46.4 Release Notes
+=========================
+
+This release merges up the fixes that appear in v2.43.7, v2.44.4, and
+v2.45.4 to address the following CVEs: CVE-2025-27613, CVE-2025-27614,
+CVE-2025-46334, CVE-2025-46835, CVE-2025-48384, CVE-2025-48385, and
+CVE-2025-48386. See the release notes for v2.43.7 for details.
diff --git a/Documentation/RelNotes/2.47.3.adoc b/Documentation/RelNotes/2.47.3.adoc
new file mode 100644
index 0000000000..bc2a2b833b
--- /dev/null
+++ b/Documentation/RelNotes/2.47.3.adoc
@@ -0,0 +1,8 @@
+Git v2.47.3 Release Notes
+=========================
+
+This release merges up the fixes that appear in v2.43.7, v2.44.4,
+v2.45.4, and v2.46.4 to address the following CVEs: CVE-2025-27613,
+CVE-2025-27614, CVE-2025-46334, CVE-2025-46835, CVE-2025-48384,
+CVE-2025-48385, and CVE-2025-48386. See the release notes for v2.43.7
+for details.
diff --git a/Documentation/RelNotes/2.48.2.adoc b/Documentation/RelNotes/2.48.2.adoc
new file mode 100644
index 0000000000..f3f2f90c2b
--- /dev/null
+++ b/Documentation/RelNotes/2.48.2.adoc
@@ -0,0 +1,8 @@
+Git v2.48.2 Release Notes
+=========================
+
+This release merges up the fixes that appear in v2.43.7, v2.44.4,
+v2.45.4, v2.46.4, and v2.47.3 to address the following CVEs:
+CVE-2025-27613, CVE-2025-27614, CVE-2025-46334, CVE-2025-46835,
+CVE-2025-48384, CVE-2025-48385, and CVE-2025-48386. See the release
+notes for v2.43.7 for details.
diff --git a/Documentation/RelNotes/2.49.1.adoc b/Documentation/RelNotes/2.49.1.adoc
new file mode 100644
index 0000000000..c619e8b495
--- /dev/null
+++ b/Documentation/RelNotes/2.49.1.adoc
@@ -0,0 +1,12 @@
+Git v2.49.1 Release Notes
+=========================
+
+This release merges up the fixes that appear in v2.43.7, v2.44.4,
+v2.45.4, v2.46.4, v2.47.3, and v2.48.2 to address the following CVEs:
+CVE-2025-27613, CVE-2025-27614, CVE-2025-46334, CVE-2025-46835,
+CVE-2025-48384, CVE-2025-48385, and CVE-2025-48386. See the release
+notes for v2.43.7 for details.
+
+It also contains some updates to various CI bits to work around
+and/or adjust to the deprecation of Ubuntu 20.04 in GitHub Actions
+CI, and updates to the Fedora base image.
diff --git a/Documentation/RelNotes/2.50.1.adoc b/Documentation/RelNotes/2.50.1.adoc
new file mode 100644
index 0000000000..aa4a71adbc
--- /dev/null
+++ b/Documentation/RelNotes/2.50.1.adoc
@@ -0,0 +1,8 @@
+Git v2.50.1 Release Notes
+=========================
+
+This release merges up the fixes that appear in v2.43.7, v2.44.4,
+v2.45.4, v2.46.4, v2.47.3, v2.48.2, and v2.49.1 to address the
+following CVEs: CVE-2025-27613, CVE-2025-27614, CVE-2025-46334,
+CVE-2025-46835, CVE-2025-48384, CVE-2025-48385, and
+CVE-2025-48386. See the release notes for v2.43.7 for details.
diff --git a/Documentation/RelNotes/2.51.0.adoc b/Documentation/RelNotes/2.51.0.adoc
new file mode 100644
index 0000000000..f9e6a54109
--- /dev/null
+++ b/Documentation/RelNotes/2.51.0.adoc
@@ -0,0 +1,262 @@
+Git v2.51 Release Notes
+=======================
+
+UI, Workflows & Features
+------------------------
+
+ * Userdiff patterns for the R language have been added.
+
+ * Documentation for "git send-email" has been updated with a bit more
+ credential helper and OAuth information.
+
+ * "git cat-file --batch" learns to understand %(objectmode) atom to
+ allow the caller to tell missing objects (due to repository
+ corruption) and submodules (whose commit objects are OK to be
+ missing) apart.
+
+ * "git diff --no-index dirA dirB" can limit the comparison with
+ pathspec at the end of the command line, just like normal "git
+ diff".
+
+ * "git subtree" (in contrib/) learned to grok GPG signing its commits.
+
+ * "git whatchanged" that is longer to type than "git log --raw"
+ which is its modern rough equivalent has outlived its usefulness
+ more than 10 years ago. Plan to deprecate and remove it.
+
+ * An interchange format for stash entries is defined, and subcommand
+ of "git stash" to import/export has been added.
+
+ * "git merge/pull" has been taught the "--compact-summary" option to
+ use the compact-summary format, instead of diffstat, when showing
+ the summary of the incoming changes.
+
+ * "git imap-send" has been broken for a long time, which has been
+ resurrected and then taught to talk OAuth2.0 etc.
+
+ * Some error messages from "git imap-send" have been updated.
+
+ * When "git daemon" sees a signal while attempting to accept() a new
+ client, instead of retrying, it skipped it by mistake, which has
+ been corrected.
+
+ * The reftable ref backend has matured enough; Git 3.0 will make it
+ the default format for newly created repositories.
+
+ * "netrc" credential helper has been improved to understand textual
+ service names (like smtp) in addition to the numeric port numbers
+ (like 25).
+
+ * Lift the limitation to use changed-path filter in "git log" so that
+ it can be used for a pathspec with multiple literal paths.
+
+ * Clean up the way signatures on commit objects are exported to
+ and imported from the fast-import stream.
+
+
+Performance, Internal Implementation, Development Support etc.
+--------------------------------------------------------------
+
+ * "git pack-objects" learned to find delta bases from blobs at the
+ same path, using the --path-walk API.
+
+ * CodingGuidelines update.
+
+ * Add settings for Solaris 10 & 11.
+
+ * Meson-based build/test framework now understands TAP output
+ generated by our tests.
+
+ * "Do not explicitly initialize to zero" rule has been clarified in
+ the CodingGuidelines document.
+
+ * A test helper "test_seq" function learned the "-f <fmt>" option,
+ which allowed us to simplify a lot of test scripts.
+
+ * A lot of stale stuff has been removed from the contrib/ hierarchy.
+
+ * "git push" and "git fetch" are taught to update refs in batches to
+ gain performance.
+
+ * Some code paths in "git prune" used to ignore the passed-in
+ repository object and used the_repository singleton instance
+ instead, which has been corrected.
+
+ * Update ".clang-format" and ".editorconfig" to match our style guide
+ a bit better.
+
+ * "make coccicheck" succeeds even when spatch made suggestions, which
+ has been updated to fail in such a case.
+
+ * Code clean-up around object access API.
+
+ * Define .precision for more canned parse-options types to avoid bugs
+ coming from using a variable with a wrong type to capture the
+ parsed values.
+
+ * Flipping the default hash function to SHA-256 at Git 3.0 boundary
+ is planned.
+
+ * Declare the weather-balloon we raised for the "bool" type 18 months
+ ago a success and officially allow using the type in our codebase.
+
+ * GIT_TEST_INSTALLED was not honored in the recent topic related to
+ SHA256 hashes, which has been corrected.
+
+ * The pop_most_recent_commit() function can have quite expensive
+ worst-case performance characteristics; this has been optimized by
+ using a prio-queue data structure.
+
+
+Fixes since v2.50
+-----------------
+
+Unless otherwise noted, all the changes in 2.50.X maintenance track,
+including security updates, are included in this release.
+
+ * A memory-leak in an error code path has been plugged.
+ (merge 7082da85cb ly/commit-graph-graph-write-leakfix later to maint).
+
+ * A memory-leak in an error code path has been plugged.
+ (merge aedebdb6b9 ly/fetch-pack-leakfix later to maint).
+
+ * Some leftover references to documentation source files that no
+ longer exist, due to recent ".txt" -> ".adoc" renaming, have been
+ corrected.
+ (merge 3717a5775a jw/doc-txt-to-adoc-refs later to maint).
+
+ * "git stash -p <pathspec>" improvements.
+ (merge 468817bab2 pw/stash-p-pathspec-fixes later to maint).
+
+ * "git send-email" incremented its internal message counter when a
+ message was edited, which made logic that treats the first message
+ specially misbehave, which has been corrected.
+ (merge 2cc27b3501 ag/send-email-edit-threading-fix later to maint).
+
+ * "git stash" recorded a wrong branch name when submodules are
+ present in the current checkout, which has been corrected.
+ (merge ffb36c64f2 kj/stash-onbranch-submodule-fix later to maint).
+
+ * When asking to apply mailmap to both author and committer field
+ while showing a commit object, the field that appears later was not
+ correctly parsed and replaced, which has been corrected.
+ (merge abf94a283f sa/multi-mailmap-fix later to maint).
+
+ * "git maintenance" lacked the care "git gc" had to avoid holding
+ onto the repository lock for too long during packing refs, which
+ has been remedied.
+ (merge 1b5074e614 ps/maintenance-ref-lock later to maint).
+
+ * Avoid regexp_constraint and instead use comparison_constraint when
+ listing functions to exclude from application of coccinelle rules,
+ as spatch can be built with different regexp engine X-<.
+ (merge f2ad545813 jc/cocci-avoid-regexp-constraint later to maint).
+
+ * Updating submodules from the upstream did not work well when
+ submodule's HEAD is detached, which has been improved.
+ (merge ca62f524c1 jk/submodule-remote-lookup-cleanup later to maint).
+
+ * Remove unnecessary check from "git daemon" code.
+ (merge 0c856224d2 cb/daemon-fd-check-fix later to maint).
+
+ * Use of sysctl() system call to learn the total RAM size used on
+ BSDs has been corrected.
+ (merge 781c1cf571 cb/total-ram-bsd-fix later to maint).
+
+ * Drop FreeBSD 4 support and declare that we support only FreeBSD 12
+ or later, which supports memmem().
+ (merge 0392f976a7 bs/config-mak-freebsd later to maint).
+
+ * A diff-filter with negative-only specification like "git log
+ --diff-filter=d" did not trigger correctly, which has been fixed.
+ (merge 375ac087c5 jk/all-negative-diff-filter-fix later to maint).
+
+ * A failure to open the index file for writing due to conflicting
+ access did not state what went wrong, which has been corrected.
+ (merge 9455397a5c hy/read-cache-lock-error-fix later to maint).
+
+ * Tempfile removal fix in the codepath to sign commits with SSH keys.
+ (merge 4498127b04 re/ssh-sign-buffer-fix later to maint).
+
+ * Code and test clean-up around string-list API.
+ (merge 6e5b26c3ff sj/string-list later to maint).
+
+ * "git apply -N" should start from the current index and register
+ only new files, but it instead started from an empty index, which
+ has been corrected.
+ (merge 2b49d97fcb rp/apply-intent-to-add-fix later to maint).
+
+ * Leakfix with a new and a bit invasive test on pack-bitmap files.
+ (merge bfd5522e98 ly/load-bitmap-leakfix later to maint).
+
+ * "git fetch --prune" used to be O(n^2) expensive when there are many
+ refs, which has been corrected.
+ (merge 87d8d8c5d0 ph/fetch-prune-optim later to maint).
+
+ * When a ref creation at refs/heads/foo/bar fails, the files backend
+ now removes refs/heads/foo/ if the directory is otherwise not used.
+ (merge a3a7f20516 ps/refs-files-remove-empty-parent later to maint).
+
+ * "pack-objects" has been taught to avoid pointing into objects in
+ cruft packs from midx.
+
+ * "git remote" now detects remote names that overlap with each other
+ (e.g., remote nickname "outer" and "outer/inner" are used at the
+ same time), as it will lead to overlapping remote-tracking
+ branches.
+ (merge a5a727c448 jk/remote-avoid-overlapping-names later to maint).
+
+ * The gpg.program configuration variable, which names a pathname to
+ the (custom) GPG compatible program, can now be spelled with ~tilde
+ expansion.
+ (merge 7d275cd5c0 jb/gpg-program-variable-is-a-pathname later to maint).
+
+ * Our <sane-ctype.h> header file relied on the assumption that the
+ system-supplied <ctype.h> header is not included later, which would
+ override our macro definitions, but "amazon linux" broke this
+ assumption. Fix this by preemptively including <ctype.h> near the
+ beginning of <sane-ctype.h> ourselves.
+ (merge 9d3b33125f ps/sane-ctype-workaround later to maint).
+
+ * Clean-up compat/bswap.h mess.
+ (merge f4ac32c03a ss/compat-bswap-revamp later to maint).
+
+ * Meson-based build did not handle libexecdir setting correctly,
+ which has been corrected.
+ (merge 056dbe8612 rj/meson-libexecdir-fix later to maint).
+
+ * Document that we do not require "real" name when signing your
+ patches off.
+ (merge 1f0fed312a bc/contribution-under-non-real-names later to maint).
+
+ * "git commit" that concludes a conflicted merge failed to notice and remove
+ the comment added automatically (like "# Conflicts:") when
+ core.commentstring is set to 'auto'.
+ (merge 92b7c7c9f5 ac/auto-comment-char-fix later to maint).
+
+ * Other code cleanup, docfix, build fix, etc.
+ (merge b257adb571 lo/my-first-ow-doc-update later to maint).
+ (merge 8b34b6a220 ly/sequencer-update-squash-is-fixup-only later to maint).
+ (merge 5dceb8bd05 ly/do-not-localize-bug-messages later to maint).
+ (merge 61372dd613 ly/commit-buffer-reencode-leakfix later to maint).
+ (merge 81cd1eef7d ly/pack-bitmap-root-leakfix later to maint).
+ (merge bfc9f9cc64 ly/submodule-update-failure-leakfix later to maint).
+ (merge 65dff89c6b ma/doc-diff-cc-headers later to maint).
+ (merge efb61591ee jm/bundle-uri-debug-output-to-fp later to maint).
+ (merge a3d278bb64 ly/prepare-show-merge-leakfix later to maint).
+ (merge 1fde1c5daf ac/preload-index-wo-the-repository later to maint).
+ (merge 855cfc65ae rm/t2400-modernize later to maint).
+ (merge 2939494284 ly/run-builtin-use-passed-in-repo later to maint).
+ (merge ff73f375bb jg/mailinfo-leakfix later to maint).
+ (merge 996f14c02b jj/doc-branch-markup-fix later to maint).
+ (merge 1e77de1864 cb/ci-freebsd-update-to-14.3 later to maint).
+ (merge b0e9d25865 jk/fix-leak-send-pack later to maint).
+ (merge f3a9558c8c bs/remote-helpers-doc-markup-fix later to maint).
+ (merge c4e9775c60 kh/doc-config-subcommands later to maint).
+ (merge de404249ab ps/perlless-test-fixes later to maint).
+ (merge 953049eed8 ts/merge-orig-head-doc-fix later to maint).
+ (merge 0c83bbc704 rj/freebsd-sysinfo-build-fix later to maint).
+ (merge ad7780b38f ps/doc-pack-refs-auto-with-files-backend-fix later to maint).
+ (merge f4fa8a3687 rh/doc-glob-pathspec-fix later to maint).
+ (merge b27be108c8 ja/doc-git-log-markup later to maint).
+ (merge 14d7583beb pw/config-kvi-remove-path later to maint).
diff --git a/Documentation/SubmittingPatches b/Documentation/SubmittingPatches
index 958e3cc3d5..86ca7f6a78 100644
--- a/Documentation/SubmittingPatches
+++ b/Documentation/SubmittingPatches
@@ -408,8 +408,15 @@ your patch differs from project to project, so it may be different
from that of the project you are accustomed to.
[[real-name]]
-Also notice that a real name is used in the `Signed-off-by` trailer. Please
-don't hide your real name.
+Please use a known identity in the `Signed-off-by` trailer, since we cannot
+accept anonymous contributions. It is common, but not required, to use some form
+of your real name. We realize that some contributors are not comfortable doing
+so or prefer to contribute under a pseudonym or preferred name and we can accept
+your patch either way, as long as the name and email you use are distinctive,
+identifying, and not misleading.
+
+The goal of this policy is to allow us to have sufficient information to contact
+you if questions arise about your contribution.
[[commit-trailers]]
If you like, you can put extra trailers at the end:
diff --git a/Documentation/asciidoc.conf.in b/Documentation/asciidoc.conf.in
index 9d9139306e..ff9ea0a294 100644
--- a/Documentation/asciidoc.conf.in
+++ b/Documentation/asciidoc.conf.in
@@ -43,7 +43,7 @@ ifdef::doctype-book[]
endif::doctype-book[]
[literal-inlinemacro]
-{eval:re.sub(r'(&lt;[-a-zA-Z0-9.]+&gt;)', r'<emphasis>\1</emphasis>', re.sub(r'([\[\s|()>]|^|\]|&gt;)(\.?([-a-zA-Z0-9:+=~@\\\*\/_^\$]+\.?)+|,)',r'\1<literal>\2</literal>', re.sub(r'(\.\.\.?)([^\]$.])', r'<literal>\1</literal>\2', macros.passthroughs[int(attrs['passtext'][1:-1])] if attrs['passtext'][1:-1].isnumeric() else attrs['passtext'][1:-1])))}
+{eval:re.sub(r'(&lt;[-a-zA-Z0-9.]+&gt;)', r'<emphasis>\1</emphasis>', re.sub(r'([\[\s|()>]|^|\]|&gt;)(\.?([-a-zA-Z0-9:+=~@\\\*\/_^\$%]+\.?)+|,)',r'\1<literal>\2</literal>', re.sub(r'(\.\.\.?)([^\]$.])', r'<literal>\1</literal>\2', macros.passthroughs[int(attrs['passtext'][1:-1])] if attrs['passtext'][1:-1].isnumeric() else attrs['passtext'][1:-1])))}
endif::backend-docbook[]
diff --git a/Documentation/asciidoctor-extensions.rb.in b/Documentation/asciidoctor-extensions.rb.in
index 8b7b161349..fe64a62d96 100644
--- a/Documentation/asciidoctor-extensions.rb.in
+++ b/Documentation/asciidoctor-extensions.rb.in
@@ -73,7 +73,7 @@ module Git
elsif type == :monospaced
node.text.gsub(/(\.\.\.?)([^\]$\.])/, '<literal>\1</literal>\2')
.gsub(/^\.\.\.?$/, '<literal>\0</literal>')
- .gsub(%r{([\[\s|()>.]|^|\]|&gt;)(\.?([-a-zA-Z0-9:+=~@/_^\$\\\*]+\.{0,2})+|,)}, '\1<literal>\2</literal>')
+ .gsub(%r{([\[\s|()>.]|^|\]|&gt;)(\.?([-a-zA-Z0-9:+=~@/_^\$\\\*%]+\.{0,2})+|,)}, '\1<literal>\2</literal>')
.gsub(/(&lt;[-a-zA-Z0-9.]+&gt;)/, '<emphasis>\1</emphasis>')
else
open, close, supports_phrase = QUOTE_TAGS[type]
@@ -102,7 +102,7 @@ module Git
if node.type == :monospaced
node.text.gsub(/(\.\.\.?)([^\]$.])/, '<code>\1</code>\2')
.gsub(/^\.\.\.?$/, '<code>\0</code>')
- .gsub(%r{([\[\s|()>.]|^|\]|&gt;)(\.?([-a-zA-Z0-9:+=~@,/_^\$\\\*]+\.{0,2})+)}, '\1<code>\2</code>')
+ .gsub(%r{([\[\s|()>.]|^|\]|&gt;)(\.?([-a-zA-Z0-9:+=~@,/_^\$\\\*%]+\.{0,2})+)}, '\1<code>\2</code>')
.gsub(/(&lt;[-a-zA-Z0-9.]+&gt;)/, '<em>\1</em>')
else
diff --git a/Documentation/config/branch.adoc b/Documentation/config/branch.adoc
index e35ea7ac64..a4db9fa5c8 100644
--- a/Documentation/config/branch.adoc
+++ b/Documentation/config/branch.adoc
@@ -69,9 +69,9 @@ This option defaults to `never`.
`git fetch`) to lookup the default branch for merging. Without
this option, `git pull` defaults to merge the first refspec fetched.
Specify multiple values to get an octopus merge.
- If you wish to setup `git pull` so that it merges into <name> from
+ If you wish to setup `git pull` so that it merges into _<name>_ from
another branch in the local repository, you can point
- branch.<name>.merge to the desired branch, and use the relative path
+ `branch.<name>.merge` to the desired branch, and use the relative path
setting `.` (a period) for `branch.<name>.remote`.
`branch.<name>.mergeOptions`::
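
The local-merge setup described above, using `.` as the remote so that `git pull` merges from another branch in the same repository, looks roughly like this (branch names are placeholders):

----
# Make `git pull` on branch "topic" merge from the local branch "main".
git config branch.topic.remote .
git config branch.topic.merge refs/heads/main
----
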
diff --git a/Documentation/config/feature.adoc b/Documentation/config/feature.adoc
index f061b64b74..924f5ff4e3 100644
--- a/Documentation/config/feature.adoc
+++ b/Documentation/config/feature.adoc
@@ -20,6 +20,16 @@ walking fewer objects.
+
* `pack.allowPackReuse=multi` may improve the time it takes to create a pack by
reusing objects from multiple packs instead of just one.
++
+* `pack.usePathWalk` may speed up packfile creation and make the packfiles
+significantly smaller in the presence of certain filename collisions with Git's
+default name-hash.
++
+* `init.defaultRefFormat=reftable` causes newly initialized repositories to use
+the reftable format for storing references. This new format solves issues with
+case-insensitive filesystems, compresses better and performs significantly
+better with many use cases. Refer to Documentation/technical/reftable.adoc for
+more information on this new storage format.
feature.manyFiles::
Enable config options that optimize for repos with many files in the
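
The two knobs added to `feature.experimental` above can also be enabled individually; a sketch using only the settings named in this hunk:

----
# Opt into everything covered by the experimental feature flag...
git config --global feature.experimental true

# ...or turn on the new bits one at a time.
git config --global pack.usePathWalk true
git config --global init.defaultRefFormat reftable
----
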
diff --git a/Documentation/config/format.adoc b/Documentation/config/format.adoc
index 7410e930e5..ab0710e86a 100644
--- a/Documentation/config/format.adoc
+++ b/Documentation/config/format.adoc
@@ -68,9 +68,15 @@ format.encodeEmailHeaders::
Defaults to true.
format.pretty::
+ifndef::with-breaking-changes[]
The default pretty format for log/show/whatchanged command.
See linkgit:git-log[1], linkgit:git-show[1],
linkgit:git-whatchanged[1].
+endif::with-breaking-changes[]
+ifdef::with-breaking-changes[]
+ The default pretty format for log/show command.
+ See linkgit:git-log[1], linkgit:git-show[1].
+endif::with-breaking-changes[]
format.thread::
The default threading style for 'git format-patch'. Can be
diff --git a/Documentation/config/gpg.adoc b/Documentation/config/gpg.adoc
index 5cf32b179d..240e46c050 100644
--- a/Documentation/config/gpg.adoc
+++ b/Documentation/config/gpg.adoc
@@ -1,5 +1,5 @@
gpg.program::
- Use this custom program instead of "`gpg`" found on `$PATH` when
+ Pathname of the program to use instead of "`gpg`" when
making or verifying a PGP signature. The program must support the
same command-line interface as GPG, namely, to verify a detached
signature, "`gpg --verify $signature - <$file`" is run, and the
diff --git a/Documentation/config/imap.adoc b/Documentation/config/imap.adoc
index 3d28f72643..4682a6bd03 100644
--- a/Documentation/config/imap.adoc
+++ b/Documentation/config/imap.adoc
@@ -1,7 +1,9 @@
imap.folder::
The folder to drop the mails into, which is typically the Drafts
- folder. For example: "INBOX.Drafts", "INBOX/Drafts" or
- "[Gmail]/Drafts". Required.
+ folder. For example: `INBOX.Drafts`, `INBOX/Drafts` or
+ `[Gmail]/Drafts`. The IMAP folder to interact with MUST be specified;
+ the value of this configuration variable is used as the fallback
+ default value when the `--folder` option is not given.
imap.tunnel::
Command used to set up a tunnel to the IMAP server through which
@@ -40,5 +42,6 @@ imap.authMethod::
Specify the authentication method for authenticating with the IMAP server.
If Git was built with the NO_CURL option, or if your curl version is older
than 7.34.0, or if you're running git-imap-send with the `--no-curl`
- option, the only supported method is 'CRAM-MD5'. If this is not set
- then 'git imap-send' uses the basic IMAP plaintext LOGIN command.
+ option, the only supported methods are `PLAIN`, `CRAM-MD5`, `OAUTHBEARER`
+ and `XOAUTH2`. If this is not set then `git imap-send` uses the basic IMAP
+ plaintext `LOGIN` command.
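
Putting the updated `imap.folder` and `imap.authMethod` descriptions together, a minimal `git imap-send` setup might be configured as below (host, user and folder values are placeholders; `OAUTHBEARER` is one of the methods listed above):

----
git config --global imap.folder "[Gmail]/Drafts"
git config --global imap.host imaps://imap.gmail.com
git config --global imap.user you@example.com
git config --global imap.authMethod OAUTHBEARER
----
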
diff --git a/Documentation/config/log.adoc b/Documentation/config/log.adoc
index 9003a82191..16e00e8d29 100644
--- a/Documentation/config/log.adoc
+++ b/Documentation/config/log.adoc
@@ -1,64 +1,76 @@
-log.abbrevCommit::
- If true, makes linkgit:git-log[1], linkgit:git-show[1], and
- linkgit:git-whatchanged[1] assume `--abbrev-commit`. You may
+`log.abbrevCommit`::
+ If `true`, make
+ifndef::with-breaking-changes[]
+ linkgit:git-log[1], linkgit:git-show[1], and
+ linkgit:git-whatchanged[1]
+endif::with-breaking-changes[]
+ifdef::with-breaking-changes[]
+ linkgit:git-log[1] and linkgit:git-show[1]
+endif::with-breaking-changes[]
+ assume `--abbrev-commit`. You may
override this option with `--no-abbrev-commit`.
-log.date::
- Set the default date-time mode for the 'log' command.
- Setting a value for log.date is similar to using 'git log''s
+`log.date`::
+ Set the default date-time mode for the `log` command.
+ Setting a value for log.date is similar to using `git log`'s
`--date` option. See linkgit:git-log[1] for details.
+
If the format is set to "auto:foo" and the pager is in use, format
"foo" will be used for the date format. Otherwise, "default" will
be used.
-log.decorate::
+`log.decorate`::
Print out the ref names of any commits that are shown by the log
- command. If 'short' is specified, the ref name prefixes 'refs/heads/',
- 'refs/tags/' and 'refs/remotes/' will not be printed. If 'full' is
- specified, the full ref name (including prefix) will be printed.
- If 'auto' is specified, then if the output is going to a terminal,
- the ref names are shown as if 'short' were given, otherwise no ref
- names are shown. This is the same as the `--decorate` option
- of the `git log`.
+ command. Possible values are:
++
+----
+`short`;; the ref name prefixes `refs/heads/`, `refs/tags/` and
+ `refs/remotes/` are not printed.
+`full`;; the full ref name (including prefix) is printed.
+`auto`;; if the output is going to a terminal,
+ the ref names are shown as if `short` were given, otherwise no ref
+ names are shown.
+----
++
+This is the same as the `--decorate` option of `git log`.
-log.initialDecorationSet::
+`log.initialDecorationSet`::
By default, `git log` only shows decorations for certain known ref
namespaces. If 'all' is specified, then show all refs as
decorations.
-log.excludeDecoration::
+`log.excludeDecoration`::
Exclude the specified patterns from the log decorations. This is
similar to the `--decorate-refs-exclude` command-line option, but
the config option can be overridden by the `--decorate-refs`
option.
-log.diffMerges::
+`log.diffMerges`::
Set diff format to be used when `--diff-merges=on` is
specified, see `--diff-merges` in linkgit:git-log[1] for
details. Defaults to `separate`.
-log.follow::
+`log.follow`::
If `true`, `git log` will act as if the `--follow` option was used when
a single <path> is given. This has the same limitations as `--follow`,
i.e. it cannot be used to follow multiple files and does not work well
on non-linear history.
-log.graphColors::
+`log.graphColors`::
A list of colors, separated by commas, that can be used to draw
history lines in `git log --graph`.
-log.showRoot::
+`log.showRoot`::
If true, the initial commit will be shown as a big creation event.
This is equivalent to a diff against an empty tree.
Tools like linkgit:git-log[1] or linkgit:git-whatchanged[1], which
normally hide the root commit will now show it. True by default.
-log.showSignature::
+`log.showSignature`::
If true, makes linkgit:git-log[1], linkgit:git-show[1], and
linkgit:git-whatchanged[1] assume `--show-signature`.
-log.mailmap::
+`log.mailmap`::
If true, makes linkgit:git-log[1], linkgit:git-show[1], and
linkgit:git-whatchanged[1] assume `--use-mailmap`, otherwise
assume `--no-use-mailmap`. True by default.
diff --git a/Documentation/config/merge.adoc b/Documentation/config/merge.adoc
index 86359f6dd2..15a4c14c38 100644
--- a/Documentation/config/merge.adoc
+++ b/Documentation/config/merge.adoc
@@ -81,8 +81,18 @@ as `false`. Defaults to `conflict`.
attributes" in linkgit:gitattributes[5].
`merge.stat`::
- Whether to print the diffstat between `ORIG_HEAD` and the merge result
- at the end of the merge. True by default.
+	What, if anything, to print about the difference between `ORIG_HEAD`
+	and the merge result at the end of the merge. Possible values are:
++
+--
+`false`;; Show nothing.
+`true`;; Show `git diff --stat --summary ORIG_HEAD`.
+`compact`;; Show `git diff --compact-summary ORIG_HEAD`.
+--
++
+Any unrecognised value (e.g., a value added by a future version of Git)
+is taken as `true` instead of triggering an error. Defaults to `true`.
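++
+For example, the compact form can be selected with:
++
+----
+$ git config merge.stat compact
+----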
`merge.autoStash`::
When set to `true`, automatically create a temporary stash entry
diff --git a/Documentation/config/pack.adoc b/Documentation/config/pack.adoc
index da527377fa..75402d5579 100644
--- a/Documentation/config/pack.adoc
+++ b/Documentation/config/pack.adoc
@@ -155,6 +155,10 @@ pack.useSparse::
commits contain certain types of direct renames. Default is
`true`.
+pack.usePathWalk::
+ Enable the `--path-walk` option by default for `git pack-objects`
+ processes. See linkgit:git-pack-objects[1] for full details.
+
pack.preferBitmapTips::
When selecting which commits will receive bitmaps, prefer a
commit at the tip of any reference that is a suffix of any value
diff --git a/Documentation/config/repack.adoc b/Documentation/config/repack.adoc
index c79af6d7b8..e9e78dcb19 100644
--- a/Documentation/config/repack.adoc
+++ b/Documentation/config/repack.adoc
@@ -39,3 +39,10 @@ repack.cruftThreads::
a cruft pack and the respective parameters are not given over
the command line. See similarly named `pack.*` configuration
variables for defaults and meaning.
+
+repack.midxMustContainCruft::
+ When set to true, linkgit:git-repack[1] will unconditionally include
+ cruft pack(s), if any, in the multi-pack index when invoked with
+ `--write-midx`. When false, cruft packs are only included in the MIDX
+ when necessary (e.g., because they might be required to form a
+ reachability closure with MIDX bitmaps). Defaults to true.
diff --git a/Documentation/config/sendemail.adoc b/Documentation/config/sendemail.adoc
index 5ffcfc9f2a..4722334657 100644
--- a/Documentation/config/sendemail.adoc
+++ b/Documentation/config/sendemail.adoc
@@ -1,38 +1,38 @@
sendemail.identity::
A configuration identity. When given, causes values in the
- 'sendemail.<identity>' subsection to take precedence over
- values in the 'sendemail' section. The default identity is
+ `sendemail.<identity>` subsection to take precedence over
+ values in the `sendemail` section. The default identity is
the value of `sendemail.identity`.
sendemail.smtpEncryption::
See linkgit:git-send-email[1] for description. Note that this
- setting is not subject to the 'identity' mechanism.
+ setting is not subject to the `identity` mechanism.
sendemail.smtpSSLCertPath::
Path to ca-certificates (either a directory or a single file).
Set it to an empty string to disable certificate verification.
sendemail.<identity>.*::
- Identity-specific versions of the 'sendemail.*' parameters
+ Identity-specific versions of the `sendemail.*` parameters
found below, taking precedence over those when this
identity is selected, through either the command-line or
`sendemail.identity`.
sendemail.multiEdit::
- If true (default), a single editor instance will be spawned to edit
+ If `true` (default), a single editor instance will be spawned to edit
files you have to edit (patches when `--annotate` is used, and the
- summary when `--compose` is used). If false, files will be edited one
+ summary when `--compose` is used). If `false`, files will be edited one
after the other, spawning a new editor each time.
sendemail.confirm::
Sets the default for whether to confirm before sending. Must be
- one of 'always', 'never', 'cc', 'compose', or 'auto'. See `--confirm`
+ one of `always`, `never`, `cc`, `compose`, or `auto`. See `--confirm`
in the linkgit:git-send-email[1] documentation for the meaning of these
values.
sendemail.mailmap::
- If true, makes linkgit:git-send-email[1] assume `--mailmap`,
- otherwise assume `--no-mailmap`. False by default.
+ If `true`, makes linkgit:git-send-email[1] assume `--mailmap`,
+	otherwise assume `--no-mailmap`. `false` by default.
sendemail.mailmap.file::
The location of a linkgit:git-send-email[1] specific augmenting
@@ -51,7 +51,7 @@ sendemail.aliasesFile::
sendemail.aliasFileType::
Format of the file(s) specified in sendemail.aliasesFile. Must be
- one of 'mutt', 'mailrc', 'pine', 'elm', 'gnus', or 'sendmail'.
+ one of `mutt`, `mailrc`, `pine`, `elm`, `gnus`, or `sendmail`.
+
What an alias file in each format looks like can be found in
the documentation of the email program of the same name. The
@@ -96,12 +96,17 @@ sendemail.xmailer::
linkgit:git-send-email[1] command-line options. See its
documentation for details.
+sendemail.outlookidfix::
+ If `true`, makes linkgit:git-send-email[1] assume `--outlook-id-fix`,
+	and if `false` assume `--no-outlook-id-fix`. If not specified, the
+	behavior is the same as when neither command-line option is given.
+
sendemail.signedOffCc (deprecated)::
Deprecated alias for `sendemail.signedOffByCc`.
sendemail.smtpBatchSize::
Number of messages to be sent per connection, after that a relogin
- will happen. If the value is 0 or undefined, send all messages in
+ will happen. If the value is `0` or undefined, send all messages in
one connection.
See also the `--batch-size` option of linkgit:git-send-email[1].
@@ -111,5 +116,5 @@ sendemail.smtpReloginDelay::
sendemail.forbidSendmailVariables::
To avoid common misconfiguration mistakes, linkgit:git-send-email[1]
- will abort with a warning if any configuration options for "sendmail"
+ will abort with a warning if any configuration options for `sendmail`
exist. Set this variable to bypass the check.
diff --git a/Documentation/diff-generate-patch.adoc b/Documentation/diff-generate-patch.adoc
index e5c813c96f..7b6cdd1980 100644
--- a/Documentation/diff-generate-patch.adoc
+++ b/Documentation/diff-generate-patch.adoc
@@ -138,7 +138,7 @@ or like this (when the `--cc` option is used):
+
[synopsis]
index <hash>,<hash>..<hash>
-mode <mode>,<mode>`..`<mode>
+mode <mode>,<mode>..<mode>
new file mode <mode>
deleted file mode <mode>,<mode>
+
diff --git a/Documentation/diff-options.adoc b/Documentation/diff-options.adoc
index 640eb6e7db..f3a35d8141 100644
--- a/Documentation/diff-options.adoc
+++ b/Documentation/diff-options.adoc
@@ -37,32 +37,32 @@ endif::git-diff[]
endif::git-format-patch[]
ifdef::git-log[]
--m::
+`-m`::
Show diffs for merge commits in the default format. This is
similar to `--diff-merges=on`, except `-m` will
produce no output unless `-p` is given as well.
--c::
+`-c`::
Produce combined diff output for merge commits.
Shortcut for `--diff-merges=combined -p`.
---cc::
+`--cc`::
Produce dense combined diff output for merge commits.
Shortcut for `--diff-merges=dense-combined -p`.
---dd::
+`--dd`::
Produce diff with respect to first parent for both merge and
regular commits.
Shortcut for `--diff-merges=first-parent -p`.
---remerge-diff::
+`--remerge-diff`::
Produce remerge-diff output for merge commits.
Shortcut for `--diff-merges=remerge -p`.
---no-diff-merges::
+`--no-diff-merges`::
Synonym for `--diff-merges=off`.
---diff-merges=<format>::
+`--diff-merges=<format>`::
Specify diff format to be used for merge commits. Default is
{diff-merges-default} unless `--first-parent` is in use, in
which case `first-parent` is the default.
@@ -70,48 +70,54 @@ ifdef::git-log[]
The following formats are supported:
+
--
-off, none::
+`off`::
+`none`::
Disable output of diffs for merge commits. Useful to override
implied value.
-on, m::
+`on`::
+`m`::
Make diff output for merge commits to be shown in the default
format. The default format can be changed using
`log.diffMerges` configuration variable, whose default value
is `separate`.
-first-parent, 1::
+`first-parent`::
+`1`::
Show full diff with respect to first parent. This is the same
format as `--patch` produces for non-merge commits.
-separate::
+`separate`::
Show full diff with respect to each of parents.
Separate log entry and diff is generated for each parent.
-combined, c::
+`combined`::
+`c`::
Show differences from each of the parents to the merge
result simultaneously instead of showing pairwise diff between
a parent and the result one at a time. Furthermore, it lists
only files which were modified from all parents.
-dense-combined, cc::
+`dense-combined`::
+`cc`::
Further compress output produced by `--diff-merges=combined`
by omitting uninteresting hunks whose contents in the parents
have only two variants and the merge result picks one of them
without modification.
-remerge, r::
- Remerge two-parent merge commits to create a temporary tree
+`remerge`::
+`r`::
+	Remerge two-parent merge commits to create a temporary tree
object--potentially containing files with conflict markers
and such. A diff is then shown between that temporary tree
and the actual merge commit.
+--
+
The output emitted when this option is used is subject to change, and
so is its interaction with other options (unless explicitly
documented).
---
---combined-all-paths::
+
+`--combined-all-paths`::
Cause combined diffs (used for merge commits) to
list the name of the file from all parents. It thus only has
effect when `--diff-merges=[dense-]combined` is in use, and
diff --git a/Documentation/git-apply.adoc b/Documentation/git-apply.adoc
index 952518b8af..6c71ee69da 100644
--- a/Documentation/git-apply.adoc
+++ b/Documentation/git-apply.adoc
@@ -75,13 +75,14 @@ OPTIONS
tree. If `--check` is in effect, merely check that it would
apply cleanly to the index entry.
+-N::
--intent-to-add::
When applying the patch only to the working tree, mark new
files to be added to the index later (see `--intent-to-add`
- option in linkgit:git-add[1]). This option is ignored unless
- running in a Git repository and `--index` is not specified.
- Note that `--index` could be implied by other options such
- as `--cached` or `--3way`.
+ option in linkgit:git-add[1]). This option is ignored if
+ `--index` or `--cached` are used, and has no effect outside a Git
+ repository. Note that `--index` could be implied by other options
+ such as `--3way`.
-3::
--3way::
diff --git a/Documentation/git-cat-file.adoc b/Documentation/git-cat-file.adoc
index cde79ad242..180d1ad363 100644
--- a/Documentation/git-cat-file.adoc
+++ b/Documentation/git-cat-file.adoc
@@ -307,6 +307,11 @@ newline. The available atoms are:
`objecttype`::
The type of the object (the same as `cat-file -t` reports).
+`objectmode`::
+ If the specified object has mode information (such as a tree or
+ index entry), the mode expressed as an octal integer. Otherwise,
+ empty string.
+
`objectsize`::
The size, in bytes, of the object (the same as `cat-file -s`
reports).
@@ -368,6 +373,14 @@ If a name is specified that might refer to more than one object (an ambiguous sh
<object> SP ambiguous LF
------------
+If a name is specified that refers to a submodule entry in a tree and the
+target object does not exist in the repository, then `cat-file` will ignore
+any custom format and print (with the object ID of the submodule):
+
+------------
+<oid> SP submodule LF
+------------
+
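+For example, the new `objectmode` atom shown above can be combined with
+the other atoms; with a name that resolves through a tree entry
+(`HEAD:Makefile` is only an illustration) the output might look like:
+
+------------
+$ echo HEAD:Makefile |
+  git cat-file --batch-check='%(objectmode) %(objecttype) %(objectname)'
+100644 blob <oid>
+------------
+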
If `--follow-symlinks` is used, and a symlink in the repository points
outside the repository, then `cat-file` will ignore any custom format
and print:
diff --git a/Documentation/git-config.adoc b/Documentation/git-config.adoc
index 936e0c5130..511b2e26bf 100644
--- a/Documentation/git-config.adoc
+++ b/Documentation/git-config.adoc
@@ -10,9 +10,9 @@ SYNOPSIS
--------
[verse]
'git config list' [<file-option>] [<display-option>] [--includes]
-'git config get' [<file-option>] [<display-option>] [--includes] [--all] [--regexp] [--value=<value>] [--fixed-value] [--default=<default>] <name>
-'git config set' [<file-option>] [--type=<type>] [--all] [--value=<value>] [--fixed-value] <name> <value>
-'git config unset' [<file-option>] [--all] [--value=<value>] [--fixed-value] <name>
+'git config get' [<file-option>] [<display-option>] [--includes] [--all] [--regexp] [--value=<pattern>] [--fixed-value] [--default=<default>] [--url=<url>] <name>
+'git config set' [<file-option>] [--type=<type>] [--all] [--value=<pattern>] [--fixed-value] <name> <value>
+'git config unset' [<file-option>] [--all] [--value=<pattern>] [--fixed-value] <name>
'git config rename-section' [<file-option>] <old-name> <new-name>
'git config remove-section' [<file-option>] <name>
'git config edit' [<file-option>]
@@ -26,7 +26,7 @@ escaped.
Multiple lines can be added to an option by using the `--append` option.
If you want to update or unset an option which can occur on multiple
-lines, a `value-pattern` (which is an extended regular expression,
+lines, `--value=<pattern>` (which is an extended regular expression,
unless the `--fixed-value` option is given) needs to be given. Only the
existing values that match the pattern are updated or unset. If
you want to handle the lines that do *not* match the pattern, just
@@ -109,7 +109,7 @@ OPTIONS
--replace-all::
Default behavior is to replace at most one line. This replaces
- all lines matching the key (and optionally the `value-pattern`).
+ all lines matching the key (and optionally `--value=<pattern>`).
--append::
Adds a new line to the option without altering any existing
@@ -200,11 +200,19 @@ See also <<FILES>>.
section in linkgit:gitrevisions[7] for a more complete list of
ways to spell blob names.
+`--value=<pattern>`::
+`--no-value`::
+	With `get`, `set`, and `unset`, act only on values that match
+ _<pattern>_. The pattern is an extended regular expression unless
+ `--fixed-value` is given.
++
+Use `--no-value` to unset _<pattern>_.
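++
+For example, to show only the `remote.origin.fetch` values that mention
+`refs/heads/` (the key and pattern here are illustrative):
++
+------------
+$ git config get --all --value='refs/heads/' remote.origin.fetch
+------------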
+
--fixed-value::
- When used with the `value-pattern` argument, treat `value-pattern` as
+ When used with `--value=<pattern>`, treat _<pattern>_ as
an exact string instead of a regular expression. This will restrict
the name/value pairs that are matched to only those where the value
- is exactly equal to the `value-pattern`.
+ is exactly equal to _<pattern>_.
--type <type>::
'git config' will ensure that any input or output is valid under the given
@@ -259,6 +267,12 @@ Valid `<type>`'s include:
Output only the names of config variables for `list` or
`get`.
+`--show-names`::
+`--no-show-names`::
+ With `get`, show config keys in addition to their values. The
+ default is `--no-show-names` unless `--url` is given and there
+ are no subsections in _<name>_.
+
--show-origin::
Augment the output of all queried config options with the
origin type (file, standard input, blob, command line) and
diff --git a/Documentation/git-diff.adoc b/Documentation/git-diff.adoc
index dec173a345..272331afba 100644
--- a/Documentation/git-diff.adoc
+++ b/Documentation/git-diff.adoc
@@ -14,7 +14,7 @@ git diff [<options>] --cached [--merge-base] [<commit>] [--] [<path>...]
git diff [<options>] [--merge-base] <commit> [<commit>...] <commit> [--] [<path>...]
git diff [<options>] <commit>...<commit> [--] [<path>...]
git diff [<options>] <blob> <blob>
-git diff [<options>] --no-index [--] <path> <path>
+git diff [<options>] --no-index [--] <path> <path> [<pathspec>...]
DESCRIPTION
-----------
@@ -31,14 +31,18 @@ files on disk.
further add to the index but you still haven't. You can
stage these changes by using linkgit:git-add[1].
-`git diff [<options>] --no-index [--] <path> <path>`::
+`git diff [<options>] --no-index [--] <path> <path> [<pathspec>...]`::
This form is to compare the given two paths on the
filesystem. You can omit the `--no-index` option when
running the command in a working tree controlled by Git and
at least one of the paths points outside the working tree,
or when running the command outside a working tree
- controlled by Git. This form implies `--exit-code`.
+ controlled by Git. This form implies `--exit-code`. If both
+ paths point to directories, additional pathspecs may be
+ provided. These will limit the files included in the
+ difference. All such pathspecs must be relative as they
+ apply to both sides of the diff.
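++
+For example (the directory names and the pathspec are illustrative):
++
+------------
+$ git diff --no-index old-dir/ new-dir/ '*.h'
+------------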
`git diff [<options>] --cached [--merge-base] [<commit>] [--] [<path>...]`::
diff --git a/Documentation/git-fast-export.adoc b/Documentation/git-fast-export.adoc
index 43bbb4f63c..297b57bb2e 100644
--- a/Documentation/git-fast-export.adoc
+++ b/Documentation/git-fast-export.adoc
@@ -50,6 +50,23 @@ resulting tag will have an invalid signature.
is the same as how earlier versions of this command without
this option behaved.
+
+When exported, a signature starts with:
++
+`gpgsig <git-hash-algo> <signature-format>`
++
+where `<git-hash-algo>` is the Git object hash algorithm, either `sha1`
+or `sha256`, and `<signature-format>` is the signature type: `openpgp`,
+`x509`, `ssh` or `unknown`.
++
+For example, an OpenPGP signature on a SHA-1 commit starts with
+`gpgsig sha1 openpgp`, while an SSH signature on a SHA-256 commit
+starts with `gpgsig sha256 ssh`.
++
+While all the signatures of a commit are exported, an importer may
+choose to accept only some of them. For example
+linkgit:git-fast-import[1] currently stores at most one signature per
+Git hash algorithm in each commit.
++
NOTE: This is highly experimental and the format of the data stream may
change in the future without compatibility guarantees.
diff --git a/Documentation/git-fast-import.adoc b/Documentation/git-fast-import.adoc
index 250d866652..d232784200 100644
--- a/Documentation/git-fast-import.adoc
+++ b/Documentation/git-fast-import.adoc
@@ -445,7 +445,7 @@ one).
original-oid?
('author' (SP <name>)? SP LT <email> GT SP <when> LF)?
'committer' (SP <name>)? SP LT <email> GT SP <when> LF
- ('gpgsig' SP <alg> LF data)?
+	('gpgsig' SP <git-hash-algo> SP <signature-format> LF data)?
('encoding' SP <encoding> LF)?
data
('from' SP <commit-ish> LF)?
@@ -518,13 +518,39 @@ their syntax.
^^^^^^^^
The optional `gpgsig` command is used to include a PGP/GPG signature
-that signs the commit data.
+or other cryptographic signature that signs the commit data.
-Here <alg> specifies which hashing algorithm is used for this
-signature, either `sha1` or `sha256`.
+....
+ 'gpgsig' SP <git-hash-algo> SP <signature-format> LF data
+....
+
+The `gpgsig` command takes two arguments:
+
+* `<git-hash-algo>` specifies which Git object format this signature
+  applies to, either `sha1` or `sha256`. This makes it clear which
+  representation of the commit was signed (the SHA-1 or the SHA-256
+  version), which helps with both signature verification and
+  interoperability between repositories with different hash functions.
+
+* `<signature-format>` specifies the type of signature, such as
+ `openpgp`, `x509`, `ssh`, or `unknown`. This is a convenience for
+ tools that process the stream, so they don't have to parse the ASCII
+ armor to identify the signature type.
+
+A commit may have at most one signature for the SHA-1 object format
+(stored in the "gpgsig" header) and one for the SHA-256 object format
+(stored in the "gpgsig-sha256" header).
+
+See below for a detailed description of the `data` command which
+contains the raw signature data.
+
+The current implementation does not yet check signatures. (Setting the
+`extensions.compatObjectFormat` configuration option in advance might
+help with verifying both SHA-1 and SHA-256 object format signatures
+once such checking is implemented.)
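+
+For example, a commit carrying an OpenPGP signature made over its SHA-1
+representation might appear in a stream like this (the armored signature
+body is elided):
+
+....
+	commit refs/heads/master
+	committer C O Mitter <committer@example.com> 1112912893 -0700
+	gpgsig sha1 openpgp
+	data <<END_OF_SIGNATURE
+	-----BEGIN PGP SIGNATURE-----
+	-----END PGP SIGNATURE-----
+	END_OF_SIGNATURE
+	data <<END_OF_MESSAGE
+	Signed commit.
+	END_OF_MESSAGE
+....
+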
-NOTE: This is highly experimental and the format of the data stream may
-change in the future without compatibility guarantees.
+NOTE: This is highly experimental and the format of the `gpgsig`
+command may change in the future without compatibility guarantees.
`encoding`
^^^^^^^^^^
diff --git a/Documentation/git-imap-send.adoc b/Documentation/git-imap-send.adoc
index 26ccf4e433..17147f93c3 100644
--- a/Documentation/git-imap-send.adoc
+++ b/Documentation/git-imap-send.adoc
@@ -9,21 +9,24 @@ git-imap-send - Send a collection of patches from stdin to an IMAP folder
SYNOPSIS
--------
[verse]
-'git imap-send' [-v] [-q] [--[no-]curl]
+'git imap-send' [-v] [-q] [--[no-]curl] [(--folder|-f) <folder>]
+'git imap-send' --list
DESCRIPTION
-----------
-This command uploads a mailbox generated with 'git format-patch'
+This command uploads a mailbox generated with `git format-patch`
into an IMAP drafts folder. This allows patches to be sent as
other email is when using mail clients that cannot read mailbox
files directly. The command also works with any general mailbox
-in which emails have the fields "From", "Date", and "Subject" in
+in which emails have the fields `From`, `Date`, and `Subject` in
that order.
Typical usage is something like:
-git format-patch --signoff --stdout --attach origin | git imap-send
+------
+$ git format-patch --signoff --stdout --attach origin | git imap-send
+------
OPTIONS
@@ -37,6 +40,11 @@ OPTIONS
--quiet::
Be quiet.
+-f <folder>::
+--folder=<folder>::
+	Specify the folder in which the emails have to be saved.
+ For example: `--folder=[Gmail]/Drafts` or `-f INBOX/Drafts`.
+
--curl::
Use libcurl to communicate with the IMAP server, unless tunneling
into it. Ignored if Git was built without the USE_CURL_FOR_IMAP_SEND
@@ -47,6 +55,8 @@ OPTIONS
using libcurl. Ignored if Git was built with the NO_OPENSSL option
set.
+--list::
+	Run the IMAP `LIST` command to output a list of all the folders present.
CONFIGURATION
-------------
@@ -102,20 +112,56 @@ Using Gmail's IMAP interface:
---------
[imap]
- folder = "[Gmail]/Drafts"
- host = imaps://imap.gmail.com
- user = user@gmail.com
- port = 993
+ folder = "[Gmail]/Drafts"
+ host = imaps://imap.gmail.com
+ user = user@gmail.com
+ port = 993
---------
+Gmail does not allow using your regular password for `git imap-send`.
+If you have multi-factor authentication set up on your Gmail account, you
+can generate an app-specific password for use with `git imap-send`.
+Visit https://security.google.com/settings/security/apppasswords to create
+it. Alternatively, use OAuth2.0 authentication as described below.
+
[NOTE]
You might need to instead use: `folder = "[Google Mail]/Drafts"` if you get an error
-that the "Folder doesn't exist".
+that the "Folder doesn't exist". You can also run `git imap-send --list` to get a
+list of available folders.
[NOTE]
If your Gmail account is set to another language than English, the name of the "Drafts"
folder will be localized.
+If you want to use OAuth2.0-based authentication, you can specify the
+`OAUTHBEARER` or `XOAUTH2` mechanism in your config. It is more secure
+than using app-specific passwords and does not require multi-factor
+authentication to be enabled. You will have to use an OAuth2.0 access
+token in place of your password with this authentication method.
+
+---------
+[imap]
+ folder = "[Gmail]/Drafts"
+ host = imaps://imap.gmail.com
+ user = user@gmail.com
+ port = 993
+ authmethod = OAUTHBEARER
+---------
+
+Using Outlook's IMAP interface:
+
+Unlike Gmail, Outlook supports only OAuth2.0-based authentication, and
+only with the `XOAUTH2` mechanism.
+
+---------
+[imap]
+ folder = "Drafts"
+ host = imaps://outlook.office365.com
+ user = user@outlook.com
+ port = 993
+ authmethod = XOAUTH2
+---------
+
Once the commits are ready to be sent, run the following command:
$ git format-patch --cover-letter -M --stdout origin/master | git imap-send
@@ -124,6 +170,10 @@ Just make sure to disable line wrapping in the email client (Gmail's web
interface will wrap lines no matter what, so you need to use a real
IMAP client).
+If you are using OAuth2.0 authentication, it is easier to generate tokens
+with a credential helper. The credential helpers suggested in
+linkgit:git-send-email[1] can be used for `git imap-send` as well.
+
CAUTION
-------
It is still your responsibility to make sure that the email message
diff --git a/Documentation/git-log.adoc b/Documentation/git-log.adoc
index ae8a7e2d63..b6f3d92c43 100644
--- a/Documentation/git-log.adoc
+++ b/Documentation/git-log.adoc
@@ -8,8 +8,8 @@ git-log - Show commit logs
SYNOPSIS
--------
-[verse]
-'git log' [<options>] [<revision-range>] [[--] <path>...]
+[synopsis]
+git log [<options>] [<revision-range>] [[--] <path>...]
DESCRIPTION
-----------
@@ -27,28 +27,34 @@ each commit introduces are shown.
OPTIONS
-------
---follow::
+`--follow`::
Continue listing the history of a file beyond renames
(works only for a single file).
---no-decorate::
---decorate[=short|full|auto|no]::
- Print out the ref names of any commits that are shown. If 'short' is
- specified, the ref name prefixes 'refs/heads/', 'refs/tags/' and
- 'refs/remotes/' will not be printed. If 'full' is specified, the
- full ref name (including prefix) will be printed. If 'auto' is
- specified, then if the output is going to a terminal, the ref names
- are shown as if 'short' were given, otherwise no ref names are
- shown. The option `--decorate` is short-hand for `--decorate=short`.
- Default to configuration value of `log.decorate` if configured,
- otherwise, `auto`.
-
---decorate-refs=<pattern>::
---decorate-refs-exclude=<pattern>::
+`--no-decorate`::
+`--decorate[=(short|full|auto|no)]`::
+ Print out the ref names of any commits that are shown. Possible values
+ are:
++
+----
+`short`;; the ref name prefixes `refs/heads/`, `refs/tags/` and
+ `refs/remotes/` are not printed.
+`full`;; the full ref name (including prefix) is printed.
+`auto`;; if the output is going to a terminal, the ref names
+ are shown as if `short` were given, otherwise no ref names are
+ shown.
+----
++
+The option `--decorate` is short-hand for `--decorate=short`. Defaults to
+the value of `log.decorate` if configured, otherwise `auto`.
+
+`--decorate-refs=<pattern>`::
+`--decorate-refs-exclude=<pattern>`::
For each candidate reference, do not use it for decoration if it
- matches any patterns given to `--decorate-refs-exclude` or if it
- doesn't match any of the patterns given to `--decorate-refs`. The
- `log.excludeDecoration` config option allows excluding refs from
+ matches any of the _<pattern>_ parameters given to
+ `--decorate-refs-exclude` or if it doesn't match any of the
+ _<pattern>_ parameters given to `--decorate-refs`.
+ The `log.excludeDecoration` config option allows excluding refs from
the decorations, but an explicit `--decorate-refs` pattern will
override a match in `log.excludeDecoration`.
+
@@ -56,51 +62,51 @@ If none of these options or config settings are given, then references are
used as decoration if they match `HEAD`, `refs/heads/`, `refs/remotes/`,
`refs/stash/`, or `refs/tags/`.
---clear-decorations::
+`--clear-decorations`::
When specified, this option clears all previous `--decorate-refs`
or `--decorate-refs-exclude` options and relaxes the default
decoration filter to include all references. This option is
assumed if the config value `log.initialDecorationSet` is set to
`all`.
---source::
+`--source`::
Print out the ref name given on the command line by which each
commit was reached.
---[no-]mailmap::
---[no-]use-mailmap::
+`--[no-]mailmap`::
+`--[no-]use-mailmap`::
Use mailmap file to map author and committer names and email
addresses to canonical real names and email addresses. See
linkgit:git-shortlog[1].
---full-diff::
+`--full-diff`::
Without this flag, `git log -p <path>...` shows commits that
touch the specified paths, and diffs about the same specified
paths. With this, the full diff is shown for commits that touch
- the specified paths; this means that "<path>..." limits only
+ the specified paths; this means that "`<path>...`" limits only
commits, and doesn't limit diff for those commits.
+
Note that this affects all diff-based output types, e.g. those
produced by `--stat`, etc.
---log-size::
- Include a line ``log size <number>'' in the output for each commit,
- where <number> is the length of that commit's message in bytes.
+`--log-size`::
+ Include a line `log size <number>` in the output for each commit,
+ where _<number>_ is the length of that commit's message in bytes.
Intended to speed up tools that read log messages from `git log`
output by allowing them to allocate space in advance.
include::line-range-options.adoc[]
-<revision-range>::
+_<revision-range>_::
Show only commits in the specified revision range. When no
- <revision-range> is specified, it defaults to `HEAD` (i.e. the
+ _<revision-range>_ is specified, it defaults to `HEAD` (i.e. the
whole history leading to the current commit). `origin..HEAD`
specifies all the commits reachable from the current commit
(i.e. `HEAD`), but not from `origin`. For a complete list of
- ways to spell <revision-range>, see the 'Specifying Ranges'
+ ways to spell _<revision-range>_, see the 'Specifying Ranges'
section of linkgit:gitrevisions[7].
-[--] <path>...::
+`[--] <path>...`::
Show only commits that are enough to explain how the files
that match the specified paths came to be. See 'History
Simplification' below for details and other simplification
@@ -145,14 +151,14 @@ EXAMPLES
`git log --since="2 weeks ago" -- gitk`::
- Show the changes during the last two weeks to the file 'gitk'.
+ Show the changes during the last two weeks to the file `gitk`.
The `--` is necessary to avoid confusion with the *branch* named
- 'gitk'
+ `gitk`
`git log --name-status release..test`::
- Show the commits that are in the "test" branch but not yet
- in the "release" branch, along with the list of paths
+ Show the commits that are in the "`test`" branch but not yet
+ in the "`release`" branch, along with the list of paths
each commit modifies.
`git log --follow builtin/rev-list.c`::
@@ -164,7 +170,7 @@ EXAMPLES
`git log --branches --not --remotes=origin`::
Shows all commits that are in any of local branches but not in
- any of remote-tracking branches for 'origin' (what you have that
+ any of remote-tracking branches for `origin` (what you have that
origin doesn't).
`git log master --not --remotes=*/master`::
@@ -200,11 +206,11 @@ CONFIGURATION
See linkgit:git-config[1] for core variables and linkgit:git-diff[1]
for settings related to diff generation.
-format.pretty::
+`format.pretty`::
Default for the `--format` option. (See 'Pretty Formats' above.)
Defaults to `medium`.
-i18n.logOutputEncoding::
+`i18n.logOutputEncoding`::
Encoding to use when displaying logs. (See 'Discussion' above.)
Defaults to the value of `i18n.commitEncoding` if set, and UTF-8
otherwise.
diff --git a/Documentation/git-merge.adoc b/Documentation/git-merge.adoc
index 12aa859d16..a055384ad6 100644
--- a/Documentation/git-merge.adoc
+++ b/Documentation/git-merge.adoc
@@ -9,7 +9,7 @@ git-merge - Join two or more development histories together
SYNOPSIS
--------
[synopsis]
-git merge [-n] [--stat] [--no-commit] [--squash] [--[no-]edit]
+git merge [-n] [--stat] [--compact-summary] [--no-commit] [--squash] [--[no-]edit]
[--no-verify] [-s <strategy>] [-X <strategy-option>] [-S[<keyid>]]
[--[no-]allow-unrelated-histories]
[--[no-]rerere-autoupdate] [-m <msg>] [-F <file>]
@@ -28,8 +28,8 @@ Assume the following history exists and the current branch is
`master`:
------------
- A---B---C topic
- /
+ A---B---C topic
+ /
D---E---F---G master
------------
@@ -38,11 +38,11 @@ Then `git merge topic` will replay the changes made on the
its current commit (`C`) on top of `master`, and record the result
in a new commit along with the names of the two parent commits and
a log message from the user describing the changes. Before the operation,
-`ORIG_HEAD` is set to the tip of the current branch (`C`).
+`ORIG_HEAD` is set to the tip of the current branch (`G`).
------------
- A---B---C topic
- / \
+ A---B---C topic
+ / \
D---E---F---G---H master
------------
diff --git a/Documentation/git-pack-objects.adoc b/Documentation/git-pack-objects.adoc
index 7f69ae4855..eba014c406 100644
--- a/Documentation/git-pack-objects.adoc
+++ b/Documentation/git-pack-objects.adoc
@@ -10,13 +10,13 @@ SYNOPSIS
--------
[verse]
'git pack-objects' [-q | --progress | --all-progress] [--all-progress-implied]
- [--no-reuse-delta] [--delta-base-offset] [--non-empty]
- [--local] [--incremental] [--window=<n>] [--depth=<n>]
- [--revs [--unpacked | --all]] [--keep-pack=<pack-name>]
- [--cruft] [--cruft-expiration=<time>]
- [--stdout [--filter=<filter-spec>] | <base-name>]
- [--shallow] [--keep-true-parents] [--[no-]sparse]
- [--name-hash-version=<n>] < <object-list>
+ [--no-reuse-delta] [--delta-base-offset] [--non-empty]
+ [--local] [--incremental] [--window=<n>] [--depth=<n>]
+ [--revs [--unpacked | --all]] [--keep-pack=<pack-name>]
+ [--cruft] [--cruft-expiration=<time>]
+ [--stdout [--filter=<filter-spec>] | <base-name>]
+ [--shallow] [--keep-true-parents] [--[no-]sparse]
+ [--name-hash-version=<n>] [--path-walk] < <object-list>
DESCRIPTION
@@ -87,13 +87,21 @@ base-name::
reference was included in the resulting packfile. This
can be useful to send new tags to native Git clients.
---stdin-packs::
+--stdin-packs[=<mode>]::
Read the basenames of packfiles (e.g., `pack-1234abcd.pack`)
from the standard input, instead of object names or revision
arguments. The resulting pack contains all objects listed in the
included packs (those not beginning with `^`), excluding any
objects listed in the excluded packs (beginning with `^`).
+
+When `<mode>` is `follow`, objects from packs not listed on stdin receive
+special treatment. Objects within unlisted packs will be included if
+those objects are (1) reachable from the included packs, and (2) not
+found in any excluded packs. This mode is useful, for example, to
+resurrect once-unreachable objects found in cruft packs to generate
+packs which are closed under reachability up to the boundary set by the
+excluded packs.
++
Incompatible with `--revs`, or options that imply `--revs` (such as
`--all`), with the exception of `--unpacked`, which is compatible.
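++
+A minimal illustration (the pack names and the `my-pack` output base name
+are placeholders):
++
+------------
+$ git pack-objects --stdin-packs=follow my-pack <<EOF
+pack-1234abcd.pack
+^pack-5eba1d6c.pack
+EOF
+------------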
@@ -375,6 +383,17 @@ many different directories. At the moment, this version is not allowed
when writing reachability bitmap files with `--write-bitmap-index` and it
will be automatically changed to version `1`.
+--path-walk::
+	Perform compression by first organizing objects by path, then making
+	a second pass that compresses across paths as normal. This has the
+ potential to improve delta compression especially in the presence
+ of filenames that cause collisions in Git's default name-hash
+ algorithm.
++
+Incompatible with `--delta-islands`, `--shallow`, or `--filter`. The
+`--use-bitmap-index` option will be ignored in the presence of
+`--path-walk`.
+
DELTA ISLANDS
-------------
diff --git a/Documentation/git-pack-refs.adoc b/Documentation/git-pack-refs.adoc
index 652c549771..42b90051e6 100644
--- a/Documentation/git-pack-refs.adoc
+++ b/Documentation/git-pack-refs.adoc
@@ -66,7 +66,10 @@ Pack refs as needed depending on the current state of the ref database. The
behavior depends on the ref format used by the repository and may change in the
future.
+
- - "files": No special handling for `--auto` has been implemented.
+ - "files": Loose references are packed into the `packed-refs` file
+ based on the ratio of loose references to the size of the
+ `packed-refs` file. The bigger the `packed-refs` file, the more loose
+ references need to exist before we repack.
+
- "reftable": Tables are compacted such that they form a geometric
sequence. For two tables N and N+1, where N+1 is newer, this
diff --git a/Documentation/git-repack.adoc b/Documentation/git-repack.adoc
index e1cd75eebe..d12c4985f6 100644
--- a/Documentation/git-repack.adoc
+++ b/Documentation/git-repack.adoc
@@ -11,7 +11,7 @@ SYNOPSIS
[verse]
'git repack' [-a] [-A] [-d] [-f] [-F] [-l] [-n] [-q] [-b] [-m]
[--window=<n>] [--depth=<n>] [--threads=<n>] [--keep-pack=<pack-name>]
- [--write-midx] [--name-hash-version=<n>]
+ [--write-midx] [--name-hash-version=<n>] [--path-walk]
DESCRIPTION
-----------
@@ -258,6 +258,9 @@ linkgit:git-multi-pack-index[1]).
Provide this argument to the underlying `git pack-objects` process.
See linkgit:git-pack-objects[1] for full details.
+--path-walk::
+ Pass the `--path-walk` option to the underlying `git pack-objects`
+ process. See linkgit:git-pack-objects[1] for full details.
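++
+For example, a full repack using the path-walk strategy might look like
+this:
++
+------------
+$ git repack -adf --path-walk
+------------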
CONFIGURATION
-------------
diff --git a/Documentation/git-send-email.adoc b/Documentation/git-send-email.adoc
index 26fda63c2f..5335502d68 100644
--- a/Documentation/git-send-email.adoc
+++ b/Documentation/git-send-email.adoc
@@ -21,7 +21,7 @@ Takes the patches given on the command line and emails them out.
Patches can be specified as files, directories (which will send all
files in the directory), or directly as a revision list. In the
last case, any format accepted by linkgit:git-format-patch[1] can
-be passed to git send-email, as well as options understood by
+be passed to `git send-email`, as well as options understood by
linkgit:git-format-patch[1].
The header of the email is configurable via command-line options. If not
@@ -35,11 +35,11 @@ There are two formats accepted for patch files:
This is what linkgit:git-format-patch[1] generates. Most headers and MIME
formatting are ignored.
-2. The original format used by Greg Kroah-Hartman's 'send_lots_of_email.pl'
+2. The original format used by Greg Kroah-Hartman's `send_lots_of_email.pl`
script
+
-This format expects the first line of the file to contain the "Cc:" value
-and the "Subject:" of the message as the second line.
+This format expects the first line of the file to contain the `Cc:` value
+and the `Subject:` of the message as the second line.
OPTIONS
@@ -54,13 +54,13 @@ Composing
`sendemail.multiEdit`.
--bcc=<address>,...::
- Specify a "Bcc:" value for each email. Default is the value of
+ Specify a `Bcc:` value for each email. Default is the value of
`sendemail.bcc`.
+
This option may be specified multiple times.
--cc=<address>,...::
- Specify a starting "Cc:" value for each email.
+ Specify a starting `Cc:` value for each email.
Default is the value of `sendemail.cc`.
+
This option may be specified multiple times.
@@ -69,14 +69,14 @@ This option may be specified multiple times.
Invoke a text editor (see GIT_EDITOR in linkgit:git-var[1])
to edit an introductory message for the patch series.
+
-When `--compose` is used, git send-email will use the From, To, Cc, Bcc,
-Subject, Reply-To, and In-Reply-To headers specified in the message. If
-the body of the message (what you type after the headers and a blank
-line) only contains blank (or Git: prefixed) lines, the summary won't be
+When `--compose` is used, `git send-email` will use the `From`, `To`, `Cc`,
+`Bcc`, `Subject`, `Reply-To`, and `In-Reply-To` headers specified in the
+message. If the body of the message (what you type after the headers and a
+blank line) only contains blank (or `Git:` prefixed) lines, the summary won't be
sent, but the headers mentioned above will be used unless they are
removed.
+
-Missing From or In-Reply-To headers will be prompted for.
+Missing `From` or `In-Reply-To` headers will be prompted for.
+
See the CONFIGURATION section for `sendemail.multiEdit`.
@@ -85,13 +85,13 @@ See the CONFIGURATION section for `sendemail.multiEdit`.
the value of the `sendemail.from` configuration option is used. If
neither the command-line option nor `sendemail.from` are set, then the
user will be prompted for the value. The default for the prompt will be
- the value of GIT_AUTHOR_IDENT, or GIT_COMMITTER_IDENT if that is not
- set, as returned by "git var -l".
+ the value of `GIT_AUTHOR_IDENT`, or `GIT_COMMITTER_IDENT` if that is not
+ set, as returned by `git var -l`.
--reply-to=<address>::
Specify the address where replies from recipients should go to.
Use this if replies to messages should go to another address than what
- is specified with the --from parameter.
+ is specified with the `--from` parameter.
--in-reply-to=<identifier>::
Make the first mail (or all the mails with `--no-thread`) appear as a
@@ -112,14 +112,14 @@ illustration below where `[PATCH v2 0/3]` is in reply to `[PATCH 0/2]`:
[PATCH v2 2/3] New tests
[PATCH v2 3/3] Implementation
+
-Only necessary if --compose is also set. If --compose
+Only necessary if `--compose` is also set. If `--compose`
is not set, this will be prompted for.
--[no-]outlook-id-fix::
Microsoft Outlook SMTP servers discard the Message-ID sent via email and
assign a new random Message-ID, thus breaking threads.
+
-With `--outlook-id-fix`, 'git send-email' uses a mechanism specific to
+With `--outlook-id-fix`, `git send-email` uses a mechanism specific to
Outlook servers to learn the Message-ID the server assigned to fix the
threading. Use it only when you know that the server reports the
rewritten Message-ID the same way as Outlook servers do.
@@ -130,14 +130,14 @@ to 'smtp.office365.com' or 'smtp-mail.outlook.com'. Use
--subject=<string>::
Specify the initial subject of the email thread.
- Only necessary if --compose is also set. If --compose
+ Only necessary if `--compose` is also set. If `--compose`
is not set, this will be prompted for.
--to=<address>,...::
Specify the primary recipient of the emails generated. Generally, this
will be the upstream maintainer of the project involved. Default is the
value of the `sendemail.to` configuration value; if that is unspecified,
- and --to-cmd is not specified, this will be prompted for.
+ and `--to-cmd` is not specified, this will be prompted for.
+
This option may be specified multiple times.
@@ -145,30 +145,30 @@ This option may be specified multiple times.
When encountering a non-ASCII message or subject that does not
declare its encoding, add headers/quoting to indicate it is
encoded in <encoding>. Default is the value of the
- 'sendemail.assume8bitEncoding'; if that is unspecified, this
+ `sendemail.assume8bitEncoding`; if that is unspecified, this
will be prompted for if any non-ASCII files are encountered.
+
Note that no attempts whatsoever are made to validate the encoding.
--compose-encoding=<encoding>::
Specify encoding of compose message. Default is the value of the
- 'sendemail.composeEncoding'; if that is unspecified, UTF-8 is assumed.
+ `sendemail.composeEncoding`; if that is unspecified, UTF-8 is assumed.
--transfer-encoding=(7bit|8bit|quoted-printable|base64|auto)::
Specify the transfer encoding to be used to send the message over SMTP.
- 7bit will fail upon encountering a non-ASCII message. quoted-printable
+ `7bit` will fail upon encountering a non-ASCII message. `quoted-printable`
can be useful when the repository contains files that contain carriage
- returns, but makes the raw patch email file (as saved from a MUA) much
- harder to inspect manually. base64 is even more fool proof, but also
- even more opaque. auto will use 8bit when possible, and quoted-printable
- otherwise.
+ returns, but makes the raw patch email file (as saved from an MUA) much
+	harder to inspect manually. `base64` is even more foolproof, but also
+ even more opaque. `auto` will use `8bit` when possible, and
+ `quoted-printable` otherwise.
+
Default is the value of the `sendemail.transferEncoding` configuration
value; if that is unspecified, default to `auto`.
--xmailer::
--no-xmailer::
- Add (or prevent adding) the "X-Mailer:" header. By default,
+ Add (or prevent adding) the `X-Mailer:` header. By default,
the header is added, but it can be turned off by setting the
`sendemail.xmailer` configuration variable to `false`.
@@ -178,9 +178,9 @@ Sending
--envelope-sender=<address>::
Specify the envelope sender used to send the emails.
This is useful if your default address is not the address that is
- subscribed to a list. In order to use the 'From' address, set the
- value to "auto". If you use the sendmail binary, you must have
- suitable privileges for the -f parameter. Default is the value of the
+ subscribed to a list. In order to use the `From` address, set the
+ value to `auto`. If you use the `sendmail` binary, you must have
+ suitable privileges for the `-f` parameter. Default is the value of the
`sendemail.envelopeSender` configuration variable; if that is
unspecified, choosing the envelope sender is left to your MTA.
@@ -189,27 +189,27 @@ Sending
be sendmail-like; specifically, it must support the `-i` option.
The command will be executed in the shell if necessary. Default
is the value of `sendemail.sendmailCmd`. If unspecified, and if
- --smtp-server is also unspecified, git-send-email will search
- for `sendmail` in `/usr/sbin`, `/usr/lib` and $PATH.
+ `--smtp-server` is also unspecified, `git send-email` will search
+ for `sendmail` in `/usr/sbin`, `/usr/lib` and `$PATH`.
--smtp-encryption=<encryption>::
Specify in what way encrypting begins for the SMTP connection.
- Valid values are 'ssl' and 'tls'. Any other value reverts to plain
+ Valid values are `ssl` and `tls`. Any other value reverts to plain
(unencrypted) SMTP, which defaults to port 25.
Despite the names, both values will use the same newer version of TLS,
- but for historic reasons have these names. 'ssl' refers to "implicit"
+ but for historic reasons have these names. `ssl` refers to "implicit"
encryption (sometimes called SMTPS), that uses port 465 by default.
- 'tls' refers to "explicit" encryption (often known as STARTTLS),
+ `tls` refers to "explicit" encryption (often known as STARTTLS),
that uses port 25 by default. Other ports might be used by the SMTP
server, which are not the default. Commonly found alternative port for
- 'tls' and unencrypted is 587. You need to check your provider's
+ `tls` and unencrypted is 587. You need to check your provider's
documentation or your server configuration to make sure
for your own case. Default is the value of `sendemail.smtpEncryption`.
--smtp-domain=<FQDN>::
Specifies the Fully Qualified Domain Name (FQDN) used in the
HELO/EHLO command to the SMTP server. Some servers require the
- FQDN to match your IP address. If not set, git send-email attempts
+ FQDN to match your IP address. If not set, `git send-email` attempts
to determine your FQDN automatically. Default is the value of
`sendemail.smtpDomain`.
@@ -223,10 +223,10 @@ $ git send-email --smtp-auth="PLAIN LOGIN GSSAPI" ...
+
If at least one of the specified mechanisms matches the ones advertised by the
SMTP server and if it is supported by the utilized SASL library, the mechanism
-is used for authentication. If neither 'sendemail.smtpAuth' nor `--smtp-auth`
+is used for authentication. If neither `sendemail.smtpAuth` nor `--smtp-auth`
is specified, all mechanisms supported by the SASL library can be used. The
-special value 'none' maybe specified to completely disable authentication
-independently of `--smtp-user`
+special value `none` may be specified to completely disable authentication
+independently of `--smtp-user`.
--smtp-pass[=<password>]::
Password for SMTP-AUTH. The argument is optional: If no
@@ -238,16 +238,16 @@ Furthermore, passwords need not be specified in configuration files
or on the command line. If a username has been specified (with
`--smtp-user` or a `sendemail.smtpUser`), but no password has been
specified (with `--smtp-pass` or `sendemail.smtpPass`), then
-a password is obtained using 'git-credential'.
+a password is obtained using linkgit:git-credential[1].
--no-smtp-auth::
- Disable SMTP authentication. Short hand for `--smtp-auth=none`
+ Disable SMTP authentication. Short hand for `--smtp-auth=none`.
--smtp-server=<host>::
If set, specifies the outgoing SMTP server to use (e.g.
`smtp.example.com` or a raw IP address). If unspecified, and if
`--sendmail-cmd` is also unspecified, the default is to search
- for `sendmail` in `/usr/sbin`, `/usr/lib` and $PATH if such a
+ for `sendmail` in `/usr/sbin`, `/usr/lib` and `$PATH` if such a
program is available, falling back to `localhost` otherwise.
+
For backward compatibility, this option can also specify a full pathname
@@ -260,7 +260,7 @@ instead.
Specifies a port different from the default port (SMTP
servers typically listen to smtp port 25, but may also listen to
submission port 587, or the common SSL smtp port 465);
- symbolic port names (e.g. "submission" instead of 587)
+ symbolic port names (e.g. `submission` instead of 587)
are also accepted. The port can also be set with the
`sendemail.smtpServerPort` configuration variable.
@@ -269,23 +269,25 @@ instead.
Default value can be specified by the `sendemail.smtpServerOption`
configuration option.
+
-The --smtp-server-option option must be repeated for each option you want
+The `--smtp-server-option` option must be repeated for each option you want
to pass to the server. Likewise, different lines in the configuration files
must be used for each option.
--smtp-ssl::
- Legacy alias for '--smtp-encryption ssl'.
+ Legacy alias for `--smtp-encryption ssl`.
--smtp-ssl-cert-path::
Path to a store of trusted CA certificates for SMTP SSL/TLS
certificate validation (either a directory that has been processed
- by 'c_rehash', or a single file containing one or more PEM format
- certificates concatenated together: see verify(1) -CAfile and
- -CApath for more information on these). Set it to an empty string
- to disable certificate verification. Defaults to the value of the
- `sendemail.smtpSSLCertPath` configuration variable, if set, or the
- backing SSL library's compiled-in default otherwise (which should
- be the best choice on most platforms).
+ by `c_rehash`, or a single file containing one or more PEM format
+ certificates concatenated together: see the description of the
+ `-CAfile` _<file>_ and the `-CApath` _<dir>_ options of
+	https://docs.openssl.org/master/man1/openssl-verify/[OpenSSL's verify(1) manual page]
+	for more information on these).
+ Set it to an empty string to disable certificate verification.
+ Defaults to the value of the `sendemail.smtpSSLCertPath` configuration
+ variable, if set, or the backing SSL library's compiled-in default
+ otherwise (which should be the best choice on most platforms).
--smtp-user=<user>::
Username for SMTP-AUTH. Default is the value of `sendemail.smtpUser`;
@@ -298,18 +300,18 @@ must be used for each option.
connection and authentication problems.
--batch-size=<num>::
- Some email servers (e.g. smtp.163.com) limit the number emails to be
+	Some email servers (e.g. `smtp.163.com`) limit the number of emails to be
sent per session (connection) and this will lead to a failure when
sending many messages. With this option, send-email will disconnect after
- sending $<num> messages and wait for a few seconds (see --relogin-delay)
- and reconnect, to work around such a limit. You may want to
- use some form of credential helper to avoid having to retype
- your password every time this happens. Defaults to the
+ sending _<num>_ messages and wait for a few seconds
+ (see `--relogin-delay`) and reconnect, to work around such a limit.
+ You may want to use some form of credential helper to avoid having to
+ retype your password every time this happens. Defaults to the
`sendemail.smtpBatchSize` configuration variable.
--relogin-delay=<int>::
- Waiting $<int> seconds before reconnecting to SMTP server. Used together
- with --batch-size option. Defaults to the `sendemail.smtpReloginDelay`
+	Wait _<int>_ seconds before reconnecting to the SMTP server. Used
+	together with the `--batch-size` option. Defaults to the `sendemail.smtpReloginDelay`
configuration variable.
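++
+For example, to send at most 50 messages per connection and wait 10
+seconds before each reconnection (the numbers and the patch location are
+illustrative):
++
+------------
+$ git send-email --batch-size=50 --relogin-delay=10 outgoing/*.patch
+------------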
Automating
@@ -318,7 +320,7 @@ Automating
--no-to::
--no-cc::
--no-bcc::
- Clears any list of "To:", "Cc:", "Bcc:" addresses previously
+ Clears any list of `To:`, `Cc:`, `Bcc:` addresses previously
set via config.
--no-identity::
@@ -327,13 +329,13 @@ Automating
--to-cmd=<command>::
Specify a command to execute once per patch file which
- should generate patch file specific "To:" entries.
+ should generate patch file specific `To:` entries.
Output of this command must be single email address per line.
- Default is the value of 'sendemail.toCmd' configuration value.
+ Default is the value of `sendemail.toCmd` configuration value.
--cc-cmd=<command>::
Specify a command to execute once per patch file which
- should generate patch file specific "Cc:" entries.
+ should generate patch file specific `Cc:` entries.
Output of this command must be single email address per line.
Default is the value of `sendemail.ccCmd` configuration value.
@@ -341,7 +343,7 @@ Automating
Specify a command that is executed once per outgoing message
and output RFC 2822 style header lines to be inserted into
them. When the `sendemail.headerCmd` configuration variable is
- set, its value is always used. When --header-cmd is provided
+ set, its value is always used. When `--header-cmd` is provided
at the command line, its value takes precedence over the
`sendemail.headerCmd` configuration variable.
@@ -350,7 +352,7 @@ Automating
--[no-]chain-reply-to::
If this is set, each email will be sent as a reply to the previous
- email sent. If disabled with "--no-chain-reply-to", all emails after
+ email sent. If disabled with `--no-chain-reply-to`, all emails after
the first will be sent as replies to the first email sent. When using
this, it is recommended that the first file given be an overview of the
entire patch series. Disabled by default, but the `sendemail.chainReplyTo`
@@ -358,79 +360,80 @@ Automating
--identity=<identity>::
A configuration identity. When given, causes values in the
- 'sendemail.<identity>' subsection to take precedence over
- values in the 'sendemail' section. The default identity is
+ `sendemail.<identity>` subsection to take precedence over
+ values in the `sendemail` section. The default identity is
the value of `sendemail.identity`.
--[no-]signed-off-by-cc::
- If this is set, add emails found in the `Signed-off-by` trailer or Cc: lines to the
- cc list. Default is the value of `sendemail.signedOffByCc` configuration
- value; if that is unspecified, default to --signed-off-by-cc.
+ If this is set, add emails found in the `Signed-off-by` trailer or `Cc:`
+ lines to the cc list. Default is the value of `sendemail.signedOffByCc`
+ configuration value; if that is unspecified, default to
+ `--signed-off-by-cc`.
--[no-]cc-cover::
- If this is set, emails found in Cc: headers in the first patch of
+ If this is set, emails found in `Cc:` headers in the first patch of
the series (typically the cover letter) are added to the cc list
- for each email set. Default is the value of 'sendemail.ccCover'
- configuration value; if that is unspecified, default to --no-cc-cover.
+	for each email sent. Default is the value of `sendemail.ccCover`
+ configuration value; if that is unspecified, default to `--no-cc-cover`.
--[no-]to-cover::
- If this is set, emails found in To: headers in the first patch of
+ If this is set, emails found in `To:` headers in the first patch of
the series (typically the cover letter) are added to the to list
- for each email set. Default is the value of 'sendemail.toCover'
- configuration value; if that is unspecified, default to --no-to-cover.
+	for each email sent. Default is the value of `sendemail.toCover`
+ configuration value; if that is unspecified, default to `--no-to-cover`.
--suppress-cc=<category>::
Specify an additional category of recipients to suppress the
auto-cc of:
+
--
-- 'author' will avoid including the patch author.
-- 'self' will avoid including the sender.
-- 'cc' will avoid including anyone mentioned in Cc lines in the patch header
- except for self (use 'self' for that).
-- 'bodycc' will avoid including anyone mentioned in Cc lines in the
- patch body (commit message) except for self (use 'self' for that).
-- 'sob' will avoid including anyone mentioned in the Signed-off-by trailers except
- for self (use 'self' for that).
-- 'misc-by' will avoid including anyone mentioned in Acked-by,
+- `author` will avoid including the patch author.
+- `self` will avoid including the sender.
+- `cc` will avoid including anyone mentioned in Cc lines in the patch header
+ except for self (use `self` for that).
+- `bodycc` will avoid including anyone mentioned in Cc lines in the
+ patch body (commit message) except for self (use `self` for that).
+- `sob` will avoid including anyone mentioned in the Signed-off-by trailers except
+ for self (use `self` for that).
+- `misc-by` will avoid including anyone mentioned in Acked-by,
Reviewed-by, Tested-by and other "-by" lines in the patch body,
- except Signed-off-by (use 'sob' for that).
-- 'cccmd' will avoid running the --cc-cmd.
-- 'body' is equivalent to 'sob' + 'bodycc' + 'misc-by'.
-- 'all' will suppress all auto cc values.
+ except Signed-off-by (use `sob` for that).
+- `cccmd` will avoid running the `--cc-cmd`.
+- `body` is equivalent to `sob` + `bodycc` + `misc-by`.
+- `all` will suppress all auto cc values.
--
+
Default is the value of `sendemail.suppressCc` configuration value; if
-that is unspecified, default to 'self' if --suppress-from is
-specified, as well as 'body' if --no-signed-off-cc is specified.
+that is unspecified, default to `self` if `--suppress-from` is
+specified, as well as `body` if `--no-signed-off-cc` is specified.
--[no-]suppress-from::
- If this is set, do not add the From: address to the cc: list.
+ If this is set, do not add the `From:` address to the `Cc:` list.
Default is the value of `sendemail.suppressFrom` configuration
- value; if that is unspecified, default to --no-suppress-from.
+ value; if that is unspecified, default to `--no-suppress-from`.
--[no-]thread::
- If this is set, the In-Reply-To and References headers will be
+ If this is set, the `In-Reply-To` and `References` headers will be
added to each email sent. Whether each mail refers to the
- previous email (`deep` threading per 'git format-patch'
+ previous email (`deep` threading per `git format-patch`
wording) or to the first email (`shallow` threading) is
- governed by "--[no-]chain-reply-to".
+ governed by `--[no-]chain-reply-to`.
+
-If disabled with "--no-thread", those headers will not be added
-(unless specified with --in-reply-to). Default is the value of the
+If disabled with `--no-thread`, those headers will not be added
+(unless specified with `--in-reply-to`). Default is the value of the
`sendemail.thread` configuration value; if that is unspecified,
-default to --thread.
+default to `--thread`.
+
It is up to the user to ensure that no In-Reply-To header already
-exists when 'git send-email' is asked to add it (especially note that
-'git format-patch' can be configured to do the threading itself).
+exists when `git send-email` is asked to add it (especially note that
+`git format-patch` can be configured to do the threading itself).
Failure to do so may not produce the expected result in the
recipient's MUA.
--[no-]mailmap::
Use the mailmap file (see linkgit:gitmailmap[5]) to map all
addresses to their canonical real name and email address. Additional
- mailmap data specific to git-send-email may be provided using the
+ mailmap data specific to `git send-email` may be provided using the
`sendemail.mailmap.file` or `sendemail.mailmap.blob` configuration
values. Defaults to `sendemail.mailmap`.
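++
+For example, to enable the mapping and point it at an extra mailmap file
+(the path below is only a placeholder):
++
+----
+[sendemail]
+	mailmap = true
+[sendemail "mailmap"]
+	file = /path/to/extra.mailmap
+----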
@@ -441,17 +444,17 @@ Administering
Confirm just before sending:
+
--
-- 'always' will always confirm before sending
-- 'never' will never confirm before sending
-- 'cc' will confirm before sending when send-email has automatically
- added addresses from the patch to the Cc list
-- 'compose' will confirm before sending the first message when using --compose.
-- 'auto' is equivalent to 'cc' + 'compose'
+- `always` will always confirm before sending.
+- `never` will never confirm before sending.
+- `cc` will confirm before sending when send-email has automatically
+ added addresses from the patch to the Cc list.
+- `compose` will confirm before sending the first message when using `--compose`.
+- `auto` is equivalent to `cc` + `compose`.
--
+
Default is the value of `sendemail.confirm` configuration value; if that
-is unspecified, default to 'auto' unless any of the suppress options
-have been specified, in which case default to 'compose'.
+is unspecified, default to `auto` unless any of the suppress options
+have been specified, in which case default to `compose`.
--dry-run::
Do everything except actually send the emails.
@@ -460,10 +463,10 @@ have been specified, in which case default to 'compose'.
When an argument may be understood either as a reference or as a file name,
choose to understand it as a format-patch argument (`--format-patch`)
or as a file name (`--no-format-patch`). By default, when such a conflict
- occurs, git send-email will fail.
+ occurs, `git send-email` will fail.
--quiet::
- Make git-send-email less verbose. One line per email should be
+ Make `git send-email` less verbose. One line per email should be
all that is output.
--[no-]validate::
@@ -474,7 +477,7 @@ have been specified, in which case default to 'compose'.
* Invoke the sendemail-validate hook if present (see linkgit:githooks[5]).
* Warn of patches that contain lines longer than
998 characters unless a suitable transfer encoding
- ('auto', 'base64', or 'quoted-printable') is used;
+ (`auto`, `base64`, or `quoted-printable`) is used;
this is due to SMTP limits as described by
https://www.ietf.org/rfc/rfc5322.txt.
--
@@ -493,13 +496,13 @@ Information
Instead of the normal operation, dump the shorthand alias names from
the configured alias file(s), one per line in alphabetical order. Note
that this only includes the alias name and not its expanded email addresses.
- See 'sendemail.aliasesFile' for more information about aliases.
+ See `sendemail.aliasesFile` for more information about aliases.
--translate-aliases::
Instead of the normal operation, read from standard input and
interpret each line as an email alias. Translate it according to the
configured alias file(s). Output each translated name and email
- address to standard output, one per line. See 'sendemail.aliasFile'
+	address to standard output, one per line. See `sendemail.aliasesFile`
for more information about aliases.
CONFIGURATION
@@ -524,15 +527,18 @@ edit `~/.gitconfig` to specify your account settings:
smtpServerPort = 587
----
+Gmail does not allow using your regular password for `git send-email`.
If you have multi-factor authentication set up on your Gmail account, you can
-generate an app-specific password for use with 'git send-email'. Visit
+generate an app-specific password for use with `git send-email`. Visit
https://security.google.com/settings/security/apppasswords to create it.
-You can also use OAuth2.0 authentication with Gmail. `OAUTHBEARER` and
-`XOAUTH2` are common methods used for this type of authentication. Gmail
-supports both of them. As an example, if you want to use `OAUTHBEARER`, edit
-your `~/.gitconfig` file and add `smtpAuth = OAUTHBEARER` to your account
-settings:
+Alternatively, instead of using an app-specific password, you can use
+OAuth2.0 authentication with Gmail. OAuth2.0 is more secure than
+app-specific passwords, and works regardless of whether you have multi-factor
+authentication set up. `OAUTHBEARER` and `XOAUTH2` are common mechanisms used
+for this type of authentication. Gmail supports both of them. As an example,
+if you want to use `OAUTHBEARER`, edit your `~/.gitconfig` file and add
+`smtpAuth = OAUTHBEARER` to your account settings:
----
[sendemail]
@@ -543,11 +549,15 @@ settings:
smtpAuth = OAUTHBEARER
----
+Another alternative is using a tool developed by Google known as
+https://github.com/google/gmail-oauth2-tools/tree/master/go/sendgmail[sendgmail]
+to send emails using `git send-email`.
+
Use Microsoft Outlook as the SMTP Server
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Unlike Gmail, Microsoft Outlook no longer supports app-specific passwords.
Therefore, OAuth2.0 authentication must be used for Outlook. Also, it only
-supports `XOAUTH2` authentication method.
+supports the `XOAUTH2` authentication mechanism.
Edit `~/.gitconfig` to specify your account settings for Outlook and use its
SMTP server with `git send-email`:
@@ -579,8 +589,7 @@ next time.
If you are using OAuth2.0 authentication, you need to use an access token in
place of a password when prompted. Various OAuth2.0 token generators are
-available online. Community maintained credential helpers for Gmail and Outlook
-are also available:
+available online. Community maintained credential helpers are also available:
- https://github.com/AdityaGarg8/git-credential-email[git-credential-gmail]
(cross platform, dedicated helper for authenticating Gmail accounts)
@@ -588,15 +597,65 @@ are also available:
- https://github.com/AdityaGarg8/git-credential-email[git-credential-outlook]
(cross platform, dedicated helper for authenticating Microsoft Outlook accounts)
+ - https://github.com/AdityaGarg8/git-credential-email[git-credential-yahoo]
+ (cross platform, dedicated helper for authenticating Yahoo accounts)
+
+ - https://github.com/AdityaGarg8/git-credential-email[git-credential-aol]
+ (cross platform, dedicated helper for authenticating AOL accounts)
+
You can also see linkgit:gitcredentials[7] for more OAuth based authentication
helpers.
+Proton Mail does not provide an SMTP server to send emails. If you are a paid
+customer of Proton Mail, you can use
+https://proton.me/mail/bridge[Proton Mail Bridge]
+officially provided by Proton Mail to create a local SMTP server for sending
+emails. For both free and paid users, community maintained projects like
+https://github.com/AdityaGarg8/git-credential-email[git-protonmail] can be
+used.
+
Note: the following core Perl modules that may be installed with your
distribution of Perl are required:
-MIME::Base64, MIME::QuotedPrint, Net::Domain and Net::SMTP.
+
+https://metacpan.org/pod/MIME::Base64[MIME::Base64],
+https://metacpan.org/pod/MIME::QuotedPrint[MIME::QuotedPrint],
+https://metacpan.org/pod/Net::Domain[Net::Domain] and
+https://metacpan.org/pod/Net::SMTP[Net::SMTP].
+
These additional Perl modules are also required:
-Authen::SASL and Mail::Address.
+https://metacpan.org/pod/Authen::SASL[Authen::SASL] and
+https://metacpan.org/pod/Mail::Address[Mail::Address].
+
+Exploiting the `sendmailCmd` option of `git send-email`
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Apart from sending emails via an SMTP server, `git send-email` can also send
+emails through any application that supports sendmail-like commands. You can
+read the documentation of `--sendmail-cmd=<command>` above for more information.
+This ability can be very useful if you want to use another application as an
+SMTP client for `git send-email`, or if your email provider uses proprietary
+APIs instead of SMTP to send emails.
+
+As an example, let's see how to configure https://marlam.de/msmtp/[msmtp], a
+popular SMTP client found in many Linux distributions. Edit `~/.gitconfig`
+to instruct `git send-email` to use it for sending emails:
+
+----
+[sendemail]
+ sendmailCmd = /usr/bin/msmtp # Change this to the path where msmtp is installed
+----
+
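+With this in place, sending patches works the same way as with a regular
+SMTP server, for example (the recipient address below is only illustrative):
+
+----
+$ git format-patch -3 -o outgoing/
+$ git send-email --to=maintainer@example.org outgoing/*.patch
+----
+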
+A few such community maintained tools are:
+
+ - https://marlam.de/msmtp/[msmtp]
+ (popular SMTP client with many features, available for Linux and macOS)
+
+ - https://github.com/AdityaGarg8/git-credential-email[git-protonmail]
+ (cross platform client that can send emails using the ProtonMail API)
+
+ - https://github.com/AdityaGarg8/git-credential-email[git-msgraph]
+ (cross platform client that can send emails using the Microsoft Graph API)
SEE ALSO
--------
diff --git a/Documentation/git-stash.adoc b/Documentation/git-stash.adoc
index 1a5177f498..e5e6c9d37f 100644
--- a/Documentation/git-stash.adoc
+++ b/Documentation/git-stash.adoc
@@ -23,6 +23,8 @@ SYNOPSIS
'git stash' clear
'git stash' create [<message>]
'git stash' store [(-m | --message) <message>] [-q | --quiet] <commit>
+'git stash' export (--print | --to-ref <ref>) [<stash>...]
+'git stash' import <commit>
DESCRIPTION
-----------
@@ -154,6 +156,18 @@ store::
reflog. This is intended to be useful for scripts. It is
probably not the command you want to use; see "push" above.
+export ( --print | --to-ref <ref> ) [<stash>...]::
+
+ Export the specified stashes, or all of them if none are specified, to
+ a chain of commits which can be transferred using the normal fetch and
+ push mechanisms, then imported using the `import` subcommand.
+
+import <commit>::
+
+ Import the specified stashes from the specified commit, which must have been
+ created by `export`, and add them to the list of stashes. To replace the
+ existing stashes, use `clear` first.
+
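+As a sketch, stashes could be shared between two clones like this (the ref
+and remote names are only illustrative):
+
+----
+$ git stash export --to-ref refs/stashes/shared
+$ git push origin refs/stashes/shared
+----
+
+and then, in the other clone:
+
+----
+$ git fetch origin refs/stashes/shared
+$ git stash import FETCH_HEAD
+----
+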
OPTIONS
-------
-a::
@@ -242,6 +256,19 @@ literally (including newlines and quotes).
+
Quiet, suppress feedback messages.
+--print::
+ This option is only valid for the `export` command.
++
+Create the chain of commits representing the exported stashes without
+storing it anywhere in the ref namespace and print the object ID to
+standard output. This is designed for scripts.
+
+--to-ref::
+ This option is only valid for the `export` command.
++
+Create the chain of commits representing the exported stashes and store
+it to the specified ref.
+
\--::
This option is only valid for `push` command.
+
@@ -259,7 +286,7 @@ For more details, see the 'pathspec' entry in linkgit:gitglossary[7].
<stash>::
This option is only valid for `apply`, `branch`, `drop`, `pop`,
- `show` commands.
+ `show`, and `export` commands.
+
A reference of the form `stash@{<revision>}`. When no `<stash>` is
given, the latest stash is assumed (that is, `stash@{0}`).
diff --git a/Documentation/git-whatchanged.adoc b/Documentation/git-whatchanged.adoc
index 8e55e0bb1e..d21484026f 100644
--- a/Documentation/git-whatchanged.adoc
+++ b/Documentation/git-whatchanged.adoc
@@ -8,8 +8,14 @@ git-whatchanged - Show logs with differences each commit introduces
SYNOPSIS
--------
-[verse]
-'git whatchanged' <option>...
+[synopsis]
+git whatchanged <option>...
+
+WARNING
+-------
+`git whatchanged` has been deprecated and is scheduled for removal in
+a future version of Git, as it is merely `git log` with a different
+default; `whatchanged` is not even shorter to type than `log --raw`.
DESCRIPTION
-----------
diff --git a/Documentation/gitcredentials.adoc b/Documentation/gitcredentials.adoc
index b49923db02..3337bb475d 100644
--- a/Documentation/gitcredentials.adoc
+++ b/Documentation/gitcredentials.adoc
@@ -133,10 +133,6 @@ Popular helpers with OAuth support include:
- https://github.com/hickford/git-credential-oauth[git-credential-oauth] (cross platform, included in many Linux distributions)
- - https://github.com/AdityaGarg8/git-credential-email[git-credential-gmail] (cross platform, dedicated helper to authenticate Gmail accounts for linkgit:git-send-email[1])
-
- - https://github.com/AdityaGarg8/git-credential-email[git-credential-outlook] (cross platform, dedicated helper to authenticate Microsoft Outlook accounts for linkgit:git-send-email[1])
-
CREDENTIAL CONTEXTS
-------------------
diff --git a/Documentation/gitprotocol-v2.adoc b/Documentation/gitprotocol-v2.adoc
index 5598c93e67..9a57005d77 100644
--- a/Documentation/gitprotocol-v2.adoc
+++ b/Documentation/gitprotocol-v2.adoc
@@ -54,7 +54,7 @@ In general a client can request to speak protocol v2 by sending
`version=2` through the respective side-channel for the transport being
used which inevitably sets `GIT_PROTOCOL`. More information can be
found in linkgit:gitprotocol-pack[5] and linkgit:gitprotocol-http[5], as well as the
-`GIT_PROTOCOL` definition in `git.txt`. In all cases the
+`GIT_PROTOCOL` definition in linkgit:git[1]. In all cases the
response from the server is the capability advertisement.
Git Transport
@@ -99,7 +99,7 @@ Uses the `--http-backend-info-refs` option to
linkgit:git-upload-pack[1].
The server may need to be configured to pass this header's contents via
-the `GIT_PROTOCOL` variable. See the discussion in `git-http-backend.txt`.
+the `GIT_PROTOCOL` variable. See the discussion in linkgit:git-http-backend[1].
Capability Advertisement
------------------------
diff --git a/Documentation/gitremote-helpers.adoc b/Documentation/gitremote-helpers.adoc
index d0be008e5e..39cdece16e 100644
--- a/Documentation/gitremote-helpers.adoc
+++ b/Documentation/gitremote-helpers.adoc
@@ -498,7 +498,7 @@ set by Git if the remote helper has the 'option' capability.
ask for the tag specifically. Some helpers may be able to
use this option to avoid a second network connection.
-'option dry-run' {'true'|'false'}:
+'option dry-run' {'true'|'false'}::
If true, pretend the operation completed successfully,
but don't actually change any repository data. For most
helpers this only applies to the 'push', if supported.
diff --git a/Documentation/glossary-content.adoc b/Documentation/glossary-content.adoc
index 575c18f776..e423e4765b 100644
--- a/Documentation/glossary-content.adoc
+++ b/Documentation/glossary-content.adoc
@@ -418,9 +418,8 @@ full pathname may have special meaning:
- A leading "`**`" followed by a slash means match in all
directories. For example, "`**/foo`" matches file or directory
- "`foo`" anywhere, the same as pattern "`foo`". "`**/foo/bar`"
- matches file or directory "`bar`" anywhere that is directly
- under directory "`foo`".
+ "`foo`" anywhere. "`**/foo/bar`" matches file or directory "`bar`"
+ anywhere that is directly under directory "`foo`".
- A trailing "`/**`" matches everything inside. For example,
"`abc/**`" matches all files inside directory "abc", relative
diff --git a/Documentation/line-range-format.adoc b/Documentation/line-range-format.adoc
index 9b51e9fb66..3cc2a14544 100644
--- a/Documentation/line-range-format.adoc
+++ b/Documentation/line-range-format.adoc
@@ -1,30 +1,30 @@
-'<start>' and '<end>' can take one of these forms:
+_<start>_ and _<end>_ can take one of these forms:
-- number
+- _<number>_
+
-If '<start>' or '<end>' is a number, it specifies an
+If _<start>_ or _<end>_ is a number, it specifies an
absolute line number (lines count from 1).
+
-- `/regex/`
+- `/<regex>/`
+
This form will use the first line matching the given
-POSIX regex. If '<start>' is a regex, it will search from the end of
+POSIX _<regex>_. If _<start>_ is a regex, it will search from the end of
the previous `-L` range, if any, otherwise from the start of file.
-If '<start>' is `^/regex/`, it will search from the start of file.
-If '<end>' is a regex, it will search
-starting at the line given by '<start>'.
+If _<start>_ is `^/<regex>/`, it will search from the start of file.
+If _<end>_ is a regex, it will search starting at the line given by
+_<start>_.
+
-- +offset or -offset
+- `+<offset>` or `-<offset>`
+
-This is only valid for '<end>' and will specify a number
-of lines before or after the line given by '<start>'.
+This is only valid for _<end>_ and will specify a number
+of lines before or after the line given by _<start>_.
+
-If `:<funcname>` is given in place of '<start>' and '<end>', it is a
+If `:<funcname>` is given in place of _<start>_ and _<end>_, it is a
regular expression that denotes the range from the first funcname line
-that matches '<funcname>', up to the next funcname line. `:<funcname>`
+that matches _<funcname>_, up to the next funcname line. `:<funcname>`
searches from the end of the previous `-L` range, if any, otherwise
from the start of file. `^:<funcname>` searches from the start of
file. The function names are determined in the same way as `git diff`
diff --git a/Documentation/line-range-options.adoc b/Documentation/line-range-options.adoc
index f275df3b69..c44ba05320 100644
--- a/Documentation/line-range-options.adoc
+++ b/Documentation/line-range-options.adoc
@@ -1,12 +1,12 @@
--L<start>,<end>:<file>::
--L:<funcname>:<file>::
+`-L<start>,<end>:<file>`::
+`-L:<funcname>:<file>`::
- Trace the evolution of the line range given by '<start>,<end>',
- or by the function name regex '<funcname>', within the '<file>'. You may
+ Trace the evolution of the line range given by `<start>,<end>`,
+ or by the function name regex _<funcname>_, within the _<file>_. You may
not give any pathspec limiters. This is currently limited to
a walk starting from a single revision, i.e., you may only
give zero or one positive revision arguments, and
- '<start>' and '<end>' (or '<funcname>') must exist in the starting revision.
+ _<start>_ and _<end>_ (or _<funcname>_) must exist in the starting revision.
You can specify this option more than once. Implies `--patch`.
Patch output can be suppressed using `--no-patch`, but other diff formats
(namely `--raw`, `--numstat`, `--shortstat`, `--dirstat`, `--summary`,
diff --git a/Documentation/merge-options.adoc b/Documentation/merge-options.adoc
index 078f4f6157..95ef491be1 100644
--- a/Documentation/merge-options.adoc
+++ b/Documentation/merge-options.adoc
@@ -113,6 +113,9 @@ include::signoff-option.adoc[]
With `-n` or `--no-stat` do not show a diffstat at the end of the
merge.
+`--compact-summary`::
+ Show a compact-summary at the end of the merge.
+
`--squash`::
`--no-squash`::
Produce the working tree and index state as if a real merge
diff --git a/Documentation/meson.build b/Documentation/meson.build
index 1433acfd31..4404c623f0 100644
--- a/Documentation/meson.build
+++ b/Documentation/meson.build
@@ -158,7 +158,6 @@ manpages = {
'git-verify-tag.adoc' : 1,
'git-version.adoc' : 1,
'git-web--browse.adoc' : 1,
- 'git-whatchanged.adoc' : 1,
'git-worktree.adoc' : 1,
'git-write-tree.adoc' : 1,
'git.adoc' : 1,
@@ -207,6 +206,7 @@ manpages = {
manpages_breaking_changes = {
'git-pack-redundant.adoc' : 1,
+ 'git-whatchanged.adoc' : 1,
}
if not get_option('breaking_changes')
@@ -375,8 +375,7 @@ foreach manpage, category : manpages
output: fs.stem(manpage) + '.xml',
)
- manpage_path = fs.stem(manpage) + '.' + category.to_string()
- manpage_target = custom_target(
+ custom_target(
command: [
xmlto,
'-m', '@INPUT0@',
@@ -392,7 +391,7 @@ foreach manpage, category : manpages
'manpage-normal.xsl',
'manpage-bold-literal.xsl',
],
- output: manpage_path,
+ output: fs.stem(manpage) + '.' + category.to_string(),
install: true,
install_dir: get_option('mandir') / 'man' + category.to_string(),
)
diff --git a/Documentation/pretty-formats.adoc b/Documentation/pretty-formats.adoc
index 07475de8c3..9ed0417fc8 100644
--- a/Documentation/pretty-formats.adoc
+++ b/Documentation/pretty-formats.adoc
@@ -2,11 +2,11 @@ PRETTY FORMATS
--------------
If the commit is a merge, and if the pretty-format
-is not 'oneline', 'email' or 'raw', an additional line is
-inserted before the 'Author:' line. This line begins with
+is not `oneline`, `email` or `raw`, an additional line is
+inserted before the `Author:` line. This line begins with
"Merge: " and the hashes of ancestral commits are printed,
separated by spaces. Note that the listed commits may not
-necessarily be the list of the *direct* parent commits if you
+necessarily be the list of the 'direct' parent commits if you
have limited your view of history: for example, if you are
only interested in changes related to a certain directory or
file.
@@ -14,24 +14,24 @@ file.
There are several built-in formats, and you can define
additional formats by setting a pretty.<name>
config option to either another format name, or a
-'format:' string, as described below (see
+`format:` string, as described below (see
linkgit:git-config[1]). Here are the details of the
built-in formats:
-* 'oneline'
+* `oneline`
<hash> <title-line>
+
This is designed to be as compact as possible.
-* 'short'
+* `short`
commit <hash>
Author: <author>
<title-line>
-* 'medium'
+* `medium`
commit <hash>
Author: <author>
@@ -41,7 +41,7 @@ This is designed to be as compact as possible.
<full-commit-message>
-* 'full'
+* `full`
commit <hash>
Author: <author>
@@ -51,7 +51,7 @@ This is designed to be as compact as possible.
<full-commit-message>
-* 'fuller'
+* `fuller`
commit <hash>
Author: <author>
@@ -63,18 +63,18 @@ This is designed to be as compact as possible.
<full-commit-message>
-* 'reference'
+* `reference`
<abbrev-hash> (<title-line>, <short-author-date>)
+
This format is used to refer to another commit in a commit message and
-is the same as `--pretty='format:%C(auto)%h (%s, %ad)'`. By default,
+is the same as ++--pretty=\'format:%C(auto)%h (%s, %ad)'++. By default,
the date is formatted with `--date=short` unless another `--date` option
is explicitly specified. As with any `format:` with format
placeholders, its output is not affected by other options like
`--decorate` and `--walk-reflogs`.
-* 'email'
+* `email`
From <hash> <date>
From: <author>
@@ -83,30 +83,30 @@ placeholders, its output is not affected by other options like
<full-commit-message>
-* 'mboxrd'
+* `mboxrd`
+
-Like 'email', but lines in the commit message starting with "From "
+Like `email`, but lines in the commit message starting with "From "
(preceded by zero or more ">") are quoted with ">" so they aren't
confused as starting a new commit.
-* 'raw'
+* `raw`
+
-The 'raw' format shows the entire commit exactly as
+The `raw` format shows the entire commit exactly as
stored in the commit object. Notably, the hashes are
-displayed in full, regardless of whether --abbrev or
---no-abbrev are used, and 'parents' information show the
+displayed in full, regardless of whether `--abbrev` or
+`--no-abbrev` are used, and 'parents' information shows the
true parent commits, without taking grafts or history
simplification into account. Note that this format affects the way
commits are displayed, but not the way the diff is shown e.g. with
`git log --raw`. To get full object names in a raw diff format,
use `--no-abbrev`.
-* 'format:<format-string>'
+* `format:<format-string>`
+
-The 'format:<format-string>' format allows you to specify which information
+The `format:<format-string>` format allows you to specify which information
you want to show. It works a little bit like printf format,
-with the notable exception that you get a newline with '%n'
-instead of '\n'.
+with the notable exception that you get a newline with `%n`
+instead of `\n`.
+
E.g, 'format:"The author of %h was %an, %ar%nThe title was >>%s<<%n"'
would show something like this:
@@ -120,158 +120,161 @@ The title was >>t4119: test autocomputing -p<n> for traditional diff input.<<
The placeholders are:
- Placeholders that expand to a single literal character:
-'%n':: newline
-'%%':: a raw '%'
-'%x00':: '%x' followed by two hexadecimal digits is replaced with a
+++%n++:: newline
+++%%++:: a raw ++%++
+++%x00++:: ++%x++ followed by two hexadecimal digits is replaced with a
byte with the hexadecimal digits' value (we will call this
"literal formatting code" in the rest of this document).
- Placeholders that affect formatting of later placeholders:
-'%Cred':: switch color to red
-'%Cgreen':: switch color to green
-'%Cblue':: switch color to blue
-'%Creset':: reset color
-'%C(...)':: color specification, as described under Values in the
+++%Cred++:: switch color to red
+++%Cgreen++:: switch color to green
+++%Cblue++:: switch color to blue
+++%Creset++:: reset color
+++%C(++_<spec>_++)++:: color specification, as described under Values in the
"CONFIGURATION FILE" section of linkgit:git-config[1]. By
default, colors are shown only when enabled for log output
(by `color.diff`, `color.ui`, or `--color`, and respecting
the `auto` settings of the former if we are going to a
- terminal). `%C(auto,...)` is accepted as a historical
- synonym for the default (e.g., `%C(auto,red)`). Specifying
- `%C(always,...)` will show the colors even when color is
+ terminal). ++%C(auto,++_<spec>_++)++ is accepted as a historical
+ synonym for the default (e.g., ++%C(auto,red)++). Specifying
+ ++%C(always,++_<spec>_++)++ will show the colors even when color is
not otherwise enabled (though consider just using
- `--color=always` to enable color for the whole output,
+ `--color=always` to enable color for the whole output,
including this format and anything else git might color).
- `auto` alone (i.e. `%C(auto)`) will turn on auto coloring
+ `auto` alone (i.e. ++%C(auto)++) will turn on auto coloring
on the next placeholders until the color is switched
again.
-'%m':: left (`<`), right (`>`) or boundary (`-`) mark
-'%w([<w>[,<i1>[,<i2>]]])':: switch line wrapping, like the -w option of
+++%m++:: left (`<`), right (`>`) or boundary (`-`) mark
+++%w(++`[<w>[,<i1>[,<i2>]]]`++)++:: switch line wrapping, like the `-w` option of
linkgit:git-shortlog[1].
-'%<( <N> [,trunc|ltrunc|mtrunc])':: make the next placeholder take at
+++%<(++`<n>[,(trunc|ltrunc|mtrunc)]`++)++:: make the next placeholder take at
least N column widths, padding spaces on
the right if necessary. Optionally
- truncate (with ellipsis '..') at the left (ltrunc) `..ft`,
+ truncate (with ellipsis `..`) at the left (ltrunc) `..ft`,
the middle (mtrunc) `mi..le`, or the end
(trunc) `rig..`, if the output is longer than
- N columns.
+ _<n>_ columns.
Note 1: that truncating
- only works correctly with N >= 2.
- Note 2: spaces around the N and M (see below)
+ only works correctly with _<n>_ >= 2.
+ Note 2: spaces around the _<n>_ and _<m>_ (see below)
values are optional.
Note 3: Emojis and other wide characters
will take two display columns, which may
over-run column boundaries.
Note 4: decomposed character combining marks
may be misplaced at padding boundaries.
-'%<|( <M> )':: make the next placeholder take at least until Mth
+++%<|(++_<m>_ ++)++:: make the next placeholder take at least until _<m>_ th
display column, padding spaces on the right if necessary.
- Use negative M values for column positions measured
+ Use negative _<m>_ values for column positions measured
from the right hand edge of the terminal window.
-'%>( <N> )', '%>|( <M> )':: similar to '%<( <N> )', '%<|( <M> )' respectively,
+++%>(++_<n>_++)++::
+++%>|(++_<m>_++)++:: similar to ++%<(++_<n>_++)++, ++%<|(++_<m>_++)++ respectively,
but padding spaces on the left
-'%>>( <N> )', '%>>|( <M> )':: similar to '%>( <N> )', '%>|( <M> )'
+++%>>(++_<n>_++)++::
+++%>>|(++_<m>_++)++:: similar to ++%>(++_<n>_++)++, ++%>|(++_<m>_++)++
respectively, except that if the next
placeholder takes more spaces than given and
there are spaces on its left, use those
spaces
-'%><( <N> )', '%><|( <M> )':: similar to '%<( <N> )', '%<|( <M> )'
+++%><(++_<n>_++)++::
+++%><|(++_<m>_++)++:: similar to ++%<(++_<n>_++)++, ++%<|(++_<m>_++)++
respectively, but padding both sides
(i.e. the text is centered)
- Placeholders that expand to information extracted from the commit:
-'%H':: commit hash
-'%h':: abbreviated commit hash
-'%T':: tree hash
-'%t':: abbreviated tree hash
-'%P':: parent hashes
-'%p':: abbreviated parent hashes
-'%an':: author name
-'%aN':: author name (respecting .mailmap, see linkgit:git-shortlog[1]
++%H+:: commit hash
++%h+:: abbreviated commit hash
++%T+:: tree hash
++%t+:: abbreviated tree hash
++%P+:: parent hashes
++%p+:: abbreviated parent hashes
++%an+:: author name
++%aN+:: author name (respecting .mailmap, see linkgit:git-shortlog[1]
or linkgit:git-blame[1])
-'%ae':: author email
-'%aE':: author email (respecting .mailmap, see linkgit:git-shortlog[1]
++%ae+:: author email
++%aE+:: author email (respecting .mailmap, see linkgit:git-shortlog[1]
or linkgit:git-blame[1])
-'%al':: author email local-part (the part before the '@' sign)
-'%aL':: author local-part (see '%al') respecting .mailmap, see
++%al+:: author email local-part (the part before the `@` sign)
++%aL+:: author local-part (see +%al+) respecting .mailmap, see
linkgit:git-shortlog[1] or linkgit:git-blame[1])
-'%ad':: author date (format respects --date= option)
-'%aD':: author date, RFC2822 style
-'%ar':: author date, relative
-'%at':: author date, UNIX timestamp
-'%ai':: author date, ISO 8601-like format
-'%aI':: author date, strict ISO 8601 format
-'%as':: author date, short format (`YYYY-MM-DD`)
-'%ah':: author date, human style (like the `--date=human` option of
++%ad+:: author date (format respects --date= option)
++%aD+:: author date, RFC2822 style
++%ar+:: author date, relative
++%at+:: author date, UNIX timestamp
++%ai+:: author date, ISO 8601-like format
++%aI+:: author date, strict ISO 8601 format
++%as+:: author date, short format (`YYYY-MM-DD`)
++%ah+:: author date, human style (like the `--date=human` option of
linkgit:git-rev-list[1])
-'%cn':: committer name
-'%cN':: committer name (respecting .mailmap, see
++%cn+:: committer name
++%cN+:: committer name (respecting .mailmap, see
linkgit:git-shortlog[1] or linkgit:git-blame[1])
-'%ce':: committer email
-'%cE':: committer email (respecting .mailmap, see
++%ce+:: committer email
++%cE+:: committer email (respecting .mailmap, see
linkgit:git-shortlog[1] or linkgit:git-blame[1])
-'%cl':: committer email local-part (the part before the '@' sign)
-'%cL':: committer local-part (see '%cl') respecting .mailmap, see
++%cl+:: committer email local-part (the part before the `@` sign)
++%cL+:: committer local-part (see +%cl+) respecting .mailmap, see
linkgit:git-shortlog[1] or linkgit:git-blame[1])
-'%cd':: committer date (format respects --date= option)
-'%cD':: committer date, RFC2822 style
-'%cr':: committer date, relative
-'%ct':: committer date, UNIX timestamp
-'%ci':: committer date, ISO 8601-like format
-'%cI':: committer date, strict ISO 8601 format
-'%cs':: committer date, short format (`YYYY-MM-DD`)
-'%ch':: committer date, human style (like the `--date=human` option of
++%cd+:: committer date (format respects --date= option)
++%cD+:: committer date, RFC2822 style
++%cr+:: committer date, relative
++%ct+:: committer date, UNIX timestamp
++%ci+:: committer date, ISO 8601-like format
++%cI+:: committer date, strict ISO 8601 format
++%cs+:: committer date, short format (`YYYY-MM-DD`)
++%ch+:: committer date, human style (like the `--date=human` option of
linkgit:git-rev-list[1])
-'%d':: ref names, like the --decorate option of linkgit:git-log[1]
-'%D':: ref names without the " (", ")" wrapping.
-'%(decorate[:<options>])'::
++%d+:: ref names, like the --decorate option of linkgit:git-log[1]
++%D+:: ref names without the " (", ")" wrapping.
+++%(decorate++`[:<option>,...]`++)++::
ref names with custom decorations. The `decorate` string may be followed by a
colon and zero or more comma-separated options. Option values may contain
literal formatting codes. These must be used for commas (`%x2C`) and closing
parentheses (`%x29`), due to their role in the option syntax.
+
-** 'prefix=<value>': Shown before the list of ref names. Defaults to "{nbsp}`(`".
-** 'suffix=<value>': Shown after the list of ref names. Defaults to "`)`".
-** 'separator=<value>': Shown between ref names. Defaults to "`,`{nbsp}".
-** 'pointer=<value>': Shown between HEAD and the branch it points to, if any.
- Defaults to "{nbsp}`->`{nbsp}".
-** 'tag=<value>': Shown before tag names. Defaults to "`tag:`{nbsp}".
+** `prefix=<value>`: Shown before the list of ref names. Defaults to "{nbsp}+(+".
+** `suffix=<value>`: Shown after the list of ref names. Defaults to "+)+".
+** `separator=<value>`: Shown between ref names. Defaults to "+,+{nbsp}".
+** `pointer=<value>`: Shown between HEAD and the branch it points to, if any.
+ Defaults to "{nbsp}+->+{nbsp}".
+** `tag=<value>`: Shown before tag names. Defaults to "`tag:`{nbsp}".
+
For example, to produce decorations with no wrapping
or tag annotations, and spaces as separators:
+
-`%(decorate:prefix=,suffix=,tag=,separator= )`
+++%(decorate:prefix=,suffix=,tag=,separator= )++
-'%(describe[:<options>])'::
+++%(describe++`[:<option>,...]`++)++::
human-readable name, like linkgit:git-describe[1]; empty string for
undescribable commits. The `describe` string may be followed by a colon and
zero or more comma-separated options. Descriptions can be inconsistent when
tags are added or removed at the same time.
+
-** 'tags[=<bool-value>]': Instead of only considering annotated tags,
+** `tags[=<bool-value>]`: Instead of only considering annotated tags,
consider lightweight tags as well.
-** 'abbrev=<number>': Instead of using the default number of hexadecimal digits
+** `abbrev=<number>`: Instead of using the default number of hexadecimal digits
(which will vary according to the number of objects in the repository with a
default of 7) of the abbreviated object name, use <number> digits, or as many
digits as needed to form a unique object name.
-** 'match=<pattern>': Only consider tags matching the given
- `glob(7)` pattern, excluding the "refs/tags/" prefix.
-** 'exclude=<pattern>': Do not consider tags matching the given
- `glob(7)` pattern, excluding the "refs/tags/" prefix.
+** `match=<pattern>`: Only consider tags matching the given
+ `glob(7)` _<pattern>_, excluding the `refs/tags/` prefix.
+** `exclude=<pattern>`: Do not consider tags matching the given
+ `glob(7)` _<pattern>_, excluding the `refs/tags/` prefix.
-'%S':: ref name given on the command line by which the commit was reached
++%S+:: ref name given on the command line by which the commit was reached
(like `git log --source`), only works with `git log`
-'%e':: encoding
-'%s':: subject
-'%f':: sanitized subject line, suitable for a filename
-'%b':: body
-'%B':: raw body (unwrapped subject and body)
++%e+:: encoding
++%s+:: subject
++%f+:: sanitized subject line, suitable for a filename
++%b+:: body
++%B+:: raw body (unwrapped subject and body)
ifndef::git-rev-list[]
-'%N':: commit notes
++%N+:: commit notes
endif::git-rev-list[]
-'%GG':: raw verification message from GPG for a signed commit
-'%G?':: show "G" for a good (valid) signature,
++%GG+:: raw verification message from GPG for a signed commit
++%G?+:: show "G" for a good (valid) signature,
"B" for a bad signature,
"U" for a good signature with unknown validity,
"X" for a good signature that has expired,
@@ -279,86 +282,86 @@ endif::git-rev-list[]
"R" for a good signature made by a revoked key,
"E" if the signature cannot be checked (e.g. missing key)
and "N" for no signature
-'%GS':: show the name of the signer for a signed commit
-'%GK':: show the key used to sign a signed commit
-'%GF':: show the fingerprint of the key used to sign a signed commit
-'%GP':: show the fingerprint of the primary key whose subkey was used
++%GS+:: show the name of the signer for a signed commit
++%GK+:: show the key used to sign a signed commit
++%GF+:: show the fingerprint of the key used to sign a signed commit
++%GP+:: show the fingerprint of the primary key whose subkey was used
to sign a signed commit
-'%GT':: show the trust level for the key used to sign a signed commit
-'%gD':: reflog selector, e.g., `refs/stash@{1}` or `refs/stash@{2
++%GT+:: show the trust level for the key used to sign a signed commit
++%gD+:: reflog selector, e.g., `refs/stash@{1}` or `refs/stash@{2
minutes ago}`; the format follows the rules described for the
`-g` option. The portion before the `@` is the refname as
given on the command line (so `git log -g refs/heads/master`
would yield `refs/heads/master@{0}`).
-'%gd':: shortened reflog selector; same as `%gD`, but the refname
++%gd+:: shortened reflog selector; same as `%gD`, but the refname
portion is shortened for human readability (so
`refs/heads/master` becomes just `master`).
-'%gn':: reflog identity name
-'%gN':: reflog identity name (respecting .mailmap, see
++%gn+:: reflog identity name
++%gN+:: reflog identity name (respecting .mailmap, see
linkgit:git-shortlog[1] or linkgit:git-blame[1])
-'%ge':: reflog identity email
-'%gE':: reflog identity email (respecting .mailmap, see
++%ge+:: reflog identity email
++%gE+:: reflog identity email (respecting .mailmap, see
linkgit:git-shortlog[1] or linkgit:git-blame[1])
-'%gs':: reflog subject
-'%(trailers[:<options>])'::
++%gs+:: reflog subject
+++%(trailers++`[:<option>,...]`++)++::
display the trailers of the body as interpreted by
linkgit:git-interpret-trailers[1]. The `trailers` string may be followed by
a colon and zero or more comma-separated options. If any option is provided
multiple times, the last occurrence wins.
+
-** 'key=<key>': only show trailers with specified <key>. Matching is done
+** `key=<key>`: only show trailers with specified <key>. Matching is done
case-insensitively and trailing colon is optional. If option is
given multiple times trailer lines matching any of the keys are
shown. This option automatically enables the `only` option so that
non-trailer lines in the trailer block are hidden. If that is not
desired it can be disabled with `only=false`. E.g.,
- `%(trailers:key=Reviewed-by)` shows trailer lines with key
+ +%(trailers:key=Reviewed-by)+ shows trailer lines with key
`Reviewed-by`.
-** 'only[=<bool>]': select whether non-trailer lines from the trailer
+** `only[=<bool>]`: select whether non-trailer lines from the trailer
block should be included.
-** 'separator=<sep>': specify the separator inserted between trailer
+** `separator=<sep>`: specify the separator inserted between trailer
lines. Defaults to a line feed character. The string <sep> may contain
the literal formatting codes described above. To use comma as
separator one must use `%x2C` as it would otherwise be parsed as
- next option. E.g., `%(trailers:key=Ticket,separator=%x2C )`
+ next option. E.g., +%(trailers:key=Ticket,separator=%x2C )+
shows all trailer lines whose key is "Ticket" separated by a comma
and a space.
-** 'unfold[=<bool>]': make it behave as if interpret-trailer's `--unfold`
+** `unfold[=<bool>]`: make it behave as if interpret-trailer's `--unfold`
option was given. E.g.,
- `%(trailers:only,unfold=true)` unfolds and shows all trailer lines.
-** 'keyonly[=<bool>]': only show the key part of the trailer.
-** 'valueonly[=<bool>]': only show the value part of the trailer.
-** 'key_value_separator=<sep>': specify the separator inserted between
+ +%(trailers:only,unfold=true)+ unfolds and shows all trailer lines.
+** `keyonly[=<bool>]`: only show the key part of the trailer.
+** `valueonly[=<bool>]`: only show the value part of the trailer.
+** `key_value_separator=<sep>`: specify the separator inserted between
the key and value of each trailer. Defaults to ": ". Otherwise it
- shares the same semantics as 'separator=<sep>' above.
+ shares the same semantics as `separator=<sep>` above.
NOTE: Some placeholders may depend on other options given to the
-revision traversal engine. For example, the `%g*` reflog options will
+revision traversal engine. For example, the +%g*+ reflog options will
insert an empty string unless we are traversing reflog entries (e.g., by
-`git log -g`). The `%d` and `%D` placeholders will use the "short"
+`git log -g`). The +%d+ and +%D+ placeholders will use the "short"
decoration format if `--decorate` was not already provided on the command
line.
The boolean options accept an optional value `[=<bool-value>]`. The
-values taken by `--type=bool` git-config[1], like `yes` and `off`,
+values taken by `--type=bool` linkgit:git-config[1], like `yes` and `off`,
are all accepted. Giving a boolean option without `=<value>` is
equivalent to giving it with `=true`.
-If you add a `+` (plus sign) after '%' of a placeholder, a line-feed
+If you add a `+` (plus sign) after +%+ of a placeholder, a line-feed
is inserted immediately before the expansion if and only if the
placeholder expands to a non-empty string.
-If you add a `-` (minus sign) after '%' of a placeholder, all consecutive
+If you add a `-` (minus sign) after +%+ of a placeholder, all consecutive
line-feeds immediately preceding the expansion are deleted if and only if the
placeholder expands to an empty string.
-If you add a ` ` (space) after '%' of a placeholder, a space
+If you add a `' '` (space) after +%+ of a placeholder, a space
is inserted immediately before the expansion if and only if the
placeholder expands to a non-empty string.
-* 'tformat:'
+* `tformat:`
+
-The 'tformat:' format works exactly like 'format:', except that it
+The `tformat:` format works exactly like `format:`, except that it
provides "terminator" semantics instead of "separator" semantics. In
other words, each commit has the message terminator character (usually a
newline) appended, rather than a separator placed between entries.
@@ -378,7 +381,7 @@ $ git log -2 --pretty=tformat:%h 4da45bef \
7134973
---------------------
+
-In addition, any unrecognized string that has a `%` in it is interpreted
+In addition, any unrecognized string that has a +%+ in it is interpreted
as if it has `tformat:` in front of it. For example, these two are
equivalent:
+
diff --git a/Documentation/pretty-options.adoc b/Documentation/pretty-options.adoc
index 23888cd612..8aac51dbe7 100644
--- a/Documentation/pretty-options.adoc
+++ b/Documentation/pretty-options.adoc
@@ -1,38 +1,38 @@
---pretty[=<format>]::
---format=<format>::
+`--pretty[=<format>]`::
+`--format=<format>`::
Pretty-print the contents of the commit logs in a given format,
- where '<format>' can be one of 'oneline', 'short', 'medium',
- 'full', 'fuller', 'reference', 'email', 'raw', 'format:<string>'
- and 'tformat:<string>'. When '<format>' is none of the above,
- and has '%placeholder' in it, it acts as if
- '--pretty=tformat:<format>' were given.
+	where _<format>_ can be one of `oneline`, `short`, `medium`,
+ `full`, `fuller`, `reference`, `email`, `raw`, `format:<string>`
+ and `tformat:<string>`. When _<format>_ is none of the above,
+ and has `%<placeholder>` in it, it acts as if
+ `--pretty=tformat:<format>` were given.
+
See the "PRETTY FORMATS" section for some additional details for each
-format. When '=<format>' part is omitted, it defaults to 'medium'.
+format. When the `=<format>` part is omitted, it defaults to `medium`.
+
-Note: you can specify the default pretty format in the repository
+NOTE: you can specify the default pretty format in the repository
configuration (see linkgit:git-config[1]).
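++
+For example, `git log --pretty='%h %s'` is treated as
+`git log --pretty=tformat:'%h %s'` and prints one abbreviated commit hash
+and subject line per commit.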
---abbrev-commit::
+`--abbrev-commit`::
Instead of showing the full 40-byte hexadecimal commit object
name, show a prefix that names the object uniquely.
- "--abbrev=<n>" (which also modifies diff output, if it is displayed)
+ `--abbrev=<n>` (which also modifies diff output, if it is displayed)
option can be used to specify the minimum length of the prefix.
+
-This should make "--pretty=oneline" a whole lot more readable for
+This should make `--pretty=oneline` a whole lot more readable for
people using 80-column terminals.
---no-abbrev-commit::
+`--no-abbrev-commit`::
Show the full 40-byte hexadecimal commit object name. This negates
`--abbrev-commit`, either explicit or implied by other options such
- as "--oneline". It also overrides the `log.abbrevCommit` variable.
+ as `--oneline`. It also overrides the `log.abbrevCommit` variable.
---oneline::
- This is a shorthand for "--pretty=oneline --abbrev-commit"
+`--oneline`::
+ This is a shorthand for `--pretty=oneline --abbrev-commit`
used together.
---encoding=<encoding>::
+`--encoding=<encoding>`::
Commit objects record the character encoding used for the log message
in their encoding header; this option can be used to tell the
command to re-code the commit log message in the encoding
@@ -44,25 +44,30 @@ people using 80-column terminals.
to convert the commit, we will quietly output the original
object verbatim.
---expand-tabs=<n>::
---expand-tabs::
---no-expand-tabs::
+`--expand-tabs=<n>`::
+`--expand-tabs`::
+`--no-expand-tabs`::
Perform a tab expansion (replace each tab with enough spaces
- to fill to the next display column that is a multiple of '<n>')
+ to fill to the next display column that is a multiple of _<n>_)
in the log message before showing it in the output.
`--expand-tabs` is a short-hand for `--expand-tabs=8`, and
`--no-expand-tabs` is a short-hand for `--expand-tabs=0`,
which disables tab expansion.
+
By default, tabs are expanded in pretty formats that indent the log
-message by 4 spaces (i.e. 'medium', which is the default, 'full',
-and 'fuller').
+message by 4 spaces (i.e. `medium`, which is the default, `full`,
+and `fuller`).
ifndef::git-rev-list[]
---notes[=<ref>]::
+`--notes[=<ref>]`::
Show the notes (see linkgit:git-notes[1]) that annotate the
commit, when showing the commit log message. This is the default
+ifndef::with-breaking-changes[]
for `git log`, `git show` and `git whatchanged` commands when
+endif::with-breaking-changes[]
+ifdef::with-breaking-changes[]
+ for `git log` and `git show` commands when
+endif::with-breaking-changes[]
there is no `--pretty`, `--format`, or `--oneline` option given
on the command line.
+
@@ -75,28 +80,29 @@ to display. The ref can specify the full refname when it begins
with `refs/notes/`; when it begins with `notes/`, `refs/` and otherwise
`refs/notes/` is prefixed to form the full name of the ref.
+
-Multiple --notes options can be combined to control which notes are
-being displayed. Examples: "--notes=foo" will show only notes from
-"refs/notes/foo"; "--notes=foo --notes" will show both notes from
+Multiple `--notes` options can be combined to control which notes are
+being displayed. Examples: "`--notes=foo`" will show only notes from
+`refs/notes/foo`; "`--notes=foo --notes`" will show both notes from
"refs/notes/foo" and from the default notes ref(s).
---no-notes::
+`--no-notes`::
Do not show notes. This negates the above `--notes` option, by
resetting the list of notes refs from which notes are shown.
Options are parsed in the order given on the command line, so e.g.
- "--notes --notes=foo --no-notes --notes=bar" will only show notes
- from "refs/notes/bar".
+ "`--notes --notes=foo --no-notes --notes=bar`" will only show notes
+ from `refs/notes/bar`.
---show-notes-by-default::
+`--show-notes-by-default`::
Show the default notes unless options for displaying specific
notes are given.
---show-notes[=<ref>]::
---[no-]standard-notes::
- These options are deprecated. Use the above --notes/--no-notes
+`--show-notes[=<ref>]`::
+`--standard-notes`::
+`--no-standard-notes`::
+ These options are deprecated. Use the above `--notes`/`--no-notes`
options instead.
endif::git-rev-list[]
---show-signature::
+`--show-signature`::
Check the validity of a signed commit object by passing the signature
to `gpg --verify` and show the output.
diff --git a/Documentation/rev-list-description.adoc b/Documentation/rev-list-description.adoc
index a9efa7fa27..82c680e570 100644
--- a/Documentation/rev-list-description.adoc
+++ b/Documentation/rev-list-description.adoc
@@ -26,8 +26,8 @@ endif::git-log[]
means "list all the commits which are reachable from 'foo' or 'bar', but
not from 'baz'".
-A special notation "'<commit1>'..'<commit2>'" can be used as a
-short-hand for "^'<commit1>' '<commit2>'". For example, either of
+A special notation "`<commit1>..<commit2>`" can be used as a
+short-hand for "`^<commit1> <commit2>`". For example, either of
the following may be used interchangeably:
ifdef::git-rev-list[]
@@ -43,7 +43,7 @@ $ git log HEAD ^origin
-----------------------------------------------------------------------
endif::git-log[]
-Another special notation is "'<commit1>'...'<commit2>'" which is useful
+Another special notation is "`<commit1>...<commit2>`" which is useful
for merges. The resulting set of commits is the symmetric difference
between the two operands. The following two commands are equivalent:
diff --git a/Documentation/rev-list-options.adoc b/Documentation/rev-list-options.adoc
index d38875efda..d9665d82c8 100644
--- a/Documentation/rev-list-options.adoc
+++ b/Documentation/rev-list-options.adoc
@@ -6,60 +6,60 @@ special notations explained in the description, additional commit
limiting may be applied.
Using more options generally further limits the output (e.g.
-`--since=<date1>` limits to commits newer than `<date1>`, and using it
+`--since=<date1>` limits to commits newer than _<date1>_, and using it
with `--grep=<pattern>` further limits to commits whose log message
-has a line that matches `<pattern>`), unless otherwise noted.
+has a line that matches _<pattern>_), unless otherwise noted.
Note that these are applied before commit
ordering and formatting options, such as `--reverse`.
--<number>::
--n <number>::
---max-count=<number>::
- Limit the number of commits to output.
+`-<number>`::
+`-n <number>`::
+`--max-count=<number>`::
+ Limit the output to _<number>_ commits.
---skip=<number>::
- Skip 'number' commits before starting to show the commit output.
+`--skip=<number>`::
+ Skip _<number>_ commits before starting to show the commit output.
---since=<date>::
---after=<date>::
- Show commits more recent than a specific date.
+`--since=<date>`::
+`--after=<date>`::
+ Show commits more recent than _<date>_.
---since-as-filter=<date>::
- Show all commits more recent than a specific date. This visits
+`--since-as-filter=<date>`::
+ Show all commits more recent than _<date>_. This visits
all commits in the range, rather than stopping at the first commit which
- is older than a specific date.
+ is older than _<date>_.
---until=<date>::
---before=<date>::
- Show commits older than a specific date.
+`--until=<date>`::
+`--before=<date>`::
+ Show commits older than _<date>_.
ifdef::git-rev-list[]
---max-age=<timestamp>::
---min-age=<timestamp>::
+`--max-age=<timestamp>`::
+`--min-age=<timestamp>`::
Limit the commits output to specified time range.
endif::git-rev-list[]
---author=<pattern>::
---committer=<pattern>::
+`--author=<pattern>`::
+`--committer=<pattern>`::
Limit the commits output to ones with author/committer
- header lines that match the specified pattern (regular
- expression). With more than one `--author=<pattern>`,
- commits whose author matches any of the given patterns are
+ header lines that match the _<pattern>_ regular
+ expression. With more than one `--author=<pattern>`,
+ commits whose author matches any of the _<pattern>_ are
chosen (similarly for multiple `--committer=<pattern>`).
---grep-reflog=<pattern>::
+`--grep-reflog=<pattern>`::
Limit the commits output to ones with reflog entries that
- match the specified pattern (regular expression). With
+ match the _<pattern>_ regular expression. With
more than one `--grep-reflog`, commits whose reflog message
matches any of the given patterns are chosen. It is an
error to use this option unless `--walk-reflogs` is in use.
---grep=<pattern>::
+`--grep=<pattern>`::
Limit the commits output to ones with a log message that
- matches the specified pattern (regular expression). With
+ matches the _<pattern>_ regular expression. With
more than one `--grep=<pattern>`, commits whose message
- matches any of the given patterns are chosen (but see
+ matches any of the _<pattern>_ are chosen (but see
`--all-match`).
ifndef::git-rev-list[]
+
@@ -67,35 +67,35 @@ When `--notes` is in effect, the message from the notes is
matched as if it were part of the log message.
endif::git-rev-list[]
---all-match::
+`--all-match`::
Limit the commits output to ones that match all given `--grep`,
instead of ones that match at least one.
---invert-grep::
+`--invert-grep`::
Limit the commits output to ones with a log message that do not
- match the pattern specified with `--grep=<pattern>`.
+ match the _<pattern>_ specified with `--grep=<pattern>`.
--i::
---regexp-ignore-case::
+`-i`::
+`--regexp-ignore-case`::
Match the regular expression limiting patterns without regard to letter
case.
---basic-regexp::
+`--basic-regexp`::
Consider the limiting patterns to be basic regular expressions;
this is the default.
--E::
---extended-regexp::
+`-E`::
+`--extended-regexp`::
Consider the limiting patterns to be extended regular expressions
instead of the default basic regular expressions.
--F::
---fixed-strings::
+`-F`::
+`--fixed-strings`::
Consider the limiting patterns to be fixed strings (don't interpret
pattern as a regular expression).
--P::
---perl-regexp::
+`-P`::
+`--perl-regexp`::
Consider the limiting patterns to be Perl-compatible regular
expressions.
+
@@ -103,20 +103,20 @@ Support for these types of regular expressions is an optional
compile-time dependency. If Git wasn't compiled with support for them
providing this option will cause it to die.
---remove-empty::
+`--remove-empty`::
Stop when a given path disappears from the tree.
---merges::
+`--merges`::
Print only merge commits. This is exactly the same as `--min-parents=2`.
---no-merges::
+`--no-merges`::
Do not print commits with more than one parent. This is
exactly the same as `--max-parents=1`.
---min-parents=<number>::
---max-parents=<number>::
---no-min-parents::
---no-max-parents::
+`--min-parents=<number>`::
+`--max-parents=<number>`::
+`--no-min-parents`::
+`--no-max-parents`::
Show only commits which have at least (or at most) that many parent
commits. In particular, `--max-parents=1` is the same as `--no-merges`,
`--min-parents=2` is the same as `--merges`. `--max-parents=0`
@@ -126,7 +126,7 @@ providing this option will cause it to die.
again. Equivalent forms are `--min-parents=0` (any commit has 0 or more
parents) and `--max-parents=-1` (negative numbers denote no upper limit).
---first-parent::
+`--first-parent`::
When finding commits to include, follow only the first
parent commit upon seeing a merge commit. This option
can give a better overview when viewing the evolution of
@@ -141,14 +141,14 @@ This option also changes default diff format for merge commits
to `first-parent`, see `--diff-merges=first-parent` for details.
endif::git-log[]
---exclude-first-parent-only::
+`--exclude-first-parent-only`::
When finding commits to exclude (with a '{caret}'), follow only
the first parent commit upon seeing a merge commit.
This can be used to find the set of changes in a topic branch
from the point where it diverged from the remote branch, given
that arbitrary merges can be valid topic branch changes.
---not::
+`--not`::
Reverses the meaning of the '{caret}' prefix (or lack thereof)
for all following revision specifiers, up to the next `--not`.
When used on the command line before --stdin, the revisions passed
@@ -156,37 +156,37 @@ endif::git-log[]
via standard input, the revisions passed on the command line will
not be affected by it.
---all::
+`--all`::
Pretend as if all the refs in `refs/`, along with `HEAD`, are
- listed on the command line as '<commit>'.
+ listed on the command line as _<commit>_.
---branches[=<pattern>]::
+`--branches[=<pattern>]`::
Pretend as if all the refs in `refs/heads` are listed
- on the command line as '<commit>'. If '<pattern>' is given, limit
- branches to ones matching given shell glob. If pattern lacks '?',
+ on the command line as _<commit>_. If _<pattern>_ is given, limit
+ branches to ones matching given shell glob. If _<pattern>_ lacks '?',
'{asterisk}', or '[', '/{asterisk}' at the end is implied.
---tags[=<pattern>]::
+`--tags[=<pattern>]`::
Pretend as if all the refs in `refs/tags` are listed
- on the command line as '<commit>'. If '<pattern>' is given, limit
+ on the command line as _<commit>_. If _<pattern>_ is given, limit
tags to ones matching given shell glob. If pattern lacks '?', '{asterisk}',
or '[', '/{asterisk}' at the end is implied.
---remotes[=<pattern>]::
+`--remotes[=<pattern>]`::
Pretend as if all the refs in `refs/remotes` are listed
- on the command line as '<commit>'. If '<pattern>' is given, limit
+ on the command line as _<commit>_. If _<pattern>_ is given, limit
remote-tracking branches to ones matching given shell glob.
If pattern lacks '?', '{asterisk}', or '[', '/{asterisk}' at the end is implied.
---glob=<glob-pattern>::
- Pretend as if all the refs matching shell glob '<glob-pattern>'
- are listed on the command line as '<commit>'. Leading 'refs/',
+`--glob=<glob-pattern>`::
+ Pretend as if all the refs matching shell glob _<glob-pattern>_
+ are listed on the command line as _<commit>_. Leading 'refs/',
is automatically prepended if missing. If pattern lacks '?', '{asterisk}',
or '[', '/{asterisk}' at the end is implied.
---exclude=<glob-pattern>::
+`--exclude=<glob-pattern>`::
- Do not include refs matching '<glob-pattern>' that the next `--all`,
+ Do not include refs matching _<glob-pattern>_ that the next `--all`,
`--branches`, `--tags`, `--remotes`, or `--glob` would otherwise
consider. Repetitions of this option accumulate exclusion patterns
up to the next `--all`, `--branches`, `--tags`, `--remotes`, or
@@ -199,7 +199,7 @@ respectively, and they must begin with `refs/` when applied to `--glob`
or `--all`. If a trailing '/{asterisk}' is intended, it must be given
explicitly.
---exclude-hidden=[fetch|receive|uploadpack]::
+`--exclude-hidden=(fetch|receive|uploadpack)`::
Do not include refs that would be hidden by `git-fetch`,
`git-receive-pack` or `git-upload-pack` by consulting the appropriate
`fetch.hideRefs`, `receive.hideRefs` or `uploadpack.hideRefs`
@@ -207,11 +207,11 @@ explicitly.
linkgit:git-config[1]). This option affects the next pseudo-ref option
`--all` or `--glob` and is cleared after processing them.
---reflog::
+`--reflog`::
Pretend as if all objects mentioned by reflogs are listed on the
- command line as `<commit>`.
+ command line as _<commit>_.
---alternate-refs::
+`--alternate-refs`::
Pretend as if all objects mentioned as ref tips of alternate
repositories were listed on the command line. An alternate
repository is any repository whose object directory is specified
@@ -219,7 +219,7 @@ explicitly.
be modified by `core.alternateRefsCommand`, etc. See
linkgit:git-config[1].
---single-worktree::
+`--single-worktree`::
By default, all working trees will be examined by the
following options when there are more than one (see
linkgit:git-worktree[1]): `--all`, `--reflog` and
@@ -227,19 +227,19 @@ explicitly.
This option forces them to examine the current working tree
only.
---ignore-missing::
+`--ignore-missing`::
Upon seeing an invalid object name in the input, pretend as if
the bad input was not given.
ifndef::git-rev-list[]
---bisect::
+`--bisect`::
Pretend as if the bad bisection ref `refs/bisect/bad`
was listed and as if it was followed by `--not` and the good
bisection refs `refs/bisect/good-*` on the command
line.
endif::git-rev-list[]
---stdin::
+`--stdin`::
In addition to getting arguments from the command line, read
them from standard input as well. This accepts commits and
pseudo-options like `--all` and `--glob=`. When a `--` separator
@@ -249,15 +249,15 @@ endif::git-rev-list[]
influence any subsequent command line arguments.
ifdef::git-rev-list[]
---quiet::
+`--quiet`::
Don't print anything to standard output. This form
is primarily meant to allow the caller to
test the exit status to see if a range of objects is fully
connected (or not). It is faster than redirecting stdout
to `/dev/null` as the output does not have to be formatted.
---disk-usage::
---disk-usage=human::
+`--disk-usage`::
+`--disk-usage=human`::
Suppress normal output; instead, print the sum of the bytes used
for on-disk storage by the selected commits or objects. This is
equivalent to piping the output into `git cat-file
@@ -269,11 +269,11 @@ ifdef::git-rev-list[]
in human-readable string(e.g. 12.24 Kib, 3.50 Mib).
endif::git-rev-list[]
---cherry-mark::
+`--cherry-mark`::
Like `--cherry-pick` (see below) but mark equivalent commits
with `=` rather than omitting them, and inequivalent ones with `+`.
---cherry-pick::
+`--cherry-pick`::
Omit any commit that introduces the same change as
another commit on the ``other side'' when the set of
commits are limited with symmetric difference.
@@ -286,8 +286,8 @@ cherry-picked from the other branch (for example, ``3rd on b'' may be
cherry-picked from branch A). With this option, such pairs of commits are
excluded from the output.
---left-only::
---right-only::
+`--left-only`::
+`--right-only`::
List only commits on the respective side of a symmetric difference,
i.e. only those which would be marked `<` resp. `>` by
`--left-right`.
@@ -298,20 +298,20 @@ commits from `B` which are in `A` or are patch-equivalent to a commit in
More precisely, `--cherry-pick --right-only --no-merges` gives the exact
list.
---cherry::
+`--cherry`::
A synonym for `--right-only --cherry-mark --no-merges`; useful to
limit the output to the commits on our side and mark those that
have been applied to the other side of a forked history with
`git log --cherry upstream...mybranch`, similar to
`git cherry upstream mybranch`.
--g::
---walk-reflogs::
+`-g`::
+`--walk-reflogs`::
Instead of walking the commit ancestry chain, walk
reflog entries from the most recent one to older ones.
When this option is used you cannot specify commits to
- exclude (that is, '{caret}commit', 'commit1..commit2',
- and 'commit1\...commit2' notations cannot be used).
+ exclude (that is, `^<commit>`, `<commit1>..<commit2>`,
+ and `<commit1>...<commit2>` notations cannot be used).
+
With `--pretty` format other than `oneline` and `reference` (for obvious reasons),
this causes the output to have two extra lines of information
@@ -340,29 +340,29 @@ See also linkgit:git-reflog[1].
+
Under `--pretty=reference`, this information will not be shown at all.
---merge::
+`--merge`::
Show commits touching conflicted paths in the range `HEAD...<other>`,
where `<other>` is the first existing pseudoref in `MERGE_HEAD`,
`CHERRY_PICK_HEAD`, `REVERT_HEAD` or `REBASE_HEAD`. Only works
when the index has unmerged entries. This option can be used to show
relevant commits when resolving conflicts from a 3-way merge.
---boundary::
+`--boundary`::
Output excluded boundary commits. Boundary commits are
prefixed with `-`.
ifdef::git-rev-list[]
---use-bitmap-index::
+`--use-bitmap-index`::
Try to speed up the traversal using the pack bitmap index (if
one is available). Note that when traversing with `--objects`,
trees and blobs will not have their associated path printed.
---progress=<header>::
+`--progress=<header>`::
Show progress reports on stderr as objects are considered. The
`<header>` text will be printed with each progress update.
--z::
+`-z`::
Instead of being newline-delimited, each outputted object and its
accompanying metadata is delimited using NUL bytes. Output is printed
in the following form:
@@ -397,56 +397,56 @@ is how to do it, as there are various strategies to simplify the history.
The following options select the commits to be shown:
-<paths>::
+`<paths>`::
Commits modifying the given <paths> are selected.
---simplify-by-decoration::
+`--simplify-by-decoration`::
Commits that are referred by some branch or tag are selected.
Note that extra commits can be shown to give a meaningful history.
The following options affect the way the simplification is performed:
-Default mode::
+`Default mode`::
Simplifies the history to the simplest history explaining the
final state of the tree. Simplest because it prunes some side
branches if the end result is the same (i.e. merging branches
with the same content)
---show-pulls::
+`--show-pulls`::
Include all commits from the default mode, but also any merge
commits that are not TREESAME to the first parent but are
TREESAME to a later parent. This mode is helpful for showing
the merge commits that "first introduced" a change to a branch.
---full-history::
+`--full-history`::
Same as the default mode, but does not prune some history.
---dense::
+`--dense`::
Only the selected commits are shown, plus some to have a
meaningful history.
---sparse::
+`--sparse`::
All commits in the simplified history are shown.
---simplify-merges::
+`--simplify-merges`::
Additional option to `--full-history` to remove some needless
merges from the resulting history, as there are no selected
commits contributing to this merge.
---ancestry-path[=<commit>]::
- When given a range of commits to display (e.g. 'commit1..commit2'
- or 'commit2 {caret}commit1'), and a commit <commit> in that range,
+`--ancestry-path[=<commit>]`::
+ When given a range of commits to display (e.g. `<commit1>..<commit2>`
+ or `<commit2> ^<commit1>`), and a commit _<commit>_ in that range,
only display commits in that range
- that are ancestors of <commit>, descendants of <commit>, or
- <commit> itself. If no commit is specified, use 'commit1' (the
- excluded part of the range) as <commit>. Can be passed multiple
+ that are ancestors of _<commit>_, descendants of _<commit>_, or
+ _<commit>_ itself. If no commit is specified, use _<commit1>_ (the
+ excluded part of the range) as _<commit>_. Can be passed multiple
times; if so, a commit is included if it is any of the commits
given or if it is an ancestor or descendant of one of them.
A more detailed explanation follows.
-Suppose you specified `foo` as the <paths>. We shall call commits
+Suppose you specified `foo` as the _<paths>_. We shall call commits
that modify `foo` !TREESAME, and the rest TREESAME. (In a diff
filtered for `foo`, they look different and equal, respectively.)
@@ -466,22 +466,22 @@ The horizontal line of history A---Q is taken to be the first parent of
each merge. The commits are:
* `I` is the initial commit, in which `foo` exists with contents
- ``asdf'', and a file `quux` exists with contents ``quux''. Initial
+ `asdf`, and a file `quux` exists with contents `quux`. Initial
commits are compared to an empty tree, so `I` is !TREESAME.
-* In `A`, `foo` contains just ``foo''.
+* In `A`, `foo` contains just `foo`.
* `B` contains the same change as `A`. Its merge `M` is trivial and
hence TREESAME to all parents.
-* `C` does not change `foo`, but its merge `N` changes it to ``foobar'',
+* `C` does not change `foo`, but its merge `N` changes it to `foobar`,
so it is not TREESAME to any parent.
-* `D` sets `foo` to ``baz''. Its merge `O` combines the strings from
- `N` and `D` to ``foobarbaz''; i.e., it is not TREESAME to any parent.
+* `D` sets `foo` to `baz`. Its merge `O` combines the strings from
+ `N` and `D` to `foobarbaz`; i.e., it is not TREESAME to any parent.
-* `E` changes `quux` to ``xyzzy'', and its merge `P` combines the
- strings to ``quux xyzzy''. `P` is TREESAME to `O`, but not to `E`.
+* `E` changes `quux` to `xyzzy`, and its merge `P` combines the
+ strings to `quux xyzzy`. `P` is TREESAME to `O`, but not to `E`.
* `X` is an independent root commit that added a new file `side`, and `Y`
modified it. `Y` is TREESAME to `X`. Its merge `Q` added `side` to `P`, and
@@ -517,7 +517,7 @@ Parent/child relations are only visible with `--parents`, but that does
not affect the commits selected in default mode, so we have shown the
parent lines.
---full-history without parent rewriting::
+`--full-history` without parent rewriting::
This mode differs from the default in one point: always follow
all parents of a merge, even if it is TREESAME to one of them.
Even if more than one side of the merge has commits that are
@@ -536,7 +536,7 @@ Note that without parent rewriting, it is not really possible to talk
about the parent/child relationships between the commits, so we show
them disconnected.
---full-history with parent rewriting::
+`--full-history` with parent rewriting::
Ordinary commits are only included if they are !TREESAME
(though this can be changed, see `--sparse` below).
+
@@ -560,18 +560,18 @@ rewritten to contain `E`'s parent `I`. The same happened for `C` and
In addition to the above settings, you can change whether TREESAME
affects inclusion:
---dense::
+`--dense`::
Commits that are walked are included if they are not TREESAME
to any parent.
---sparse::
+`--sparse`::
All commits that are walked are included.
+
Note that without `--full-history`, this still simplifies merges: if
one of the parents is TREESAME, we follow only that one, so the other
sides of the merge are never walked.
---simplify-merges::
+`--simplify-merges`::
First, build a history graph in the same way that
`--full-history` with parent rewriting does (see above).
+
@@ -618,9 +618,9 @@ Note the major differences in `N`, `P`, and `Q` over `--full-history`:
There is another simplification mode available:
---ancestry-path[=<commit>]::
+`--ancestry-path[=<commit>]`::
Limit the displayed commits to those which are an ancestor of
- <commit>, or which are a descendant of <commit>, or are <commit>
+ _<commit>_, or which are a descendant of _<commit>_, or are _<commit>_
itself.
+
As an example use case, consider the following commit history:
@@ -636,15 +636,15 @@ As an example use case, consider the following commit history:
A regular 'D..M' computes the set of commits that are ancestors of `M`,
but excludes the ones that are ancestors of `D`. This is useful to see
what happened to the history leading to `M` since `D`, in the sense
-that ``what does `M` have that did not exist in `D`''. The result in this
+that "what does `M` have that did not exist in `D`". The result in this
example would be all the commits, except `A` and `B` (and `D` itself,
of course).
+
When we want to find out what commits in `M` are contaminated with the
bug introduced by `D` and need fixing, however, we might want to view
-only the subset of 'D..M' that are actually descendants of `D`, i.e.
+only the subset of `D..M` that are actually descendants of `D`, i.e.
excluding `C` and `K`. This is exactly what the `--ancestry-path`
-option does. Applied to the 'D..M' range, it results in:
+option does. Applied to the `D..M` range, it results in:
+
-----------------------------------------------------------------------
E-------F
@@ -655,7 +655,7 @@ option does. Applied to the 'D..M' range, it results in:
-----------------------------------------------------------------------
+
We can also use `--ancestry-path=D` instead of `--ancestry-path` which
-means the same thing when applied to the 'D..M' range but is just more
+means the same thing when applied to the `D..M` range but is just more
explicit.
+
If we instead are interested in a given topic within this range, and all
@@ -770,7 +770,7 @@ into the important branch. This commit may have information about why
the change `X` came to override the changes from `A` and `B` in its
commit message.
---show-pulls::
+`--show-pulls`::
In addition to the commits shown in the default history, show
each merge commit that is not TREESAME to its first parent but
is TREESAME to a later parent.
@@ -819,7 +819,7 @@ ifdef::git-rev-list[]
Bisection Helpers
~~~~~~~~~~~~~~~~~
---bisect::
+`--bisect`::
Limit output to the one commit object which is roughly halfway between
included and excluded commits. Note that the bad bisection ref
`refs/bisect/bad` is added to the included commits (if it
@@ -843,7 +843,7 @@ introduces a regression is thus reduced to a binary search: repeatedly
generate and test new 'midpoint's until the commit chain is of length
one.
---bisect-vars::
+`--bisect-vars`::
This calculates the same as `--bisect`, except that refs in
`refs/bisect/` are not used, and except that this outputs
text ready to be eval'ed by the shell. These lines will assign the
@@ -855,7 +855,7 @@ one.
`bisect_bad`, and the number of commits we are bisecting right now to
`bisect_all`.
---bisect-all::
+`--bisect-all`::
This outputs all the commit objects between the included and excluded
commits, ordered by their distance to the included and excluded
commits. Refs in `refs/bisect/` are not used. The farthest
@@ -878,15 +878,15 @@ Commit Ordering
By default, the commits are shown in reverse chronological order.
---date-order::
+`--date-order`::
Show no parents before all of its children are shown, but
otherwise show commits in the commit timestamp order.
---author-date-order::
+`--author-date-order`::
Show no parents before all of its children are shown, but
otherwise show commits in the author timestamp order.
---topo-order::
+`--topo-order`::
Show no parents before all of its children are shown, and
avoid showing commits on multiple lines of history
intermixed.
@@ -910,8 +910,8 @@ With `--topo-order`, they would show 8 6 5 3 7 4 2 1 (or 8 7 4 2 6 5
avoid showing the commits from two parallel development track mixed
together.
---reverse::
- Output the commits chosen to be shown (see Commit Limiting
+`--reverse`::
+ Output the commits chosen to be shown (see 'Commit Limiting'
section above) in reverse order. Cannot be combined with
`--walk-reflogs`.
endif::git-shortlog[]
@@ -923,39 +923,39 @@ Object Traversal
These options are mostly targeted for packing of Git repositories.
ifdef::git-rev-list[]
---objects::
+`--objects`::
Print the object IDs of any object referenced by the listed
- commits. `--objects foo ^bar` thus means ``send me
+ commits. `--objects foo ^bar` thus means "send me
all object IDs which I need to download if I have the commit
- object _bar_ but not _foo_''. See also `--object-names` below.
+ object `bar` but not `foo`". See also `--object-names` below.
---in-commit-order::
+`--in-commit-order`::
Print tree and blob ids in order of the commits. The tree
and blob ids are printed after they are first referenced
by a commit.
---objects-edge::
+`--objects-edge`::
Similar to `--objects`, but also print the IDs of excluded
- commits prefixed with a ``-'' character. This is used by
+ commits prefixed with a "`-`" character. This is used by
linkgit:git-pack-objects[1] to build a ``thin'' pack, which records
objects in deltified form based on objects contained in these
excluded commits to reduce network traffic.
---objects-edge-aggressive::
+`--objects-edge-aggressive`::
Similar to `--objects-edge`, but it tries harder to find excluded
commits at the cost of increased time. This is used instead of
`--objects-edge` to build ``thin'' packs for shallow repositories.
---indexed-objects::
+`--indexed-objects`::
Pretend as if all trees and blobs used by the index are listed
on the command line. Note that you probably want to use
`--objects`, too.
---unpacked::
+`--unpacked`::
Only useful with `--objects`; print the object IDs that are not
in packs.
---object-names::
+`--object-names`::
Only useful with `--objects`; print the names of the object IDs
that are found. This is the default behavior. Note that the
"name" of each object is ambiguous, and mostly intended as a
@@ -964,52 +964,52 @@ ifdef::git-rev-list[]
to remove newlines; and if an object would appear multiple times
with different names, only one name is shown.
---no-object-names::
+`--no-object-names`::
Only useful with `--objects`; does not print the names of the object
IDs that are found. This inverts `--object-names`. This flag allows
the output to be more easily parsed by commands such as
linkgit:git-cat-file[1].
---filter=<filter-spec>::
+`--filter=<filter-spec>`::
Only useful with one of the `--objects*`; omits objects (usually
- blobs) from the list of printed objects. The '<filter-spec>'
+ blobs) from the list of printed objects. The _<filter-spec>_
may be one of the following:
+
-The form '--filter=blob:none' omits all blobs.
+The form `--filter=blob:none` omits all blobs.
+
-The form '--filter=blob:limit=<n>[kmg]' omits blobs of size at least n
-bytes or units. n may be zero. The suffixes k, m, and g can be used
-to name units in KiB, MiB, or GiB. For example, 'blob:limit=1k'
+The form `--filter=blob:limit=<n>[kmg]` omits blobs of size at least _<n>_
+bytes or units. _<n>_ may be zero. The suffixes `k`, `m`, and `g` can be used
+to name units in KiB, MiB, or GiB. For example, `blob:limit=1k`
is the same as 'blob:limit=1024'.
+
-The form '--filter=object:type=(tag|commit|tree|blob)' omits all objects
+The form `--filter=object:type=(tag|commit|tree|blob)` omits all objects
which are not of the requested type.
+
-The form '--filter=sparse:oid=<blob-ish>' uses a sparse-checkout
-specification contained in the blob (or blob-expression) '<blob-ish>'
+The form `--filter=sparse:oid=<blob-ish>` uses a sparse-checkout
+specification contained in the blob (or blob-expression) _<blob-ish>_
to omit blobs that would not be required for a sparse checkout on
the requested refs.
+
-The form '--filter=tree:<depth>' omits all blobs and trees whose depth
-from the root tree is >= <depth> (minimum depth if an object is located
-at multiple depths in the commits traversed). <depth>=0 will not include
+The form `--filter=tree:<depth>` omits all blobs and trees whose depth
+from the root tree is >= _<depth>_ (minimum depth if an object is located
+at multiple depths in the commits traversed). _<depth>_=0 will not include
any trees or blobs unless included explicitly in the command-line (or
-standard input when --stdin is used). <depth>=1 will include only the
+standard input when `--stdin` is used). _<depth>_=1 will include only the
tree and blobs which are referenced directly by a commit reachable from
-<commit> or an explicitly-given object. <depth>=2 is like <depth>=1
+_<commit>_ or an explicitly-given object. _<depth>_=2 is like _<depth>_=1
while also including trees and blobs one more level removed from an
explicitly-given commit or tree.
+
-Note that the form '--filter=sparse:path=<path>' that wants to read
+Note that the form `--filter=sparse:path=<path>` that wants to read
from an arbitrary path on the filesystem has been dropped for security
reasons.
+
-Multiple '--filter=' flags can be specified to combine filters. Only
+Multiple `--filter=` flags can be specified to combine filters. Only
objects which are accepted by every filter are included.
+
-The form '--filter=combine:<filter1>+<filter2>+...<filterN>' can also be
+The form `--filter=combine:<filter1>+<filter2>+...<filterN>` can also be
used to combined several filters, but this is harder than just repeating
-the '--filter' flag and is usually not necessary. Filters are joined by
+the `--filter` flag and is usually not necessary. Filters are joined by
'{plus}' and individual filters are %-encoded (i.e. URL-encoded).
Besides the '{plus}' and '%' characters, the following characters are
reserved and also must be encoded: `~!@#$^&*()[]{}\;",<>?`+&#39;&#96;+
@@ -1017,52 +1017,52 @@ as well as all characters with ASCII code &lt;= `0x20`, which includes
space and newline.
+
Other arbitrary characters can also be encoded. For instance,
-'combine:tree:3+blob:none' and 'combine:tree%3A3+blob%3Anone' are
+`combine:tree:3+blob:none` and `combine:tree%3A3+blob%3Anone` are
equivalent.
---no-filter::
+`--no-filter`::
Turn off any previous `--filter=` argument.
---filter-provided-objects::
+`--filter-provided-objects`::
Filter the list of explicitly provided objects, which would otherwise
always be printed even if they did not match any of the filters. Only
useful with `--filter=`.
---filter-print-omitted::
+`--filter-print-omitted`::
Only useful with `--filter=`; prints a list of the objects omitted
by the filter. Object IDs are prefixed with a ``~'' character.
---missing=<missing-action>::
+`--missing=<missing-action>`::
A debug option to help with future "partial clone" development.
This option specifies how missing objects are handled.
+
-The form '--missing=error' requests that rev-list stop with an error if
+The form `--missing=error` requests that rev-list stop with an error if
a missing object is encountered. This is the default action.
+
-The form '--missing=allow-any' will allow object traversal to continue
+The form `--missing=allow-any` will allow object traversal to continue
if a missing object is encountered. Missing objects will silently be
omitted from the results.
+
-The form '--missing=allow-promisor' is like 'allow-any', but will only
+The form `--missing=allow-promisor` is like `allow-any`, but will only
allow object traversal to continue for EXPECTED promisor missing objects.
Unexpected missing objects will raise an error.
+
-The form '--missing=print' is like 'allow-any', but will also print a
+The form `--missing=print` is like `allow-any`, but will also print a
list of the missing objects. Object IDs are prefixed with a ``?'' character.
+
-The form '--missing=print-info' is like 'print', but will also print additional
+The form `--missing=print-info` is like `print`, but will also print additional
information about the missing object inferred from its containing object. The
information is all printed on the same line with the missing object ID in the
form: `?<oid> [<token>=<value>]...`. The `<token>=<value>` pairs containing
-additional information are separated from each other by a SP. The value is
-encoded in a token specific fashion, but SP or LF contained in value are always
+additional information are separated from each other by a _SP_. The value is
+encoded in a token specific fashion, but _SP_ or _LF_ contained in value are always
expected to be represented in such a way that the resulting encoded value does
not have either of these two problematic bytes. Each `<token>=<value>` may be
one of the following:
+
--
* The `path=<path>` shows the path of the missing object inferred from a
- containing object. A path containing SP or special characters is enclosed in
+ containing object. A path containing _SP_ or special characters is enclosed in
double-quotes in the C style as needed.
+
* The `type=<type>` shows the type of the missing object inferred from a
@@ -1073,7 +1073,7 @@ If some tips passed to the traversal are missing, they will be
considered as missing too, and the traversal will ignore them. In case
we cannot get their Object ID though, an error will be raised.
---exclude-promisor-objects::
+`--exclude-promisor-objects`::
(For internal use only.) Prefilter object traversal at
promisor boundary. This is used with partial clone. This is
stronger than `--missing=allow-promisor` because it limits the
@@ -1081,7 +1081,7 @@ we cannot get their Object ID though, an error will be raised.
objects.
endif::git-rev-list[]
---no-walk[=(sorted|unsorted)]::
+`--no-walk[=(sorted|unsorted)]`::
Only show the given commits, but do not traverse their ancestors.
This has no effect if a range is specified. If the argument
`unsorted` is given, the commits are shown in the order they were
@@ -1090,7 +1090,7 @@ endif::git-rev-list[]
by commit time.
Cannot be combined with `--graph`.
---do-walk::
+`--do-walk`::
Overrides a previous `--no-walk`.
endif::git-shortlog[]
@@ -1100,16 +1100,21 @@ Commit Formatting
ifdef::git-rev-list[]
Using these options, linkgit:git-rev-list[1] will act similar to the
-more specialized family of commit log tools: linkgit:git-log[1],
-linkgit:git-show[1], and linkgit:git-whatchanged[1]
+more specialized family of commit log tools:
+ifndef::with-breaking-changes[]
+linkgit:git-log[1], linkgit:git-show[1], and linkgit:git-whatchanged[1].
+endif::with-breaking-changes[]
+ifdef::with-breaking-changes[]
+linkgit:git-log[1] and linkgit:git-show[1].
+endif::with-breaking-changes[]
endif::git-rev-list[]
include::pretty-options.adoc[]
---relative-date::
+`--relative-date`::
Synonym for `--date=relative`.
---date=<format>::
+`--date=<format>`::
Only takes effect for dates shown in human-readable format, such
as when using `--pretty`. `log.date` config variable sets a default
value for the log command's `--date` option. By default, dates
@@ -1159,12 +1164,12 @@ omitted.
1970). As with `--raw`, this is always in UTC and therefore `-local`
has no effect.
-`--date=format:...` feeds the format `...` to your system `strftime`,
-except for %s, %z, and %Z, which are handled internally.
+`--date=format:<format>` feeds the _<format>_ to your system `strftime`,
+except for `%s`, `%z`, and `%Z`, which are handled internally.
Use `--date=format:%c` to show the date in your system locale's
-preferred format. See the `strftime` manual for a complete list of
+preferred format. See the `strftime`(3) manual for a complete list of
format placeholders. When using `-local`, the correct syntax is
-`--date=format-local:...`.
+`--date=format-local:<format>`.
`--date=default` is the default format, and is based on ctime(3)
output. It shows a single line with three-letter day of the week,
@@ -1174,33 +1179,33 @@ the local time zone is used, e.g. `Thu Jan 1 00:00:00 1970 +0000`.
--
ifdef::git-rev-list[]
---header::
+`--header`::
Print the contents of the commit in raw-format; each record is
separated with a NUL character.
---no-commit-header::
+`--no-commit-header`::
Suppress the header line containing "commit" and the object ID printed before
the specified format. This has no effect on the built-in formats; only custom
formats are affected.
---commit-header::
+`--commit-header`::
Overrides a previous `--no-commit-header`.
endif::git-rev-list[]
---parents::
+`--parents`::
Print also the parents of the commit (in the form "commit parent...").
Also enables parent rewriting, see 'History Simplification' above.
---children::
+`--children`::
Print also the children of the commit (in the form "commit child...").
Also enables parent rewriting, see 'History Simplification' above.
ifdef::git-rev-list[]
---timestamp::
+`--timestamp`::
Print the raw commit timestamp.
endif::git-rev-list[]
---left-right::
+`--left-right`::
Mark which side of a symmetric difference a commit is reachable from.
Commits from the left side are prefixed with `<` and those from
the right with `>`. If combined with `--boundary`, those
@@ -1229,7 +1234,7 @@ you would get an output like this:
-xxxxxxx... 1st on a
-----------------------------------------------------------------------
---graph::
+`--graph`::
Draw a text-based graphical representation of the commit history
on the left hand side of the output. This may cause extra lines
to be printed in between commits, in order for the graph history
@@ -1241,15 +1246,15 @@ This enables parent rewriting, see 'History Simplification' above.
This implies the `--topo-order` option by default, but the
`--date-order` option may also be specified.
---show-linear-break[=<barrier>]::
- When --graph is not used, all history branches are flattened
+`--show-linear-break[=<barrier>]`::
+ When `--graph` is not used, all history branches are flattened
which can make it hard to see that the two consecutive commits
do not belong to a linear branch. This option puts a barrier
- in between them in that case. If `<barrier>` is specified, it
+ in between them in that case. If _<barrier>_ is specified, it
is the string that will be shown instead of the default one.
ifdef::git-rev-list[]
---count::
+`--count`::
Print a number stating how many commits would have been
listed, and suppress all other output. When used together
with `--left-right`, instead print the counts for left and
diff --git a/Documentation/technical/api-path-walk.adoc b/Documentation/technical/api-path-walk.adoc
index 3e089211fb..34c905eb9c 100644
--- a/Documentation/technical/api-path-walk.adoc
+++ b/Documentation/technical/api-path-walk.adoc
@@ -56,6 +56,14 @@ better off using the revision walk API instead.
the revision walk so that the walk emits commits marked with the
`UNINTERESTING` flag.
+`edge_aggressive`::
+ For performance reasons, usually only the boundary commits are
+ explored to find UNINTERESTING objects. However, in the case of
+ shallow clones it can be helpful to mark all trees and blobs
+ reachable from UNINTERESTING tip commits as UNINTERESTING. This
+ matches the behavior of `--objects-edge-aggressive` in the
+ revision API.
+
`pl`::
This pattern list pointer allows focusing the path-walk search to
a set of patterns, only emitting paths that match the given
@@ -69,4 +77,5 @@ Examples
See example usages in:
`t/helper/test-path-walk.c`,
+ `builtin/pack-objects.c`,
`builtin/backfill.c`
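
A minimal sketch of how a caller might enable the new `edge_aggressive` behaviour documented in this hunk. The `struct path_walk_info`, `PATH_WALK_INFO_INIT`, `path_fn`, and `walk_objects_by_path()` names are assumptions taken from the rest of the path-walk API; they do not appear in this hunk and are shown only for illustration.

-----------------------------------------------------------------------
/*
 * Sketch only: struct, initializer, callback typedef, and function
 * names are assumed from the path-walk API, not shown in this patch.
 */
static int walk_with_aggressive_edges(struct rev_info *revs,
				      path_fn callback, void *cb_data)
{
	struct path_walk_info info = PATH_WALK_INFO_INIT;

	info.revs = revs;
	info.path_fn = callback;
	info.path_fn_data = cb_data;
	info.edge_aggressive = 1;	/* the field documented above */

	return walk_objects_by_path(&info);
}
-----------------------------------------------------------------------
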
diff --git a/Documentation/technical/build-systems.adoc b/Documentation/technical/build-systems.adoc
index d9dafb407c..3c5237b9fd 100644
--- a/Documentation/technical/build-systems.adoc
+++ b/Documentation/technical/build-systems.adoc
@@ -32,7 +32,10 @@ that generally have somebody running test pipelines against regularly:
- OpenBSD
The platforms which must be supported by the tool should be aligned with our
-[platform support policy](platform-support.txt).
+platform support policy (see platform-support.adoc).
+// once we lose AsciiDoc compatibility, we can start writing the above as:
+// xref:platform-support.adoc#platform-support-policy[platform support policy]
+// or something like that, but until then....
=== Auto-detection of supported features
diff --git a/Documentation/technical/sparse-checkout.adoc b/Documentation/technical/sparse-checkout.adoc
index 8202172b70..0f750ef3e3 100644
--- a/Documentation/technical/sparse-checkout.adoc
+++ b/Documentation/technical/sparse-checkout.adoc
@@ -440,7 +440,7 @@ understanding these differences can be beneficial.
* blame (only matters when one or more -C flags are passed)
* and annotate
* log
- * whatchanged
+ * whatchanged (may not exist anymore)
* ls-files
* diff-index
* diff-tree
diff --git a/Documentation/user-manual.adoc b/Documentation/user-manual.adoc
index d2b478ad23..8d00a9e822 100644
--- a/Documentation/user-manual.adoc
+++ b/Documentation/user-manual.adoc
@@ -4240,7 +4240,7 @@ command `git`. The source side of a builtin is
- an entry in `BUILTIN_OBJECTS` in the `Makefile`.
Sometimes, more than one builtin is contained in one source file. For
-example, `cmd_whatchanged()` and `cmd_log()` both reside in `builtin/log.c`,
+example, `cmd_show()` and `cmd_log()` both reside in `builtin/log.c`,
since they share quite a bit of code. In that case, the commands which are
_not_ named like the `.c` file in which they live have to be listed in
`BUILT_INS` in the `Makefile`.
@@ -4301,11 +4301,11 @@ Now, for the meat:
-----------------------------------------------------------------------------
case 0:
- buf = read_object_with_reference(sha1, argv[1], &size, NULL);
+ buf = odb_read_object_peeled(r->objects, sha1, argv[1], &size, NULL);
-----------------------------------------------------------------------------
This is how you read a blob (actually, not only a blob, but any type of
-object). To know how the function `read_object_with_reference()` actually
+object). To know how the function `odb_read_object_peeled()` actually
works, find the source code for it (something like `git grep
read_object_with | grep ":[a-z]"` in the Git repository), and read
the source.
diff --git a/GIT-VERSION-GEN b/GIT-VERSION-GEN
index 5d17e1ef62..63463c8773 100755
--- a/GIT-VERSION-GEN
+++ b/GIT-VERSION-GEN
@@ -1,6 +1,6 @@
#!/bin/sh
-DEF_VER=v2.50.0
+DEF_VER=v2.50.GIT
LF='
'
diff --git a/Makefile b/Makefile
index 70d1543b6b..5f7dd79dfa 100644
--- a/Makefile
+++ b/Makefile
@@ -114,8 +114,6 @@ include shared.mak
#
# Define NO_INTPTR_T if you don't have intptr_t or uintptr_t.
#
-# Define NO_UINTMAX_T if you don't have uintmax_t.
-#
# Define NEEDS_SOCKET if linking with libc is not enough (SunOS,
# Patrick Mauritz).
#
@@ -1085,8 +1083,8 @@ LIB_OBJS += notes.o
LIB_OBJS += object-file-convert.o
LIB_OBJS += object-file.o
LIB_OBJS += object-name.o
-LIB_OBJS += object-store.o
LIB_OBJS += object.o
+LIB_OBJS += odb.o
LIB_OBJS += oid-array.o
LIB_OBJS += oidmap.o
LIB_OBJS += oidset.o
@@ -1367,6 +1365,7 @@ CLAR_TEST_SUITES += u-prio-queue
CLAR_TEST_SUITES += u-reftable-tree
CLAR_TEST_SUITES += u-strbuf
CLAR_TEST_SUITES += u-strcmp-offset
+CLAR_TEST_SUITES += u-string-list
CLAR_TEST_SUITES += u-strvec
CLAR_TEST_SUITES += u-trailer
CLAR_TEST_SUITES += u-urlmatch-normalization
@@ -1918,9 +1917,6 @@ endif
ifdef NO_INTPTR_T
COMPAT_CFLAGS += -DNO_INTPTR_T
endif
-ifdef NO_UINTMAX_T
- BASIC_CFLAGS += -Duintmax_t=uint32_t
-endif
ifdef NO_SOCKADDR_STORAGE
ifdef NO_IPV6
BASIC_CFLAGS += -Dsockaddr_storage=sockaddr_in
@@ -3473,11 +3469,14 @@ endif
coccicheck-test: $(COCCI_TEST_RES_GEN)
coccicheck: coccicheck-test
+
ifdef SPATCH_CONCAT_COCCI
-coccicheck: contrib/coccinelle/ALL.cocci.patch
+COCCICHECK_PATCH_MUST_BE_EMPTY_FILES = contrib/coccinelle/ALL.cocci.patch
else
-coccicheck: $(COCCICHECK_PATCHES_INTREE)
+COCCICHECK_PATCH_MUST_BE_EMPTY_FILES = $(COCCICHECK_PATCHES_INTREE)
endif
+coccicheck: $(COCCICHECK_PATCH_MUST_BE_EMPTY_FILES)
+ ! grep -q ^ $(COCCICHECK_PATCH_MUST_BE_EMPTY_FILES) /dev/null
# See contrib/coccinelle/README
coccicheck-pending: coccicheck-test
diff --git a/RelNotes b/RelNotes
index eaaaf878d3..48f15770a4 120000
--- a/RelNotes
+++ b/RelNotes
@@ -1 +1 @@
-Documentation/RelNotes/2.50.0.adoc \ No newline at end of file
+Documentation/RelNotes/2.51.0.adoc \ No newline at end of file
diff --git a/apply.c b/apply.c
index 8bbe6ed224..a3804316fb 100644
--- a/apply.c
+++ b/apply.c
@@ -14,7 +14,7 @@
#include "abspath.h"
#include "base85.h"
#include "config.h"
-#include "object-store.h"
+#include "odb.h"
#include "delta.h"
#include "diff.h"
#include "dir.h"
@@ -3204,14 +3204,14 @@ static int apply_binary(struct apply_state *state,
return 0; /* deletion patch */
}
- if (has_object(the_repository, &oid, 0)) {
+ if (odb_has_object(the_repository->objects, &oid, 0)) {
/* We already have the postimage */
enum object_type type;
unsigned long size;
char *result;
- result = repo_read_object_file(the_repository, &oid, &type,
- &size);
+ result = odb_read_object(the_repository->objects, &oid,
+ &type, &size);
if (!result)
return error(_("the necessary postimage %s for "
"'%s' cannot be read"),
@@ -3273,8 +3273,8 @@ static int read_blob_object(struct strbuf *buf, const struct object_id *oid, uns
unsigned long sz;
char *result;
- result = repo_read_object_file(the_repository, oid, &type,
- &sz);
+ result = odb_read_object(the_repository->objects, oid,
+ &type, &sz);
if (!result)
return -1;
/* XXX read_sha1_file NUL-terminates */
@@ -3503,7 +3503,7 @@ static int resolve_to(struct image *image, const struct object_id *result_id)
image_clear(image);
- data = repo_read_object_file(the_repository, result_id, &type, &size);
+ data = odb_read_object(the_repository->objects, result_id, &type, &size);
if (!data || type != OBJ_BLOB)
die("unable to read blob object %s", oid_to_hex(result_id));
strbuf_attach(&image->buf, data, size, size + 1);
@@ -4565,7 +4565,7 @@ static int create_file(struct apply_state *state, struct patch *patch)
if (patch->conflicted_threeway)
return add_conflicted_stages_file(state, patch);
- else if (state->update_index)
+ else if (state->check_index || (state->ita_only && patch->is_new > 0))
return add_index_file(state, path, mode, buf, size);
return 0;
}
@@ -4833,7 +4833,7 @@ static int apply_patch(struct apply_state *state,
LOCK_DIE_ON_ERROR);
}
- if (state->check_index && read_apply_cache(state) < 0) {
+ if ((state->check_index || state->update_index) && read_apply_cache(state) < 0) {
error(_("unable to read index file"));
res = -128;
goto end;
diff --git a/archive-tar.c b/archive-tar.c
index 282b48196f..249164ea77 100644
--- a/archive-tar.c
+++ b/archive-tar.c
@@ -11,7 +11,7 @@
#include "hex.h"
#include "tar.h"
#include "archive.h"
-#include "object-store.h"
+#include "odb.h"
#include "strbuf.h"
#include "streaming.h"
#include "run-command.h"
diff --git a/archive-zip.c b/archive-zip.c
index 405da6f3d8..df8866d5ba 100644
--- a/archive-zip.c
+++ b/archive-zip.c
@@ -12,7 +12,7 @@
#include "hex.h"
#include "streaming.h"
#include "utf8.h"
-#include "object-store.h"
+#include "odb.h"
#include "strbuf.h"
#include "userdiff.h"
#include "write-or-die.h"
diff --git a/archive.c b/archive.c
index 8309ea213e..f5a9d45c8d 100644
--- a/archive.c
+++ b/archive.c
@@ -14,7 +14,7 @@
#include "pretty.h"
#include "setup.h"
#include "refs.h"
-#include "object-store.h"
+#include "odb.h"
#include "commit.h"
#include "tree.h"
#include "tree-walk.h"
@@ -98,7 +98,7 @@ static void *object_file_to_archive(const struct archiver_args *args,
(args->tree ? &args->tree->object.oid : NULL), oid);
path += args->baselen;
- buffer = repo_read_object_file(the_repository, oid, type, sizep);
+ buffer = odb_read_object(the_repository->objects, oid, type, sizep);
if (buffer && S_ISREG(mode)) {
struct strbuf buf = STRBUF_INIT;
size_t size = 0;
@@ -215,7 +215,7 @@ static int write_archive_entry(const struct object_id *oid, const char *base,
/* Stream it? */
if (S_ISREG(mode) && !args->convert &&
- oid_object_info(args->repo, oid, &size) == OBJ_BLOB &&
+ odb_read_object_info(args->repo->objects, oid, &size) == OBJ_BLOB &&
size > repo_settings_get_big_file_threshold(the_repository))
return write_entry(args, oid, path.buf, path.len, mode, NULL, size);
diff --git a/attr.c b/attr.c
index 86b6109fc4..d1daeb0b4d 100644
--- a/attr.c
+++ b/attr.c
@@ -22,7 +22,7 @@
#include "read-cache-ll.h"
#include "refs.h"
#include "revision.h"
-#include "object-store.h"
+#include "odb.h"
#include "setup.h"
#include "thread-utils.h"
#include "tree-walk.h"
@@ -779,7 +779,7 @@ static struct attr_stack *read_attr_from_blob(struct index_state *istate,
if (get_tree_entry(istate->repo, tree_oid, path, &oid, &mode))
return NULL;
- buf = repo_read_object_file(istate->repo, &oid, &type, &sz);
+ buf = odb_read_object(istate->repo->objects, &oid, &type, &sz);
if (!buf || type != OBJ_BLOB) {
free(buf);
return NULL;
diff --git a/bisect.c b/bisect.c
index a327468c75..f24474542e 100644
--- a/bisect.c
+++ b/bisect.c
@@ -20,7 +20,7 @@
#include "commit-slab.h"
#include "commit-reach.h"
#include "object-name.h"
-#include "object-store.h"
+#include "odb.h"
#include "path.h"
#include "dir.h"
@@ -155,9 +155,9 @@ static void show_list(const char *debug, int counted, int nr,
unsigned commit_flags = commit->object.flags;
enum object_type type;
unsigned long size;
- char *buf = repo_read_object_file(the_repository,
- &commit->object.oid, &type,
- &size);
+ char *buf = odb_read_object(the_repository->objects,
+ &commit->object.oid, &type,
+ &size);
const char *subject_start;
int subject_len;
diff --git a/bisect.h b/bisect.h
index 944439bfac..8621460f93 100644
--- a/bisect.h
+++ b/bisect.h
@@ -27,14 +27,6 @@ struct commit_list *filter_skipped(struct commit_list *list,
#define FIND_BISECTION_ALL (1u<<0)
#define FIND_BISECTION_FIRST_PARENT_ONLY (1u<<1)
-struct rev_list_info {
- struct rev_info *revs;
- int flags;
- int show_timestamp;
- int hdr_termination;
- const char *header_prefix;
-};
-
/*
* enum bisect_error represents the following return codes:
* BISECT_OK: success code. Internally, it means that next
diff --git a/blame.c b/blame.c
index 57daa45e89..f1c0670144 100644
--- a/blame.c
+++ b/blame.c
@@ -3,7 +3,7 @@
#include "git-compat-util.h"
#include "refs.h"
-#include "object-store.h"
+#include "odb.h"
#include "cache-tree.h"
#include "mergesort.h"
#include "commit.h"
@@ -116,7 +116,7 @@ static void verify_working_tree_path(struct repository *r,
unsigned short mode;
if (!get_tree_entry(r, commit_oid, path, &blob_oid, &mode) &&
- oid_object_info(r, &blob_oid, NULL) == OBJ_BLOB)
+ odb_read_object_info(r->objects, &blob_oid, NULL) == OBJ_BLOB)
return;
}
@@ -277,7 +277,8 @@ static struct commit *fake_working_tree_commit(struct repository *r,
convert_to_git(r->index, path, buf.buf, buf.len, &buf, 0);
origin->file.ptr = buf.buf;
origin->file.size = buf.len;
- pretend_object_file(the_repository, buf.buf, buf.len, OBJ_BLOB, &origin->blob_oid);
+ odb_pretend_object(the_repository->objects, buf.buf, buf.len,
+ OBJ_BLOB, &origin->blob_oid);
/*
* Read the current index, replace the path entry with
@@ -1041,9 +1042,9 @@ static void fill_origin_blob(struct diff_options *opt,
&o->blob_oid, 1, &file->ptr, &file_size))
;
else
- file->ptr = repo_read_object_file(the_repository,
- &o->blob_oid, &type,
- &file_size);
+ file->ptr = odb_read_object(the_repository->objects,
+ &o->blob_oid, &type,
+ &file_size);
file->size = file_size;
if (!file->ptr)
@@ -1245,7 +1246,7 @@ static int fill_blob_sha1_and_mode(struct repository *r,
return 0;
if (get_tree_entry(r, &origin->commit->object.oid, origin->path, &origin->blob_oid, &origin->mode))
goto error_out;
- if (oid_object_info(r, &origin->blob_oid, NULL) != OBJ_BLOB)
+ if (odb_read_object_info(r->objects, &origin->blob_oid, NULL) != OBJ_BLOB)
goto error_out;
return 0;
error_out:
@@ -1310,7 +1311,7 @@ static void add_bloom_key(struct blame_bloom_data *bd,
}
bd->keys[bd->nr] = xmalloc(sizeof(struct bloom_key));
- fill_bloom_key(path, strlen(path), bd->keys[bd->nr], bd->settings);
+ bloom_key_fill(bd->keys[bd->nr], path, strlen(path), bd->settings);
bd->nr++;
}
@@ -2869,10 +2870,9 @@ void setup_scoreboard(struct blame_scoreboard *sb,
&sb->final_buf_size))
;
else
- sb->final_buf = repo_read_object_file(the_repository,
- &o->blob_oid,
- &type,
- &sb->final_buf_size);
+ sb->final_buf = odb_read_object(the_repository->objects,
+ &o->blob_oid, &type,
+ &sb->final_buf_size);
if (!sb->final_buf)
die(_("cannot read blob %s for path %s"),
diff --git a/bloom.c b/bloom.c
index 0c8d2cebf9..b86015f6d1 100644
--- a/bloom.c
+++ b/bloom.c
@@ -107,7 +107,7 @@ int load_bloom_filter_from_graph(struct commit_graph *g,
* Not considered to be cryptographically secure.
* Implemented as described in https://en.wikipedia.org/wiki/MurmurHash#Algorithm
*/
-uint32_t murmur3_seeded_v2(uint32_t seed, const char *data, size_t len)
+static uint32_t murmur3_seeded_v2(uint32_t seed, const char *data, size_t len)
{
const uint32_t c1 = 0xcc9e2d51;
const uint32_t c2 = 0x1b873593;
@@ -221,9 +221,7 @@ static uint32_t murmur3_seeded_v1(uint32_t seed, const char *data, size_t len)
return seed;
}
-void fill_bloom_key(const char *data,
- size_t len,
- struct bloom_key *key,
+void bloom_key_fill(struct bloom_key *key, const char *data, size_t len,
const struct bloom_filter_settings *settings)
{
int i;
@@ -243,7 +241,7 @@ void fill_bloom_key(const char *data,
key->hashes[i] = hash0 + i * hash1;
}
-void clear_bloom_key(struct bloom_key *key)
+void bloom_key_clear(struct bloom_key *key)
{
FREE_AND_NULL(key->hashes);
}
@@ -280,6 +278,55 @@ void deinit_bloom_filters(void)
deep_clear_bloom_filter_slab(&bloom_filters, free_one_bloom_filter);
}
+struct bloom_keyvec *bloom_keyvec_new(const char *path, size_t len,
+ const struct bloom_filter_settings *settings)
+{
+ struct bloom_keyvec *vec;
+ const char *p;
+ size_t sz;
+ size_t nr = 1;
+
+ p = path;
+ while (*p) {
+ /*
+ * At this point, the path is normalized to use Unix-style
+ * path separators. This is required due to how the
+ * changed-path Bloom filters store the paths.
+ */
+ if (*p == '/')
+ nr++;
+ p++;
+ }
+
+ sz = sizeof(struct bloom_keyvec);
+ sz += nr * sizeof(struct bloom_key);
+ vec = (struct bloom_keyvec *)xcalloc(1, sz);
+ if (!vec)
+ return NULL;
+ vec->count = nr;
+
+ bloom_key_fill(&vec->key[0], path, len, settings);
+ nr = 1;
+ p = path + len - 1;
+ while (p > path) {
+ if (*p == '/') {
+ bloom_key_fill(&vec->key[nr++], path, p - path, settings);
+ }
+ p--;
+ }
+ assert(nr == vec->count);
+ return vec;
+}
+
+void bloom_keyvec_free(struct bloom_keyvec *vec)
+{
+ if (!vec)
+ return;
+ for (size_t nr = 0; nr < vec->count; nr++)
+ bloom_key_clear(&vec->key[nr]);
+ free(vec);
+}
+
static int pathmap_cmp(const void *hashmap_cmp_fn_data UNUSED,
const struct hashmap_entry *eptr,
const struct hashmap_entry *entry_or_key,
@@ -500,9 +547,9 @@ struct bloom_filter *get_or_compute_bloom_filter(struct repository *r,
hashmap_for_each_entry(&pathmap, &iter, e, entry) {
struct bloom_key key;
- fill_bloom_key(e->path, strlen(e->path), &key, settings);
+ bloom_key_fill(&key, e->path, strlen(e->path), settings);
add_key_to_filter(&key, filter, settings);
- clear_bloom_key(&key);
+ bloom_key_clear(&key);
}
cleanup:
@@ -540,3 +587,26 @@ int bloom_filter_contains(const struct bloom_filter *filter,
return 1;
}
+
+int bloom_filter_contains_vec(const struct bloom_filter *filter,
+ const struct bloom_keyvec *vec,
+ const struct bloom_filter_settings *settings)
+{
+ int ret = 1;
+
+ for (size_t nr = 0; ret > 0 && nr < vec->count; nr++)
+ ret = bloom_filter_contains(filter, &vec->key[nr], settings);
+
+ return ret;
+}
+
+uint32_t test_bloom_murmur3_seeded(uint32_t seed, const char *data, size_t len,
+ int version)
+{
+ assert(version == 1 || version == 2);
+
+ if (version == 2)
+ return murmur3_seeded_v2(seed, data, len);
+ else
+ return murmur3_seeded_v1(seed, data, len);
+}
diff --git a/bloom.h b/bloom.h
index 6e46489a20..92ab2100d3 100644
--- a/bloom.h
+++ b/bloom.h
@@ -74,24 +74,40 @@ struct bloom_key {
uint32_t *hashes;
};
+/*
+ * A bloom_keyvec is a vector of bloom_keys, which
+ * can be used to store multiple keys for a single
+ * pathspec item.
+ */
+struct bloom_keyvec {
+ size_t count;
+ struct bloom_key key[FLEX_ARRAY];
+};
+
int load_bloom_filter_from_graph(struct commit_graph *g,
struct bloom_filter *filter,
uint32_t graph_pos);
+void bloom_key_fill(struct bloom_key *key, const char *data, size_t len,
+ const struct bloom_filter_settings *settings);
+void bloom_key_clear(struct bloom_key *key);
+
/*
- * Calculate the murmur3 32-bit hash value for the given data
- * using the given seed.
- * Produces a uniformly distributed hash value.
- * Not considered to be cryptographically secure.
- * Implemented as described in https://en.wikipedia.org/wiki/MurmurHash#Algorithm
+ * bloom_keyvec_new - Allocate and populate a bloom_keyvec with keys for the
+ * given path.
+ *
+ * This function splits the input path by '/' and generates a bloom key for each
+ * prefix, in reverse order of specificity. For example, given the input
+ * "a/b/c", it will generate bloom keys for:
+ * - "a/b/c"
+ * - "a/b"
+ * - "a"
+ *
+ * The resulting keys are stored in a newly allocated bloom_keyvec.
*/
-uint32_t murmur3_seeded_v2(uint32_t seed, const char *data, size_t len);
-
-void fill_bloom_key(const char *data,
- size_t len,
- struct bloom_key *key,
- const struct bloom_filter_settings *settings);
-void clear_bloom_key(struct bloom_key *key);
+struct bloom_keyvec *bloom_keyvec_new(const char *path, size_t len,
+ const struct bloom_filter_settings *settings);
+void bloom_keyvec_free(struct bloom_keyvec *vec);
void add_key_to_filter(const struct bloom_key *key,
struct bloom_filter *filter,
@@ -137,4 +153,18 @@ int bloom_filter_contains(const struct bloom_filter *filter,
const struct bloom_key *key,
const struct bloom_filter_settings *settings);
+/*
+ * bloom_filter_contains_vec - Check if all keys in a key vector are in the
+ * Bloom filter.
+ *
+ * Returns 1 if **all** keys in the vector are present in the filter,
+ * 0 if **any** key is not present.
+ */
+int bloom_filter_contains_vec(const struct bloom_filter *filter,
+ const struct bloom_keyvec *v,
+ const struct bloom_filter_settings *settings);
+
+uint32_t test_bloom_murmur3_seeded(uint32_t seed, const char *data, size_t len,
+ int version);
+
#endif
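
Putting the bloom.c and bloom.h changes above together: a path now maps to a vector of keys, one per leading directory, and a filter only reports a possible match if every key in the vector is present. A sketch of the intended call sequence, based solely on the declarations added above; the wrapper function and its `filter`/`settings` parameters are illustrative assumptions.

-----------------------------------------------------------------------
#include "git-compat-util.h"
#include "bloom.h"

/*
 * Illustrative only: ask a changed-path Bloom filter about "a/b/c".
 * bloom_keyvec_new() also derives keys for "a/b" and "a", and
 * bloom_filter_contains_vec() requires all of them to be present.
 */
static int path_maybe_changed(const struct bloom_filter *filter,
			      const struct bloom_filter_settings *settings)
{
	const char *path = "a/b/c";
	struct bloom_keyvec *vec =
		bloom_keyvec_new(path, strlen(path), settings);
	int ret = bloom_filter_contains_vec(filter, vec, settings);

	bloom_keyvec_free(vec);
	return ret;	/* > 0: possibly changed, 0: definitely not */
}
-----------------------------------------------------------------------
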
diff --git a/branch.c b/branch.c
index 6d01d7d6bd..93f5b4e8dd 100644
--- a/branch.c
+++ b/branch.c
@@ -230,7 +230,7 @@ static int inherit_tracking(struct tracking *tracking, const char *orig_ref)
return -1;
}
- if (branch->merge_nr < 1 || !branch->merge_name || !branch->merge_name[0]) {
+ if (branch->merge_nr < 1 || !branch->merge || !branch->merge[0] || !branch->merge[0]->src) {
warning(_("asked to inherit tracking from '%s', but no merge configuration is set"),
bare_ref);
return -1;
@@ -238,7 +238,7 @@ static int inherit_tracking(struct tracking *tracking, const char *orig_ref)
tracking->remote = branch->remote_name;
for (i = 0; i < branch->merge_nr; i++)
- string_list_append(tracking->srcs, branch->merge_name[i]);
+ string_list_append(tracking->srcs, branch->merge[i]->src);
return 0;
}
diff --git a/builtin/am.c b/builtin/am.c
index e32a3b4c97..c9d925f7b9 100644
--- a/builtin/am.c
+++ b/builtin/am.c
@@ -1000,7 +1000,7 @@ static void am_setup(struct am_state *state, enum patch_format patch_format,
if (!patch_format) {
fprintf_ln(stderr, _("Patch format detection failed."));
- exit(128);
+ die(NULL);
}
if (mkdir(state->dir, 0777) < 0 && errno != EEXIST)
@@ -1178,7 +1178,7 @@ static void NORETURN die_user_resolve(const struct am_state *state)
strbuf_release(&sb);
}
- exit(128);
+ die(NULL);
}
/**
@@ -2406,6 +2406,7 @@ int cmd_am(int argc,
.type = OPTION_CALLBACK,
.long_name = "show-current-patch",
.value = &resume_mode,
+ .precision = sizeof(resume_mode),
.argh = "(diff|raw)",
.help = N_("show the patch being applied"),
.flags = PARSE_OPT_CMDMODE | PARSE_OPT_OPTARG | PARSE_OPT_NONEG | PARSE_OPT_LITERAL_ARGHELP,
diff --git a/builtin/apply.c b/builtin/apply.c
index a1e20c593d..d642a40251 100644
--- a/builtin/apply.c
+++ b/builtin/apply.c
@@ -29,7 +29,7 @@ int cmd_apply(int argc,
* cf. https://lore.kernel.org/git/xmqqcypfcmn4.fsf@gitster.g/
*/
if (!the_hash_algo)
- repo_set_hash_algo(the_repository, GIT_HASH_SHA1);
+ repo_set_hash_algo(the_repository, GIT_HASH_DEFAULT);
argc = apply_parse_options(argc, argv,
&state, &force_apply, &options,
diff --git a/builtin/backfill.c b/builtin/backfill.c
index fa82ad2f6f..80056abe47 100644
--- a/builtin/backfill.c
+++ b/builtin/backfill.c
@@ -13,7 +13,7 @@
#include "tree.h"
#include "tree-walk.h"
#include "object.h"
-#include "object-store.h"
+#include "odb.h"
#include "oid-array.h"
#include "oidset.h"
#include "promisor-remote.h"
@@ -67,8 +67,8 @@ static int fill_missing_blobs(const char *path UNUSED,
return 0;
for (size_t i = 0; i < list->nr; i++) {
- if (!has_object(ctx->repo, &list->oid[i],
- OBJECT_INFO_FOR_PREFETCH))
+ if (!odb_has_object(ctx->repo->objects, &list->oid[i],
+ OBJECT_INFO_FOR_PREFETCH))
oid_array_append(&ctx->current_batch, &list->oid[i]);
}
diff --git a/builtin/blame.c b/builtin/blame.c
index 944952e30e..91586e6852 100644
--- a/builtin/blame.c
+++ b/builtin/blame.c
@@ -28,7 +28,7 @@
#include "line-log.h"
#include "progress.h"
#include "object-name.h"
-#include "object-store.h"
+#include "odb.h"
#include "pager.h"
#include "blame.h"
#include "refs.h"
@@ -837,7 +837,7 @@ static int is_a_rev(const char *name)
if (repo_get_oid(the_repository, name, &oid))
return 0;
- return OBJ_NONE < oid_object_info(the_repository, &oid, NULL);
+ return OBJ_NONE < odb_read_object_info(the_repository->objects, &oid, NULL);
}
static int peel_to_commit_oid(struct object_id *oid_ret, void *cbdata)
@@ -848,7 +848,7 @@ static int peel_to_commit_oid(struct object_id *oid_ret, void *cbdata)
oidcpy(&oid, oid_ret);
while (1) {
struct object *obj;
- int kind = oid_object_info(r, &oid, NULL);
+ int kind = odb_read_object_info(r->objects, &oid, NULL);
if (kind == OBJ_COMMIT) {
oidcpy(oid_ret, &oid);
return 0;
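
The blame.c hunks above follow the mechanical conversion used throughout this series: object-store lookups that took a struct repository now go through repo->objects. A condensed before/after sketch of the renames that recur in the following files (argument shapes exactly as shown in the hunks):

	/* before */
	type = oid_object_info(the_repository, &oid, NULL);
	buf  = repo_read_object_file(the_repository, &oid, &type, &size);
	ok   = has_object(the_repository, &oid, 0);

	/* after */
	type = odb_read_object_info(the_repository->objects, &oid, NULL);
	buf  = odb_read_object(the_repository->objects, &oid, &type, &size);
	ok   = odb_has_object(the_repository->objects, &oid, 0);
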
diff --git a/builtin/cat-file.c b/builtin/cat-file.c
index 67a5ff2b9e..2492a0b6f3 100644
--- a/builtin/cat-file.c
+++ b/builtin/cat-file.c
@@ -24,7 +24,7 @@
#include "pack-bitmap.h"
#include "object-file.h"
#include "object-name.h"
-#include "object-store.h"
+#include "odb.h"
#include "replace-object.h"
#include "promisor-remote.h"
#include "mailmap.h"
@@ -74,7 +74,7 @@ static int filter_object(const char *path, unsigned mode,
{
enum object_type type;
- *buf = repo_read_object_file(the_repository, oid, &type, size);
+ *buf = odb_read_object(the_repository->objects, oid, &type, size);
if (!*buf)
return error(_("cannot read object %s '%s'"),
oid_to_hex(oid), path);
@@ -132,7 +132,7 @@ static int cat_one_file(int opt, const char *exp_type, const char *obj_name)
switch (opt) {
case 't':
oi.typep = &type;
- if (oid_object_info_extended(the_repository, &oid, &oi, flags) < 0)
+ if (odb_read_object_info_extended(the_repository->objects, &oid, &oi, flags) < 0)
die("git cat-file: could not get object info");
printf("%s\n", type_name(type));
ret = 0;
@@ -146,7 +146,7 @@ static int cat_one_file(int opt, const char *exp_type, const char *obj_name)
oi.contentp = (void**)&buf;
}
- if (oid_object_info_extended(the_repository, &oid, &oi, flags) < 0)
+ if (odb_read_object_info_extended(the_repository->objects, &oid, &oi, flags) < 0)
die("git cat-file: could not get object info");
if (use_mailmap && (type == OBJ_COMMIT || type == OBJ_TAG)) {
@@ -160,8 +160,8 @@ static int cat_one_file(int opt, const char *exp_type, const char *obj_name)
goto cleanup;
case 'e':
- ret = !has_object(the_repository, &oid,
- HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR);
+ ret = !odb_has_object(the_repository->objects, &oid,
+ HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR);
goto cleanup;
case 'w':
@@ -180,7 +180,7 @@ static int cat_one_file(int opt, const char *exp_type, const char *obj_name)
/* else fallthrough */
case 'p':
- type = oid_object_info(the_repository, &oid, NULL);
+ type = odb_read_object_info(the_repository->objects, &oid, NULL);
if (type < 0)
die("Not a valid object name %s", obj_name);
@@ -197,8 +197,8 @@ static int cat_one_file(int opt, const char *exp_type, const char *obj_name)
ret = stream_blob(&oid);
goto cleanup;
}
- buf = repo_read_object_file(the_repository, &oid, &type,
- &size);
+ buf = odb_read_object(the_repository->objects, &oid,
+ &type, &size);
if (!buf)
die("Cannot read object %s", obj_name);
@@ -217,11 +217,10 @@ static int cat_one_file(int opt, const char *exp_type, const char *obj_name)
if (exp_type_id == OBJ_BLOB) {
struct object_id blob_oid;
- if (oid_object_info(the_repository, &oid, NULL) == OBJ_TAG) {
- char *buffer = repo_read_object_file(the_repository,
- &oid,
- &type,
- &size);
+ if (odb_read_object_info(the_repository->objects,
+ &oid, NULL) == OBJ_TAG) {
+ char *buffer = odb_read_object(the_repository->objects,
+ &oid, &type, &size);
const char *target;
if (!buffer)
@@ -235,7 +234,8 @@ static int cat_one_file(int opt, const char *exp_type, const char *obj_name)
} else
oidcpy(&blob_oid, &oid);
- if (oid_object_info(the_repository, &blob_oid, NULL) == OBJ_BLOB) {
+ if (odb_read_object_info(the_repository->objects,
+ &blob_oid, NULL) == OBJ_BLOB) {
ret = stream_blob(&blob_oid);
goto cleanup;
}
@@ -246,8 +246,8 @@ static int cat_one_file(int opt, const char *exp_type, const char *obj_name)
* fall-back to the usual case.
*/
}
- buf = read_object_with_reference(the_repository, &oid,
- exp_type_id, &size, NULL);
+ buf = odb_read_object_peeled(the_repository->objects, &oid,
+ exp_type_id, &size, NULL);
if (use_mailmap) {
size_t s = size;
@@ -275,6 +275,7 @@ struct expand_data {
struct object_id oid;
enum object_type type;
unsigned long size;
+ unsigned short mode;
off_t disk_size;
const char *rest;
struct object_id delta_base_oid;
@@ -294,7 +295,7 @@ struct expand_data {
/*
* After a mark_query run, this object_info is set up to be
- * passed to oid_object_info_extended. It will point to the data
+ * passed to odb_read_object_info_extended. It will point to the data
* elements above, so you can retrieve the response from there.
*/
struct object_info info;
@@ -306,6 +307,7 @@ struct expand_data {
*/
unsigned skip_object_info : 1;
};
+#define EXPAND_DATA_INIT { .mode = S_IFINVALID }
static int is_atom(const char *atom, const char *s, int slen)
{
@@ -345,6 +347,9 @@ static int expand_atom(struct strbuf *sb, const char *atom, int len,
else
strbuf_addstr(sb,
oid_to_hex(&data->delta_base_oid));
+ } else if (is_atom("objectmode", atom, len)) {
+ if (!data->mark_query && !(S_IFINVALID == data->mode))
+ strbuf_addf(sb, "%06o", data->mode);
} else
return 0;
return 1;
@@ -401,10 +406,8 @@ static void print_object_or_die(struct batch_options *opt, struct expand_data *d
if (!textconv_object(the_repository,
data->rest, 0100644, oid,
1, &contents, &size))
- contents = repo_read_object_file(the_repository,
- oid,
- &type,
- &size);
+ contents = odb_read_object(the_repository->objects,
+ oid, &type, &size);
if (!contents)
die("could not convert '%s' %s",
oid_to_hex(oid), data->rest);
@@ -421,8 +424,8 @@ static void print_object_or_die(struct batch_options *opt, struct expand_data *d
unsigned long size;
void *contents;
- contents = repo_read_object_file(the_repository, oid, &type,
- &size);
+ contents = odb_read_object(the_repository->objects, oid,
+ &type, &size);
if (!contents)
die("object %s disappeared", oid_to_hex(oid));
@@ -484,14 +487,17 @@ static void batch_object_write(const char *obj_name,
data->info.sizep = &data->size;
if (pack)
- ret = packed_object_info(the_repository, pack, offset,
- &data->info);
+ ret = packed_object_info(the_repository, pack,
+ offset, &data->info);
else
- ret = oid_object_info_extended(the_repository,
- &data->oid, &data->info,
- OBJECT_INFO_LOOKUP_REPLACE);
+ ret = odb_read_object_info_extended(the_repository->objects,
+ &data->oid, &data->info,
+ OBJECT_INFO_LOOKUP_REPLACE);
if (ret < 0) {
- report_object_status(opt, obj_name, &data->oid, "missing");
+ if (data->mode == S_IFGITLINK)
+ report_object_status(opt, oid_to_hex(&data->oid), &data->oid, "submodule");
+ else
+ report_object_status(opt, obj_name, &data->oid, "missing");
return;
}
@@ -531,8 +537,8 @@ static void batch_object_write(const char *obj_name,
size_t s = data->size;
char *buf = NULL;
- buf = repo_read_object_file(the_repository, &data->oid, &data->type,
- &data->size);
+ buf = odb_read_object(the_repository->objects, &data->oid,
+ &data->type, &data->size);
if (!buf)
die(_("unable to read %s"), oid_to_hex(&data->oid));
buf = replace_idents_using_mailmap(buf, &s);
@@ -613,6 +619,7 @@ static void batch_one_object(const char *obj_name,
goto out;
}
+ data->mode = ctx.mode;
batch_object_write(obj_name, scratch, opt, data, NULL, 0);
out:
@@ -866,16 +873,15 @@ static int batch_objects(struct batch_options *opt)
{
struct strbuf input = STRBUF_INIT;
struct strbuf output = STRBUF_INIT;
- struct expand_data data;
+ struct expand_data data = EXPAND_DATA_INIT;
int save_warning;
int retval = 0;
/*
* Expand once with our special mark_query flag, which will prime the
- * object_info to be handed to oid_object_info_extended for each
+ * object_info to be handed to odb_read_object_info_extended for each
* object.
*/
- memset(&data, 0, sizeof(data));
data.mark_query = 1;
expand_format(&output,
opt->format ? opt->format : DEFAULT_FORMAT,
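
The cat-file hunks add a `mode` field that starts at the S_IFINVALID sentinel via EXPAND_DATA_INIT and is only filled in once batch_one_object() has a tree entry to copy it from. A short sketch of the invariant behind the new %(objectmode) atom (the strbuf name is illustrative):

	struct expand_data data = EXPAND_DATA_INIT;	/* data.mode == S_IFINVALID */

	/* expand_atom() only prints a mode once one has actually been seen */
	if (data.mode != S_IFINVALID)
		strbuf_addf(&sb, "%06o", data.mode);
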
diff --git a/builtin/checkout.c b/builtin/checkout.c
index d185982f3a..0a90b86a72 100644
--- a/builtin/checkout.c
+++ b/builtin/checkout.c
@@ -20,7 +20,7 @@
#include "merge-ort-wrappers.h"
#include "object-file.h"
#include "object-name.h"
-#include "object-store.h"
+#include "odb.h"
#include "parse-options.h"
#include "path.h"
#include "preload-index.h"
@@ -838,7 +838,7 @@ static int merge_working_tree(const struct checkout_opts *opts,
init_tree_desc(&trees[0], &tree->object.oid,
tree->buffer, tree->size);
if (parse_tree(new_tree) < 0)
- exit(128);
+ die(NULL);
tree = new_tree;
init_tree_desc(&trees[1], &tree->object.oid,
tree->buffer, tree->size);
@@ -913,7 +913,7 @@ static int merge_working_tree(const struct checkout_opts *opts,
work,
old_tree);
if (ret < 0)
- exit(128);
+ die(NULL);
ret = reset_tree(new_tree,
opts, 0,
writeout_error, new_branch_info);
diff --git a/builtin/clone.c b/builtin/clone.c
index 91b9cd0d16..6d08abed37 100644
--- a/builtin/clone.c
+++ b/builtin/clone.c
@@ -25,7 +25,7 @@
#include "refs.h"
#include "refspec.h"
#include "object-file.h"
-#include "object-store.h"
+#include "odb.h"
#include "tree.h"
#include "tree-walk.h"
#include "unpack-trees.h"
@@ -171,7 +171,7 @@ static int add_one_reference(struct string_list_item *item, void *cb_data)
} else {
struct strbuf sb = STRBUF_INIT;
strbuf_addf(&sb, "%s/objects", ref_git);
- add_to_alternates_file(sb.buf);
+ odb_add_to_alternates_file(the_repository->objects, sb.buf);
strbuf_release(&sb);
}
@@ -212,12 +212,14 @@ static void copy_alternates(struct strbuf *src, const char *src_repo)
if (!line.len || line.buf[0] == '#')
continue;
if (is_absolute_path(line.buf)) {
- add_to_alternates_file(line.buf);
+ odb_add_to_alternates_file(the_repository->objects,
+ line.buf);
continue;
}
abs_path = mkpathdup("%s/objects/%s", src_repo, line.buf);
if (!normalize_path_copy(abs_path, abs_path))
- add_to_alternates_file(abs_path);
+ odb_add_to_alternates_file(the_repository->objects,
+ abs_path);
else
warning("skipping invalid relative alternate: %s/%s",
src_repo, line.buf);
@@ -352,7 +354,7 @@ static void clone_local(const char *src_repo, const char *dest_repo)
struct strbuf alt = STRBUF_INIT;
get_common_dir(&alt, src_repo);
strbuf_addstr(&alt, "/objects");
- add_to_alternates_file(alt.buf);
+ odb_add_to_alternates_file(the_repository->objects, alt.buf);
strbuf_release(&alt);
} else {
struct strbuf src = STRBUF_INIT;
@@ -504,7 +506,7 @@ static void write_followtags(const struct ref *refs, const char *msg)
continue;
if (ends_with(ref->name, "^{}"))
continue;
- if (!has_object(the_repository, &ref->old_oid, 0))
+ if (!odb_has_object(the_repository->objects, &ref->old_oid, 0))
continue;
refs_update_ref(get_main_ref_store(the_repository), msg,
ref->name, &ref->old_oid, NULL, 0,
diff --git a/builtin/commit-graph.c b/builtin/commit-graph.c
index a783a86e79..25018a0b9d 100644
--- a/builtin/commit-graph.c
+++ b/builtin/commit-graph.c
@@ -6,7 +6,7 @@
#include "hex.h"
#include "parse-options.h"
#include "commit-graph.h"
-#include "object-store.h"
+#include "odb.h"
#include "progress.h"
#include "replace-object.h"
#include "strbuf.h"
@@ -66,7 +66,7 @@ static int graph_verify(int argc, const char **argv, const char *prefix,
struct repository *repo UNUSED)
{
struct commit_graph *graph = NULL;
- struct object_directory *odb = NULL;
+ struct odb_source *source = NULL;
char *graph_name;
char *chain_name;
enum { OPENED_NONE, OPENED_GRAPH, OPENED_CHAIN } opened = OPENED_NONE;
@@ -101,9 +101,9 @@ static int graph_verify(int argc, const char **argv, const char *prefix,
if (opts.progress)
flags |= COMMIT_GRAPH_WRITE_PROGRESS;
- odb = find_odb(the_repository, opts.obj_dir);
- graph_name = get_commit_graph_filename(odb);
- chain_name = get_commit_graph_chain_filename(odb);
+ source = odb_find_source(the_repository->objects, opts.obj_dir);
+ graph_name = get_commit_graph_filename(source);
+ chain_name = get_commit_graph_chain_filename(source);
if (open_commit_graph(graph_name, &fd, &st))
opened = OPENED_GRAPH;
else if (errno != ENOENT)
@@ -120,7 +120,7 @@ static int graph_verify(int argc, const char **argv, const char *prefix,
if (opened == OPENED_NONE)
return 0;
else if (opened == OPENED_GRAPH)
- graph = load_commit_graph_one_fd_st(the_repository, fd, &st, odb);
+ graph = load_commit_graph_one_fd_st(the_repository, fd, &st, source);
else
graph = load_commit_graph_chain_fd_st(the_repository, fd, &st,
&incomplete_chain);
@@ -221,7 +221,7 @@ static int graph_write(int argc, const char **argv, const char *prefix,
struct string_list pack_indexes = STRING_LIST_INIT_DUP;
struct strbuf buf = STRBUF_INIT;
struct oidset commits = OIDSET_INIT;
- struct object_directory *odb = NULL;
+ struct odb_source *source = NULL;
int result = 0;
enum commit_graph_write_flags flags = 0;
struct progress *progress = NULL;
@@ -289,10 +289,10 @@ static int graph_write(int argc, const char **argv, const char *prefix,
git_env_bool(GIT_TEST_COMMIT_GRAPH_CHANGED_PATHS, 0))
flags |= COMMIT_GRAPH_WRITE_BLOOM_FILTERS;
- odb = find_odb(the_repository, opts.obj_dir);
+ source = odb_find_source(the_repository->objects, opts.obj_dir);
if (opts.reachable) {
- if (write_commit_graph_reachable(odb, flags, &write_opts))
+ if (write_commit_graph_reachable(source, flags, &write_opts))
result = 1;
goto cleanup;
}
@@ -311,6 +311,7 @@ static int graph_write(int argc, const char **argv, const char *prefix,
while (strbuf_getline(&buf, stdin) != EOF) {
if (read_one_commit(&commits, progress, buf.buf)) {
result = 1;
+ stop_progress(&progress);
goto cleanup;
}
}
@@ -318,7 +319,7 @@ static int graph_write(int argc, const char **argv, const char *prefix,
stop_progress(&progress);
}
- if (write_commit_graph(odb,
+ if (write_commit_graph(source,
opts.stdin_packs ? &pack_indexes : NULL,
opts.stdin_commits ? &commits : NULL,
flags,
diff --git a/builtin/commit-tree.c b/builtin/commit-tree.c
index ad6b2c9320..31cfd9bd15 100644
--- a/builtin/commit-tree.c
+++ b/builtin/commit-tree.c
@@ -9,7 +9,7 @@
#include "gettext.h"
#include "hex.h"
#include "object-name.h"
-#include "object-store.h"
+#include "odb.h"
#include "commit.h"
#include "parse-options.h"
@@ -48,7 +48,7 @@ static int parse_parent_arg_callback(const struct option *opt,
if (repo_get_oid_commit(the_repository, arg, &oid))
die(_("not a valid object name %s"), arg);
- assert_oid_type(&oid, OBJ_COMMIT);
+ odb_assert_oid_type(the_repository->objects, &oid, OBJ_COMMIT);
new_parent(lookup_commit(the_repository, &oid), parents);
return 0;
}
diff --git a/builtin/commit.c b/builtin/commit.c
index fba0dded64..63e7158e98 100644
--- a/builtin/commit.c
+++ b/builtin/commit.c
@@ -688,6 +688,10 @@ static void adjust_comment_line_char(const struct strbuf *sb)
char candidates[] = "#;@!$%^&|:";
char *candidate;
const char *p;
+ size_t cutoff;
+
+ /* Ignore comment chars in trailing comments (e.g., Conflicts:) */
+ cutoff = sb->len - ignored_log_message_bytes(sb->buf, sb->len);
if (!memchr(sb->buf, candidates[0], sb->len)) {
free(comment_line_str_to_free);
@@ -700,7 +704,7 @@ static void adjust_comment_line_char(const struct strbuf *sb)
candidate = strchr(candidates, *p);
if (candidate)
*candidate = ' ';
- for (p = sb->buf; *p; p++) {
+ for (p = sb->buf; p + 1 < sb->buf + cutoff; p++) {
if ((p[0] == '\n' || p[0] == '\r') && p[1]) {
candidate = strchr(candidates, p[1]);
if (candidate)
diff --git a/builtin/config.c b/builtin/config.c
index f70d635477..5efe273010 100644
--- a/builtin/config.c
+++ b/builtin/config.c
@@ -17,9 +17,9 @@
static const char *const builtin_config_usage[] = {
N_("git config list [<file-option>] [<display-option>] [--includes]"),
- N_("git config get [<file-option>] [<display-option>] [--includes] [--all] [--regexp] [--value=<value>] [--fixed-value] [--default=<default>] <name>"),
- N_("git config set [<file-option>] [--type=<type>] [--all] [--value=<value>] [--fixed-value] <name> <value>"),
- N_("git config unset [<file-option>] [--all] [--value=<value>] [--fixed-value] <name>"),
+ N_("git config get [<file-option>] [<display-option>] [--includes] [--all] [--regexp] [--value=<pattern>] [--fixed-value] [--default=<default>] [--url=<url>] <name>"),
+ N_("git config set [<file-option>] [--type=<type>] [--all] [--value=<pattern>] [--fixed-value] <name> <value>"),
+ N_("git config unset [<file-option>] [--all] [--value=<pattern>] [--fixed-value] <name>"),
N_("git config rename-section [<file-option>] <old-name> <new-name>"),
N_("git config remove-section [<file-option>] <name>"),
N_("git config edit [<file-option>]"),
@@ -33,17 +33,17 @@ static const char *const builtin_config_list_usage[] = {
};
static const char *const builtin_config_get_usage[] = {
- N_("git config get [<file-option>] [<display-option>] [--includes] [--all] [--regexp=<regexp>] [--value=<value>] [--fixed-value] [--default=<default>] <name>"),
+ N_("git config get [<file-option>] [<display-option>] [--includes] [--all] [--regexp=<regexp>] [--value=<pattern>] [--fixed-value] [--default=<default>] <name>"),
NULL
};
static const char *const builtin_config_set_usage[] = {
- N_("git config set [<file-option>] [--type=<type>] [--comment=<message>] [--all] [--value=<value>] [--fixed-value] <name> <value>"),
+ N_("git config set [<file-option>] [--type=<type>] [--comment=<message>] [--all] [--value=<pattern>] [--fixed-value] <name> <value>"),
NULL
};
static const char *const builtin_config_unset_usage[] = {
- N_("git config unset [<file-option>] [--all] [--value=<value>] [--fixed-value] <name>"),
+ N_("git config unset [<file-option>] [--all] [--value=<pattern>] [--fixed-value] <name>"),
NULL
};
diff --git a/builtin/count-objects.c b/builtin/count-objects.c
index a88c0c9c09..f687647931 100644
--- a/builtin/count-objects.c
+++ b/builtin/count-objects.c
@@ -80,10 +80,10 @@ static int count_cruft(const char *basename UNUSED, const char *path,
return 0;
}
-static int print_alternate(struct object_directory *odb, void *data UNUSED)
+static int print_alternate(struct odb_source *alternate, void *data UNUSED)
{
printf("alternate: ");
- quote_c_style(odb->path, NULL, stdout, 0);
+ quote_c_style(alternate->path, NULL, stdout, 0);
putchar('\n');
return 0;
}
@@ -159,7 +159,7 @@ int cmd_count_objects(int argc,
printf("prune-packable: %lu\n", packed_loose);
printf("garbage: %lu\n", garbage);
printf("size-garbage: %s\n", garbage_buf.buf);
- foreach_alt_odb(print_alternate, NULL);
+ odb_for_each_alternate(the_repository->objects, print_alternate, NULL);
strbuf_release(&loose_buf);
strbuf_release(&pack_buf);
strbuf_release(&garbage_buf);
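
foreach_alt_odb() becomes odb_for_each_alternate(), and the callback now receives a struct odb_source rather than a struct object_directory. A minimal sketch of the new callback shape (that a non-zero return stops the iteration is an assumption carried over from the old API):

	static int show_alternate(struct odb_source *alternate, void *data UNUSED)
	{
		printf("alternate: %s\n", alternate->path);
		return 0;
	}

	static void list_alternates(void)
	{
		odb_for_each_alternate(the_repository->objects, show_alternate, NULL);
	}
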
diff --git a/builtin/describe.c b/builtin/describe.c
index 2d50883b72..fbf305d762 100644
--- a/builtin/describe.c
+++ b/builtin/describe.c
@@ -19,7 +19,7 @@
#include "setup.h"
#include "strvec.h"
#include "run-command.h"
-#include "object-store.h"
+#include "odb.h"
#include "list-objects.h"
#include "commit-slab.h"
#include "wildmatch.h"
@@ -552,7 +552,8 @@ static void describe(const char *arg, int last_one)
if (cmit)
describe_commit(&oid, &sb);
- else if (oid_object_info(the_repository, &oid, NULL) == OBJ_BLOB)
+ else if (odb_read_object_info(the_repository->objects,
+ &oid, NULL) == OBJ_BLOB)
describe_blob(oid, &sb);
else
die(_("%s is neither a commit nor blob"), arg);
diff --git a/builtin/diff.c b/builtin/diff.c
index fa963808c3..eebffe36cc 100644
--- a/builtin/diff.c
+++ b/builtin/diff.c
@@ -35,7 +35,7 @@ static const char builtin_diff_usage[] =
" or: git diff [<options>] [--merge-base] <commit> [<commit>...] <commit> [--] [<path>...]\n"
" or: git diff [<options>] <commit>...<commit> [--] [<path>...]\n"
" or: git diff [<options>] <blob> <blob>\n"
-" or: git diff [<options>] --no-index [--] <path> <path>"
+" or: git diff [<options>] --no-index [--] <path> <path> [<pathspec>...]"
"\n"
COMMON_DIFF_OPTIONS_HELP;
@@ -483,7 +483,7 @@ int cmd_diff(int argc,
* configurable via a command line option.
*/
if (nongit)
- repo_set_hash_algo(the_repository, GIT_HASH_SHA1);
+ repo_set_hash_algo(the_repository, GIT_HASH_DEFAULT);
init_diff_ui_defaults();
git_config(git_diff_ui_config, NULL);
diff --git a/builtin/difftool.c b/builtin/difftool.c
index a3b64ce694..e4bc1f8316 100644
--- a/builtin/difftool.c
+++ b/builtin/difftool.c
@@ -30,7 +30,7 @@
#include "strbuf.h"
#include "lockfile.h"
#include "object-file.h"
-#include "object-store.h"
+#include "odb.h"
#include "dir.h"
#include "entry.h"
#include "setup.h"
@@ -320,7 +320,7 @@ static char *get_symlink(struct repository *repo,
} else {
enum object_type type;
unsigned long size;
- data = repo_read_object_file(repo, oid, &type, &size);
+ data = odb_read_object(repo->objects, oid, &type, &size);
if (!data)
die(_("could not read object %s for symlink %s"),
oid_to_hex(oid), path);
diff --git a/builtin/fast-export.c b/builtin/fast-export.c
index fcf6b00d5f..f4169dc5f3 100644
--- a/builtin/fast-export.c
+++ b/builtin/fast-export.c
@@ -14,7 +14,7 @@
#include "refs.h"
#include "refspec.h"
#include "object-file.h"
-#include "object-store.h"
+#include "odb.h"
#include "commit.h"
#include "object.h"
#include "tag.h"
@@ -29,6 +29,7 @@
#include "quote.h"
#include "remote.h"
#include "blob.h"
+#include "gpg-interface.h"
static const char *const fast_export_usage[] = {
N_("git fast-export [<rev-list-opts>]"),
@@ -323,7 +324,7 @@ static void export_blob(const struct object_id *oid)
object = (struct object *)lookup_blob(the_repository, oid);
eaten = 0;
} else {
- buf = repo_read_object_file(the_repository, oid, &type, &size);
+ buf = odb_read_object(the_repository->objects, oid, &type, &size);
if (!buf)
die("could not read blob %s", oid_to_hex(oid));
if (check_object_signature(the_repository, oid, buf, size,
@@ -652,6 +653,38 @@ static const char *find_commit_multiline_header(const char *msg,
return strbuf_detach(&val, NULL);
}
+static void print_signature(const char *signature, const char *object_hash)
+{
+ if (!signature)
+ return;
+
+ printf("gpgsig %s %s\ndata %u\n%s\n",
+ object_hash,
+ get_signature_format(signature),
+ (unsigned)strlen(signature),
+ signature);
+}
+
+static const char *append_signatures_for_header(struct string_list *signatures,
+ const char *pos,
+ const char *header,
+ const char *object_hash)
+{
+ const char *signature;
+ const char *start = pos;
+ const char *end = pos;
+
+ while ((signature = find_commit_multiline_header(start + 1,
+ header,
+ &end))) {
+ string_list_append(signatures, signature)->util = (void *)object_hash;
+ free((char *)signature);
+ start = end;
+ }
+
+ return end;
+}
+
static void handle_commit(struct commit *commit, struct rev_info *rev,
struct string_list *paths_of_changed_objects)
{
@@ -660,7 +693,7 @@ static void handle_commit(struct commit *commit, struct rev_info *rev,
const char *author, *author_end, *committer, *committer_end;
const char *encoding = NULL;
size_t encoding_len;
- const char *signature_alg = NULL, *signature = NULL;
+ struct string_list signatures = STRING_LIST_INIT_DUP;
const char *message;
char *reencoded = NULL;
struct commit_list *p;
@@ -700,10 +733,11 @@ static void handle_commit(struct commit *commit, struct rev_info *rev,
}
if (*commit_buffer_cursor == '\n') {
- if ((signature = find_commit_multiline_header(commit_buffer_cursor + 1, "gpgsig", &commit_buffer_cursor)))
- signature_alg = "sha1";
- else if ((signature = find_commit_multiline_header(commit_buffer_cursor + 1, "gpgsig-sha256", &commit_buffer_cursor)))
- signature_alg = "sha256";
+ const char *after_sha1 = append_signatures_for_header(&signatures, commit_buffer_cursor,
+ "gpgsig", "sha1");
+ const char *after_sha256 = append_signatures_for_header(&signatures, commit_buffer_cursor,
+ "gpgsig-sha256", "sha256");
+ commit_buffer_cursor = (after_sha1 > after_sha256) ? after_sha1 : after_sha256;
}
message = strstr(commit_buffer_cursor, "\n\n");
@@ -769,30 +803,30 @@ static void handle_commit(struct commit *commit, struct rev_info *rev,
printf("%.*s\n%.*s\n",
(int)(author_end - author), author,
(int)(committer_end - committer), committer);
- if (signature) {
+ if (signatures.nr) {
switch (signed_commit_mode) {
case SIGN_ABORT:
die("encountered signed commit %s; use "
"--signed-commits=<mode> to handle it",
oid_to_hex(&commit->object.oid));
case SIGN_WARN_VERBATIM:
- warning("exporting signed commit %s",
- oid_to_hex(&commit->object.oid));
+ warning("exporting %"PRIuMAX" signature(s) for commit %s",
+ (uintmax_t)signatures.nr, oid_to_hex(&commit->object.oid));
/* fallthru */
case SIGN_VERBATIM:
- printf("gpgsig %s\ndata %u\n%s",
- signature_alg,
- (unsigned)strlen(signature),
- signature);
+ for (size_t i = 0; i < signatures.nr; i++) {
+ struct string_list_item *item = &signatures.items[i];
+ print_signature(item->string, item->util);
+ }
break;
case SIGN_WARN_STRIP:
- warning("stripping signature from commit %s",
+ warning("stripping signature(s) from commit %s",
oid_to_hex(&commit->object.oid));
/* fallthru */
case SIGN_STRIP:
break;
}
- free((char *)signature);
+ string_list_clear(&signatures, 0);
}
if (!reencoded && encoding)
printf("encoding %.*s\n", (int)encoding_len, encoding);
@@ -869,8 +903,8 @@ static void handle_tag(const char *name, struct tag *tag)
return;
}
- buf = repo_read_object_file(the_repository, &tag->object.oid, &type,
- &size);
+ buf = odb_read_object(the_repository->objects, &tag->object.oid,
+ &type, &size);
if (!buf)
die("could not read tag %s", oid_to_hex(&tag->object.oid));
message = memmem(buf, size, "\n\n", 2);
@@ -1200,7 +1234,7 @@ static void import_marks(char *input_file, int check_exists)
if (last_idnum < mark)
last_idnum = mark;
- type = oid_object_info(the_repository, &oid, NULL);
+ type = odb_read_object_info(the_repository->objects, &oid, NULL);
if (type < 0)
die("object not found: %s", oid_to_hex(&oid));
diff --git a/builtin/fast-import.c b/builtin/fast-import.c
index b2839c5f43..6e7d0c3449 100644
--- a/builtin/fast-import.c
+++ b/builtin/fast-import.c
@@ -24,11 +24,12 @@
#include "packfile.h"
#include "object-file.h"
#include "object-name.h"
-#include "object-store.h"
+#include "odb.h"
#include "mem-pool.h"
#include "commit-reach.h"
#include "khash.h"
#include "date.h"
+#include "gpg-interface.h"
#define PACK_ID_BITS 16
#define MAX_PACK_ID ((1<<PACK_ID_BITS)-1)
@@ -763,7 +764,8 @@ static void start_packfile(void)
struct packed_git *p;
int pack_fd;
- pack_fd = odb_mkstemp(&tmp_file, "pack/tmp_pack_XXXXXX");
+ pack_fd = odb_mkstemp(the_repository->objects, &tmp_file,
+ "pack/tmp_pack_XXXXXX");
FLEX_ALLOC_STR(p, pack_name, tmp_file.buf);
strbuf_release(&tmp_file);
@@ -1264,7 +1266,7 @@ static void load_tree(struct tree_entry *root)
die("Can't load tree %s", oid_to_hex(oid));
} else {
enum object_type type;
- buf = repo_read_object_file(the_repository, oid, &type, &size);
+ buf = odb_read_object(the_repository->objects, oid, &type, &size);
if (!buf || type != OBJ_TREE)
die("Can't load tree %s", oid_to_hex(oid));
}
@@ -1755,8 +1757,8 @@ static void insert_object_entry(struct mark_set **s, struct object_id *oid, uint
struct object_entry *e;
e = find_object(oid);
if (!e) {
- enum object_type type = oid_object_info(the_repository,
- oid, NULL);
+ enum object_type type = odb_read_object_info(the_repository->objects,
+ oid, NULL);
if (type < 0)
die("object not found: %s", oid_to_hex(oid));
e = insert_object(oid);
@@ -2415,8 +2417,8 @@ static void file_change_m(const char *p, struct branch *b)
enum object_type expected = S_ISDIR(mode) ?
OBJ_TREE: OBJ_BLOB;
enum object_type type = oe ? oe->type :
- oid_object_info(the_repository, &oid,
- NULL);
+ odb_read_object_info(the_repository->objects,
+ &oid, NULL);
if (type < 0)
die("%s not found: %s",
S_ISDIR(mode) ? "Tree" : "Blob",
@@ -2534,10 +2536,9 @@ static void note_change_n(const char *p, struct branch *b, unsigned char *old_fa
oidcpy(&commit_oid, &commit_oe->idx.oid);
} else if (!repo_get_oid(the_repository, p, &commit_oid)) {
unsigned long size;
- char *buf = read_object_with_reference(the_repository,
- &commit_oid,
- OBJ_COMMIT, &size,
- &commit_oid);
+ char *buf = odb_read_object_peeled(the_repository->objects,
+ &commit_oid, OBJ_COMMIT, &size,
+ &commit_oid);
if (!buf || size < the_hash_algo->hexsz + 6)
die("Not a valid commit: %s", p);
free(buf);
@@ -2552,7 +2553,7 @@ static void note_change_n(const char *p, struct branch *b, unsigned char *old_fa
die("Not a blob (actually a %s): %s",
type_name(oe->type), command_buf.buf);
} else if (!is_null_oid(&oid)) {
- enum object_type type = oid_object_info(the_repository, &oid,
+ enum object_type type = odb_read_object_info(the_repository->objects, &oid,
NULL);
if (type < 0)
die("Blob not found: %s", command_buf.buf);
@@ -2603,9 +2604,8 @@ static void parse_from_existing(struct branch *b)
unsigned long size;
char *buf;
- buf = read_object_with_reference(the_repository,
- &b->oid, OBJ_COMMIT, &size,
- &b->oid);
+ buf = odb_read_object_peeled(the_repository->objects, &b->oid,
+ OBJ_COMMIT, &size, &b->oid);
parse_from_commit(b, buf, size);
free(buf);
}
@@ -2698,10 +2698,9 @@ static struct hash_list *parse_merge(unsigned int *count)
oidcpy(&n->oid, &oe->idx.oid);
} else if (!repo_get_oid(the_repository, from, &n->oid)) {
unsigned long size;
- char *buf = read_object_with_reference(the_repository,
- &n->oid,
- OBJ_COMMIT,
- &size, &n->oid);
+ char *buf = odb_read_object_peeled(the_repository->objects,
+ &n->oid, OBJ_COMMIT,
+ &size, &n->oid);
if (!buf || size < the_hash_algo->hexsz + 6)
die("Not a valid commit: %s", from);
free(buf);
@@ -2718,15 +2717,82 @@ static struct hash_list *parse_merge(unsigned int *count)
return list;
}
+struct signature_data {
+ char *hash_algo; /* "sha1" or "sha256" */
+ char *sig_format; /* "openpgp", "x509", "ssh", or "unknown" */
+ struct strbuf data; /* The actual signature data */
+};
+
+static void parse_one_signature(struct signature_data *sig, const char *v)
+{
+ char *args = xstrdup(v); /* Will be freed when sig->hash_algo is freed */
+ char *space = strchr(args, ' ');
+
+ if (!space)
+ die("Expected gpgsig format: 'gpgsig <hash-algo> <signature-format>', "
+ "got 'gpgsig %s'", args);
+ *space = '\0';
+
+ sig->hash_algo = args;
+ sig->sig_format = space + 1;
+
+ /* Validate hash algorithm */
+ if (strcmp(sig->hash_algo, "sha1") &&
+ strcmp(sig->hash_algo, "sha256"))
+ die("Unknown git hash algorithm in gpgsig: '%s'", sig->hash_algo);
+
+ /* Validate signature format */
+ if (!valid_signature_format(sig->sig_format))
+ die("Invalid signature format in gpgsig: '%s'", sig->sig_format);
+ if (!strcmp(sig->sig_format, "unknown"))
+ warning("'unknown' signature format in gpgsig");
+
+ /* Read signature data */
+ read_next_command();
+ parse_data(&sig->data, 0, NULL);
+}
+
+static void add_gpgsig_to_commit(struct strbuf *commit_data,
+ const char *header,
+ struct signature_data *sig)
+{
+ struct string_list siglines = STRING_LIST_INIT_NODUP;
+
+ if (!sig->hash_algo)
+ return;
+
+ strbuf_addstr(commit_data, header);
+ string_list_split_in_place(&siglines, sig->data.buf, "\n", -1);
+ strbuf_add_separated_string_list(commit_data, "\n ", &siglines);
+ strbuf_addch(commit_data, '\n');
+ string_list_clear(&siglines, 1);
+ strbuf_release(&sig->data);
+ free(sig->hash_algo);
+}
+
+static void store_signature(struct signature_data *stored_sig,
+ struct signature_data *new_sig,
+ const char *hash_type)
+{
+ if (stored_sig->hash_algo) {
+ warning("multiple %s signatures found, "
+ "ignoring additional signature",
+ hash_type);
+ strbuf_release(&new_sig->data);
+ free(new_sig->hash_algo);
+ } else {
+ *stored_sig = *new_sig;
+ }
+}
+
static void parse_new_commit(const char *arg)
{
- static struct strbuf sig = STRBUF_INIT;
static struct strbuf msg = STRBUF_INIT;
- struct string_list siglines = STRING_LIST_INIT_NODUP;
+ struct signature_data sig_sha1 = { NULL, NULL, STRBUF_INIT };
+ struct signature_data sig_sha256 = { NULL, NULL, STRBUF_INIT };
struct branch *b;
char *author = NULL;
char *committer = NULL;
- char *sig_alg = NULL;
char *encoding = NULL;
struct hash_list *merge_list = NULL;
unsigned int merge_count;
@@ -2750,13 +2816,23 @@ static void parse_new_commit(const char *arg)
}
if (!committer)
die("Expected committer but didn't get one");
- if (skip_prefix(command_buf.buf, "gpgsig ", &v)) {
- sig_alg = xstrdup(v);
- read_next_command();
- parse_data(&sig, 0, NULL);
+
+ /* Process signatures (up to 2: one "sha1" and one "sha256") */
+ while (skip_prefix(command_buf.buf, "gpgsig ", &v)) {
+ struct signature_data sig = { NULL, NULL, STRBUF_INIT };
+
+ parse_one_signature(&sig, v);
+
+ if (!strcmp(sig.hash_algo, "sha1"))
+ store_signature(&sig_sha1, &sig, "SHA-1");
+ else if (!strcmp(sig.hash_algo, "sha256"))
+ store_signature(&sig_sha256, &sig, "SHA-256");
+ else
+ BUG("parse_one_signature() returned unknown hash algo");
+
read_next_command();
- } else
- strbuf_setlen(&sig, 0);
+ }
+
if (skip_prefix(command_buf.buf, "encoding ", &v)) {
encoding = xstrdup(v);
read_next_command();
@@ -2830,23 +2906,14 @@ static void parse_new_commit(const char *arg)
strbuf_addf(&new_data,
"encoding %s\n",
encoding);
- if (sig_alg) {
- if (!strcmp(sig_alg, "sha1"))
- strbuf_addstr(&new_data, "gpgsig ");
- else if (!strcmp(sig_alg, "sha256"))
- strbuf_addstr(&new_data, "gpgsig-sha256 ");
- else
- die("Expected gpgsig algorithm sha1 or sha256, got %s", sig_alg);
- string_list_split_in_place(&siglines, sig.buf, "\n", -1);
- strbuf_add_separated_string_list(&new_data, "\n ", &siglines);
- strbuf_addch(&new_data, '\n');
- }
+
+ add_gpgsig_to_commit(&new_data, "gpgsig ", &sig_sha1);
+ add_gpgsig_to_commit(&new_data, "gpgsig-sha256 ", &sig_sha256);
+
strbuf_addch(&new_data, '\n');
strbuf_addbuf(&new_data, &msg);
- string_list_clear(&siglines, 1);
free(author);
free(committer);
- free(sig_alg);
free(encoding);
if (!store_object(OBJ_COMMIT, &new_data, NULL, &b->oid, next_mark))
@@ -2894,7 +2961,8 @@ static void parse_new_tag(const char *arg)
} else if (!repo_get_oid(the_repository, from, &oid)) {
struct object_entry *oe = find_object(&oid);
if (!oe) {
- type = oid_object_info(the_repository, &oid, NULL);
+ type = odb_read_object_info(the_repository->objects,
+ &oid, NULL);
if (type < 0)
die("Not a valid object: %s", from);
} else
@@ -3000,7 +3068,7 @@ static void cat_blob(struct object_entry *oe, struct object_id *oid)
char *buf;
if (!oe || oe->pack_id == MAX_PACK_ID) {
- buf = repo_read_object_file(the_repository, oid, &type, &size);
+ buf = odb_read_object(the_repository->objects, oid, &type, &size);
} else {
type = oe->type;
buf = gfi_unpack_entry(oe, &size);
@@ -3084,8 +3152,8 @@ static struct object_entry *dereference(struct object_entry *oe,
const unsigned hexsz = the_hash_algo->hexsz;
if (!oe) {
- enum object_type type = oid_object_info(the_repository, oid,
- NULL);
+ enum object_type type = odb_read_object_info(the_repository->objects,
+ oid, NULL);
if (type < 0)
die("object not found: %s", oid_to_hex(oid));
/* cache it! */
@@ -3108,8 +3176,8 @@ static struct object_entry *dereference(struct object_entry *oe,
buf = gfi_unpack_entry(oe, &size);
} else {
enum object_type unused;
- buf = repo_read_object_file(the_repository, oid, &unused,
- &size);
+ buf = odb_read_object(the_repository->objects, oid,
+ &unused, &size);
}
if (!buf)
die("Can't load object %s", oid_to_hex(oid));
diff --git a/builtin/fetch-pack.c b/builtin/fetch-pack.c
index d07eec9e55..d9e42bad58 100644
--- a/builtin/fetch-pack.c
+++ b/builtin/fetch-pack.c
@@ -274,8 +274,10 @@ int cmd_fetch_pack(int argc,
}
close(fd[0]);
close(fd[1]);
- if (finish_connect(conn))
- return 1;
+ if (finish_connect(conn)) {
+ ret = 1;
+ goto cleanup;
+ }
ret = !fetched_refs;
@@ -291,6 +293,7 @@ int cmd_fetch_pack(int argc,
printf("%s %s\n",
oid_to_hex(&ref->old_oid), ref->name);
+cleanup:
for (size_t i = 0; i < nr_sought; i++)
free_one_ref(sought_to_free[i]);
free(sought_to_free);
diff --git a/builtin/fetch.c b/builtin/fetch.c
index 40a0e8d244..87a0cca799 100644
--- a/builtin/fetch.c
+++ b/builtin/fetch.c
@@ -14,7 +14,7 @@
#include "refs.h"
#include "refspec.h"
#include "object-name.h"
-#include "object-store.h"
+#include "odb.h"
#include "oidset.h"
#include "oid-array.h"
#include "commit.h"
@@ -366,9 +366,9 @@ static void find_non_local_tags(const struct ref *refs,
*/
if (ends_with(ref->name, "^{}")) {
if (item &&
- !has_object(the_repository, &ref->old_oid, 0) &&
+ !odb_has_object(the_repository->objects, &ref->old_oid, 0) &&
!oidset_contains(&fetch_oids, &ref->old_oid) &&
- !has_object(the_repository, &item->oid, 0) &&
+ !odb_has_object(the_repository->objects, &item->oid, 0) &&
!oidset_contains(&fetch_oids, &item->oid))
clear_item(item);
item = NULL;
@@ -382,7 +382,7 @@ static void find_non_local_tags(const struct ref *refs,
* fetch.
*/
if (item &&
- !has_object(the_repository, &item->oid, 0) &&
+ !odb_has_object(the_repository->objects, &item->oid, 0) &&
!oidset_contains(&fetch_oids, &item->oid))
clear_item(item);
@@ -403,7 +403,7 @@ static void find_non_local_tags(const struct ref *refs,
* checked to see if it needs fetching.
*/
if (item &&
- !has_object(the_repository, &item->oid, 0) &&
+ !odb_has_object(the_repository->objects, &item->oid, 0) &&
!oidset_contains(&fetch_oids, &item->oid))
clear_item(item);
@@ -640,9 +640,6 @@ static struct ref *get_ref_map(struct remote *remote,
return ref_map;
}
-#define STORE_REF_ERROR_OTHER 1
-#define STORE_REF_ERROR_DF_CONFLICT 2
-
static int s_update_ref(const char *action,
struct ref *ref,
struct ref_transaction *transaction,
@@ -650,7 +647,6 @@ static int s_update_ref(const char *action,
{
char *msg;
char *rla = getenv("GIT_REFLOG_ACTION");
- struct ref_transaction *our_transaction = NULL;
struct strbuf err = STRBUF_INIT;
int ret;
@@ -660,43 +656,10 @@ static int s_update_ref(const char *action,
rla = default_rla.buf;
msg = xstrfmt("%s: %s", rla, action);
- /*
- * If no transaction was passed to us, we manage the transaction
- * ourselves. Otherwise, we trust the caller to handle the transaction
- * lifecycle.
- */
- if (!transaction) {
- transaction = our_transaction = ref_store_transaction_begin(get_main_ref_store(the_repository),
- 0, &err);
- if (!transaction) {
- ret = STORE_REF_ERROR_OTHER;
- goto out;
- }
- }
-
ret = ref_transaction_update(transaction, ref->name, &ref->new_oid,
check_old ? &ref->old_oid : NULL,
NULL, NULL, 0, msg, &err);
- if (ret) {
- ret = STORE_REF_ERROR_OTHER;
- goto out;
- }
-
- if (our_transaction) {
- switch (ref_transaction_commit(our_transaction, &err)) {
- case 0:
- break;
- case REF_TRANSACTION_ERROR_NAME_CONFLICT:
- ret = STORE_REF_ERROR_DF_CONFLICT;
- goto out;
- default:
- ret = STORE_REF_ERROR_OTHER;
- goto out;
- }
- }
-out:
- ref_transaction_free(our_transaction);
if (ret)
error("%s", err.buf);
strbuf_release(&err);
@@ -910,8 +873,8 @@ static int update_local_ref(struct ref *ref,
struct commit *current = NULL, *updated;
int fast_forward = 0;
- if (!has_object(the_repository, &ref->new_oid,
- HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR))
+ if (!odb_has_object(the_repository->objects, &ref->new_oid,
+ HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR))
die(_("object %s not found"), oid_to_hex(&ref->new_oid));
if (oideq(&ref->old_oid, &ref->new_oid)) {
@@ -992,7 +955,7 @@ static int update_local_ref(struct ref *ref,
fast_forward = repo_in_merge_bases(the_repository, current,
updated);
if (fast_forward < 0)
- exit(128);
+ die(NULL);
forced_updates_ms += (getnanotime() - t_before) / 1000000;
} else {
fast_forward = 1;
@@ -1139,7 +1102,6 @@ N_("it took %.2f seconds to check forced updates; you can use\n"
"to avoid this check\n");
static int store_updated_refs(struct display_state *display_state,
- const char *remote_name,
int connectivity_checked,
struct ref_transaction *transaction, struct ref *ref_map,
struct fetch_head *fetch_head,
@@ -1277,11 +1239,6 @@ static int store_updated_refs(struct display_state *display_state,
}
}
- if (rc & STORE_REF_ERROR_DF_CONFLICT)
- error(_("some local refs could not be updated; try running\n"
- " 'git remote prune %s' to remove any old, conflicting "
- "branches"), remote_name);
-
if (advice_enabled(ADVICE_FETCH_SHOW_FORCED_UPDATES)) {
if (!config->show_forced_updates) {
warning(_(warn_show_forced_updates));
@@ -1330,7 +1287,8 @@ static int check_exist_and_connected(struct ref *ref_map)
* we need all direct targets to exist.
*/
for (r = rm; r; r = r->next) {
- if (!has_object(the_repository, &r->old_oid, HAS_OBJECT_RECHECK_PACKED))
+ if (!odb_has_object(the_repository->objects, &r->old_oid,
+ HAS_OBJECT_RECHECK_PACKED))
return -1;
}
@@ -1365,9 +1323,8 @@ static int fetch_and_consume_refs(struct display_state *display_state,
}
trace2_region_enter("fetch", "consume_refs", the_repository);
- ret = store_updated_refs(display_state, transport->remote->name,
- connectivity_checked, transaction, ref_map,
- fetch_head, config);
+ ret = store_updated_refs(display_state, connectivity_checked,
+ transaction, ref_map, fetch_head, config);
trace2_region_leave("fetch", "consume_refs", the_repository);
out:
@@ -1383,9 +1340,10 @@ static int prune_refs(struct display_state *display_state,
int result = 0;
struct ref *ref, *stale_refs = get_stale_heads(rs, ref_map);
struct strbuf err = STRBUF_INIT;
- const char *dangling_msg = dry_run
- ? _(" (%s will become dangling)")
- : _(" (%s has become dangling)");
+ struct string_list refnames = STRING_LIST_INIT_NODUP;
+
+ for (ref = stale_refs; ref; ref = ref->next)
+ string_list_append(&refnames, ref->name);
if (!dry_run) {
if (transaction) {
@@ -1396,15 +1354,9 @@ static int prune_refs(struct display_state *display_state,
goto cleanup;
}
} else {
- struct string_list refnames = STRING_LIST_INIT_NODUP;
-
- for (ref = stale_refs; ref; ref = ref->next)
- string_list_append(&refnames, ref->name);
-
result = refs_delete_refs(get_main_ref_store(the_repository),
"fetch: prune", &refnames,
0);
- string_list_clear(&refnames, 0);
}
}
@@ -1416,12 +1368,14 @@ static int prune_refs(struct display_state *display_state,
_("(none)"), ref->name,
&ref->new_oid, &ref->old_oid,
summary_width);
- refs_warn_dangling_symref(get_main_ref_store(the_repository),
- stderr, dangling_msg, ref->name);
}
+ string_list_sort(&refnames);
+ refs_warn_dangling_symrefs(get_main_ref_store(the_repository),
+ stderr, " ", dry_run, &refnames);
}
cleanup:
+ string_list_clear(&refnames, 0);
strbuf_release(&err);
free_refs(stale_refs);
return result;
@@ -1485,7 +1439,7 @@ static void add_negotiation_tips(struct git_transport_options *smart_options)
struct object_id oid;
if (repo_get_oid(the_repository, s, &oid))
die(_("%s is not a valid object"), s);
- if (!has_object(the_repository, &oid, 0))
+ if (!odb_has_object(the_repository->objects, &oid, 0))
die(_("the object %s does not exist"), s);
oid_array_append(oids, &oid);
continue;
@@ -1687,6 +1641,36 @@ cleanup:
return result;
}
+struct ref_rejection_data {
+ int *retcode;
+ int conflict_msg_shown;
+ const char *remote_name;
+};
+
+static void ref_transaction_rejection_handler(const char *refname,
+ const struct object_id *old_oid UNUSED,
+ const struct object_id *new_oid UNUSED,
+ const char *old_target UNUSED,
+ const char *new_target UNUSED,
+ enum ref_transaction_error err,
+ void *cb_data)
+{
+ struct ref_rejection_data *data = cb_data;
+
+ if (err == REF_TRANSACTION_ERROR_NAME_CONFLICT && !data->conflict_msg_shown) {
+ error(_("some local refs could not be updated; try running\n"
+ " 'git remote prune %s' to remove any old, conflicting "
+ "branches"), data->remote_name);
+ data->conflict_msg_shown = 1;
+ } else {
+ const char *reason = ref_transaction_error_msg(err);
+
+ error(_("fetching ref %s failed: %s"), refname, reason);
+ }
+
+ *data->retcode = 1;
+}
+
static int do_fetch(struct transport *transport,
struct refspec *rs,
const struct fetch_config *config)
@@ -1807,6 +1791,24 @@ static int do_fetch(struct transport *transport,
retcode = 1;
}
+ /*
+ * If not atomic, we can still use batched updates, which would be much
+ * more performant. We don't initiate the transaction before pruning,
+ * since pruning must be an independent step, to avoid F/D conflicts.
+ *
+ * TODO: if reference transactions gain logical conflict resolution, we
+ * can delete and create refs (with F/D conflicts) in the same transaction
+ * and this can be moved above the 'prune_refs()' block.
+ */
+ if (!transaction) {
+ transaction = ref_store_transaction_begin(get_main_ref_store(the_repository),
+ REF_TRANSACTION_ALLOW_FAILURE, &err);
+ if (!transaction) {
+ retcode = -1;
+ goto cleanup;
+ }
+ }
+
if (fetch_and_consume_refs(&display_state, transport, transaction, ref_map,
&fetch_head, config)) {
retcode = 1;
@@ -1838,16 +1840,31 @@ static int do_fetch(struct transport *transport,
free_refs(tags_ref_map);
}
- if (transaction) {
- if (retcode)
- goto cleanup;
+ if (retcode)
+ goto cleanup;
- retcode = ref_transaction_commit(transaction, &err);
+ retcode = ref_transaction_commit(transaction, &err);
+ if (retcode) {
+ /*
+ * Explicitly handle transaction cleanup to avoid
+ * aborting an already closed transaction.
+ */
+ ref_transaction_free(transaction);
+ transaction = NULL;
+ goto cleanup;
+ }
+
+ if (!atomic_fetch) {
+ struct ref_rejection_data data = {
+ .retcode = &retcode,
+ .conflict_msg_shown = 0,
+ .remote_name = transport->remote->name,
+ };
+
+ ref_transaction_for_each_rejected_update(transaction,
+ ref_transaction_rejection_handler,
+ &data);
if (retcode) {
- /*
- * Explicitly handle transaction cleanup to avoid
- * aborting an already closed transaction.
- */
ref_transaction_free(transaction);
transaction = NULL;
goto cleanup;
@@ -2653,7 +2670,7 @@ int cmd_fetch(int argc,
commit_graph_flags |= COMMIT_GRAPH_WRITE_PROGRESS;
trace2_region_enter("fetch", "write-commit-graph", the_repository);
- write_commit_graph_reachable(the_repository->objects->odb,
+ write_commit_graph_reachable(the_repository->objects->sources,
commit_graph_flags,
NULL);
trace2_region_leave("fetch", "write-commit-graph", the_repository);
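
The fetch.c changes drop the per-ref transaction handling in s_update_ref() in favour of one batched transaction opened with REF_TRANSACTION_ALLOW_FAILURE, with per-ref failures reported only after the commit. A condensed sketch of the resulting flow, using only calls visible in the hunks (`remote_name` stands in for transport->remote->name):

	int retcode = 0;
	struct strbuf err = STRBUF_INIT;
	struct ref_rejection_data data = {
		.retcode = &retcode,
		.conflict_msg_shown = 0,
		.remote_name = remote_name,
	};
	struct ref_transaction *transaction =
		ref_store_transaction_begin(get_main_ref_store(the_repository),
					    REF_TRANSACTION_ALLOW_FAILURE, &err);

	/* ... each fetched ref is queued via ref_transaction_update() ... */

	if (!ref_transaction_commit(transaction, &err))
		ref_transaction_for_each_rejected_update(transaction,
							 ref_transaction_rejection_handler,
							 &data);
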
diff --git a/builtin/fsck.c b/builtin/fsck.c
index e7d96a9c8e..0084cf7400 100644
--- a/builtin/fsck.c
+++ b/builtin/fsck.c
@@ -17,7 +17,7 @@
#include "packfile.h"
#include "object-file.h"
#include "object-name.h"
-#include "object-store.h"
+#include "odb.h"
#include "path.h"
#include "read-cache-ll.h"
#include "replace-object.h"
@@ -71,7 +71,8 @@ static const char *printable_type(const struct object_id *oid,
const char *ret;
if (type == OBJ_NONE)
- type = oid_object_info(the_repository, oid, NULL);
+ type = odb_read_object_info(the_repository->objects,
+ oid, NULL);
ret = type_name(type);
if (!ret)
@@ -160,7 +161,7 @@ static int mark_object(struct object *obj, enum object_type type,
return 0;
if (!(obj->flags & HAS_OBJ)) {
- if (parent && !has_object(the_repository, &obj->oid, 1)) {
+ if (parent && !odb_has_object(the_repository->objects, &obj->oid, 1)) {
printf_ln(_("broken link from %7s %s\n"
" to %7s %s"),
printable_type(&parent->oid, parent->type),
@@ -232,8 +233,8 @@ static void mark_unreachable_referents(const struct object_id *oid)
* (and we want to avoid parsing blobs).
*/
if (obj->type == OBJ_NONE) {
- enum object_type type = oid_object_info(the_repository,
- &obj->oid, NULL);
+ enum object_type type = odb_read_object_info(the_repository->objects,
+ &obj->oid, NULL);
if (type > 0)
object_as_type(obj, type, 0);
}
@@ -956,7 +957,7 @@ int cmd_fsck(int argc,
struct repository *repo UNUSED)
{
int i;
- struct object_directory *odb;
+ struct odb_source *source;
/* fsck knows how to handle missing promisor objects */
fetch_if_missing = 0;
@@ -997,9 +998,9 @@ int cmd_fsck(int argc,
for_each_packed_object(the_repository,
mark_packed_for_connectivity, NULL, 0);
} else {
- prepare_alt_odb(the_repository);
- for (odb = the_repository->objects->odb; odb; odb = odb->next)
- fsck_object_dir(odb->path);
+ odb_prepare_alternates(the_repository->objects);
+ for (source = the_repository->objects->sources; source; source = source->next)
+ fsck_object_dir(source->path);
if (check_full) {
struct packed_git *p;
@@ -1108,12 +1109,12 @@ int cmd_fsck(int argc,
if (the_repository->settings.core_commit_graph) {
struct child_process commit_graph_verify = CHILD_PROCESS_INIT;
- prepare_alt_odb(the_repository);
- for (odb = the_repository->objects->odb; odb; odb = odb->next) {
+ odb_prepare_alternates(the_repository->objects);
+ for (source = the_repository->objects->sources; source; source = source->next) {
child_process_init(&commit_graph_verify);
commit_graph_verify.git_cmd = 1;
strvec_pushl(&commit_graph_verify.args, "commit-graph",
- "verify", "--object-dir", odb->path, NULL);
+ "verify", "--object-dir", source->path, NULL);
if (show_progress)
strvec_push(&commit_graph_verify.args, "--progress");
else
@@ -1126,12 +1127,12 @@ int cmd_fsck(int argc,
if (the_repository->settings.core_multi_pack_index) {
struct child_process midx_verify = CHILD_PROCESS_INIT;
- prepare_alt_odb(the_repository);
- for (odb = the_repository->objects->odb; odb; odb = odb->next) {
+ odb_prepare_alternates(the_repository->objects);
+ for (source = the_repository->objects->sources; source; source = source->next) {
child_process_init(&midx_verify);
midx_verify.git_cmd = 1;
strvec_pushl(&midx_verify.args, "multi-pack-index",
- "verify", "--object-dir", odb->path, NULL);
+ "verify", "--object-dir", source->path, NULL);
if (show_progress)
strvec_push(&midx_verify.args, "--progress");
else
diff --git a/builtin/gc.c b/builtin/gc.c
index 7dc94f243d..a02d6b7806 100644
--- a/builtin/gc.c
+++ b/builtin/gc.c
@@ -251,7 +251,24 @@ static enum schedule_priority parse_schedule(const char *value)
return SCHEDULE_NONE;
}
+enum maintenance_task_label {
+ TASK_PREFETCH,
+ TASK_LOOSE_OBJECTS,
+ TASK_INCREMENTAL_REPACK,
+ TASK_GC,
+ TASK_COMMIT_GRAPH,
+ TASK_PACK_REFS,
+ TASK_REFLOG_EXPIRE,
+ TASK_WORKTREE_PRUNE,
+ TASK_RERERE_GC,
+
+ /* Leave as final value */
+ TASK__COUNT
+};
+
struct maintenance_run_opts {
+ enum maintenance_task_label *tasks;
+ size_t tasks_nr, tasks_alloc;
int auto_flag;
int detach;
int quiet;
@@ -261,6 +278,11 @@ struct maintenance_run_opts {
.detach = -1, \
}
+static void maintenance_run_opts_release(struct maintenance_run_opts *opts)
+{
+ free(opts->tasks);
+}
+
static int pack_refs_condition(UNUSED struct gc_config *cfg)
{
/*
@@ -324,6 +346,7 @@ static int reflog_expire_condition(struct gc_config *cfg UNUSED)
count_reflog_entries, &data);
reflog_expiry_cleanup(&data.policy);
+ reflog_clear_expire_config(&data.policy.opts);
return data.count >= data.limit;
}
@@ -517,7 +540,7 @@ static uint64_t total_ram(void)
return total;
}
#elif defined(HAVE_BSD_SYSCTL) && (defined(HW_MEMSIZE) || defined(HW_PHYSMEM) || defined(HW_PHYSMEM64))
- int64_t physical_memory;
+ uint64_t physical_memory;
int mib[2];
size_t length;
@@ -529,9 +552,16 @@ static uint64_t total_ram(void)
# else
mib[1] = HW_PHYSMEM;
# endif
- length = sizeof(int64_t);
- if (!sysctl(mib, 2, &physical_memory, &length, NULL, 0))
+ length = sizeof(physical_memory);
+ if (!sysctl(mib, 2, &physical_memory, &length, NULL, 0)) {
+ if (length == 4) {
+ uint32_t mem;
+
+ if (!sysctl(mib, 2, &mem, &length, NULL, 0))
+ physical_memory = mem;
+ }
return physical_memory;
+ }
#elif defined(GIT_WINDOWS_NATIVE)
MEMORYSTATUSEX memInfo;
@@ -796,22 +826,14 @@ done:
return ret;
}
-static void gc_before_repack(struct maintenance_run_opts *opts,
- struct gc_config *cfg)
+static int gc_foreground_tasks(struct maintenance_run_opts *opts,
+ struct gc_config *cfg)
{
- /*
- * We may be called twice, as both the pre- and
- * post-daemonized phases will call us, but running these
- * commands more than once is pointless and wasteful.
- */
- static int done = 0;
- if (done++)
- return;
-
if (cfg->pack_refs && maintenance_task_pack_refs(opts, cfg))
- die(FAILED_RUN, "pack-refs");
+ return error(FAILED_RUN, "pack-refs");
if (cfg->prune_reflogs && maintenance_task_reflog_expire(opts, cfg))
- die(FAILED_RUN, "reflog");
+ return error(FAILED_RUN, "reflog");
+ return 0;
}
int cmd_gc(int argc,
@@ -820,12 +842,12 @@ int cmd_gc(int argc,
struct repository *repo UNUSED)
{
int aggressive = 0;
- int quiet = 0;
int force = 0;
const char *name;
pid_t pid;
int daemonized = 0;
int keep_largest_pack = -1;
+ int skip_foreground_tasks = 0;
timestamp_t dummy;
struct maintenance_run_opts opts = MAINTENANCE_RUN_OPTS_INIT;
struct gc_config cfg = GC_CONFIG_INIT;
@@ -833,7 +855,7 @@ int cmd_gc(int argc,
const char *prune_expire_arg = prune_expire_sentinel;
int ret;
struct option builtin_gc_options[] = {
- OPT__QUIET(&quiet, N_("suppress progress reporting")),
+ OPT__QUIET(&opts.quiet, N_("suppress progress reporting")),
{
.type = OPTION_STRING,
.long_name = "prune",
@@ -858,6 +880,8 @@ int cmd_gc(int argc,
N_("repack all other packs except the largest pack")),
OPT_STRING(0, "expire-to", &cfg.repack_expire_to, N_("dir"),
N_("pack prefix to store a pack containing pruned objects")),
+ OPT_HIDDEN_BOOL(0, "skip-foreground-tasks", &skip_foreground_tasks,
+ N_("skip maintenance tasks typically done in the foreground")),
OPT_END()
};
@@ -893,7 +917,7 @@ int cmd_gc(int argc,
if (cfg.aggressive_window > 0)
strvec_pushf(&repack, "--window=%d", cfg.aggressive_window);
}
- if (quiet)
+ if (opts.quiet)
strvec_push(&repack, "-q");
if (opts.auto_flag) {
@@ -908,7 +932,7 @@ int cmd_gc(int argc,
goto out;
}
- if (!quiet) {
+ if (!opts.quiet) {
if (opts.detach > 0)
fprintf(stderr, _("Auto packing the repository in background for optimum performance.\n"));
else
@@ -941,13 +965,16 @@ int cmd_gc(int argc,
goto out;
}
- if (lock_repo_for_gc(force, &pid)) {
- ret = 0;
- goto out;
- }
+ if (!skip_foreground_tasks) {
+ if (lock_repo_for_gc(force, &pid)) {
+ ret = 0;
+ goto out;
+ }
- gc_before_repack(&opts, &cfg); /* dies on failure */
- delete_tempfile(&pidfile);
+ if (gc_foreground_tasks(&opts, &cfg) < 0)
+ die(NULL);
+ delete_tempfile(&pidfile);
+ }
/*
* failure to daemonize is ok, we'll continue
@@ -976,9 +1003,10 @@ int cmd_gc(int argc,
free(path);
}
- gc_before_repack(&opts, &cfg);
+ if (opts.detach <= 0 && !skip_foreground_tasks)
+ gc_foreground_tasks(&opts, &cfg);
- if (!repository_format_precious_objects) {
+ if (!the_repository->repository_format_precious_objects) {
struct child_process repack_cmd = CHILD_PROCESS_INIT;
repack_cmd.git_cmd = 1;
@@ -993,7 +1021,7 @@ int cmd_gc(int argc,
strvec_pushl(&prune_cmd.args, "prune", "--expire", NULL);
/* run `git prune` even if using cruft packs */
strvec_push(&prune_cmd.args, cfg.prune_expire);
- if (quiet)
+ if (opts.quiet)
strvec_push(&prune_cmd.args, "--no-progress");
if (repo_has_promisor_remote(the_repository))
strvec_push(&prune_cmd.args,
@@ -1020,8 +1048,8 @@ int cmd_gc(int argc,
}
if (the_repository->settings.gc_write_commit_graph == 1)
- write_commit_graph_reachable(the_repository->objects->odb,
- !quiet && !daemonized ? COMMIT_GRAPH_WRITE_PROGRESS : 0,
+ write_commit_graph_reachable(the_repository->objects->sources,
+ !opts.quiet && !daemonized ? COMMIT_GRAPH_WRITE_PROGRESS : 0,
NULL);
if (opts.auto_flag && too_many_loose_objects(&cfg))
@@ -1035,6 +1063,7 @@ int cmd_gc(int argc,
}
out:
+ maintenance_run_opts_release(&opts);
gc_config_release(&cfg);
return 0;
}
@@ -1082,7 +1111,7 @@ static int dfs_on_ref(const char *refname UNUSED,
if (!peel_iterated_oid(the_repository, oid, &peeled))
oid = &peeled;
- if (oid_object_info(the_repository, oid, NULL) != OBJ_COMMIT)
+ if (odb_read_object_info(the_repository->objects, oid, NULL) != OBJ_COMMIT)
return 0;
commit = lookup_commit(the_repository, oid);
@@ -1211,8 +1240,14 @@ static int maintenance_task_prefetch(struct maintenance_run_opts *opts,
return 0;
}
-static int maintenance_task_gc(struct maintenance_run_opts *opts,
- struct gc_config *cfg UNUSED)
+static int maintenance_task_gc_foreground(struct maintenance_run_opts *opts,
+ struct gc_config *cfg)
+{
+ return gc_foreground_tasks(opts, cfg);
+}
+
+static int maintenance_task_gc_background(struct maintenance_run_opts *opts,
+ struct gc_config *cfg UNUSED)
{
struct child_process child = CHILD_PROCESS_INIT;
@@ -1226,6 +1261,7 @@ static int maintenance_task_gc(struct maintenance_run_opts *opts,
else
strvec_push(&child.args, "--no-quiet");
strvec_push(&child.args, "--no-detach");
+ strvec_push(&child.args, "--skip-foreground-tasks");
return run_command(&child);
}
@@ -1273,7 +1309,7 @@ static int loose_object_auto_condition(struct gc_config *cfg UNUSED)
if (loose_object_auto_limit < 0)
return 1;
- return for_each_loose_file_in_objdir(the_repository->objects->odb->path,
+ return for_each_loose_file_in_objdir(the_repository->objects->sources->path,
loose_object_count,
NULL, NULL, &count);
}
@@ -1308,7 +1344,7 @@ static int pack_loose(struct maintenance_run_opts *opts)
* Do not start pack-objects process
* if there are no loose objects.
*/
- if (!for_each_loose_file_in_objdir(r->objects->odb->path,
+ if (!for_each_loose_file_in_objdir(r->objects->sources->path,
bail_on_loose,
NULL, NULL, NULL))
return 0;
@@ -1320,7 +1356,7 @@ static int pack_loose(struct maintenance_run_opts *opts)
strvec_push(&pack_proc.args, "--quiet");
else
strvec_push(&pack_proc.args, "--no-quiet");
- strvec_pushf(&pack_proc.args, "%s/pack/loose", r->objects->odb->path);
+ strvec_pushf(&pack_proc.args, "%s/pack/loose", r->objects->sources->path);
pack_proc.in = -1;
@@ -1348,7 +1384,7 @@ static int pack_loose(struct maintenance_run_opts *opts)
else if (data.batch_size > 0)
data.batch_size--; /* Decrease for equality on limit. */
- for_each_loose_file_in_objdir(r->objects->odb->path,
+ for_each_loose_file_in_objdir(r->objects->sources->path,
write_loose_object_to_stdin,
NULL,
NULL,
@@ -1513,107 +1549,120 @@ static int maintenance_task_incremental_repack(struct maintenance_run_opts *opts
return 0;
}
-typedef int maintenance_task_fn(struct maintenance_run_opts *opts,
- struct gc_config *cfg);
-
-/*
- * An auto condition function returns 1 if the task should run
- * and 0 if the task should NOT run. See needs_to_gc() for an
- * example.
- */
-typedef int maintenance_auto_fn(struct gc_config *cfg);
+typedef int (*maintenance_task_fn)(struct maintenance_run_opts *opts,
+ struct gc_config *cfg);
+typedef int (*maintenance_auto_fn)(struct gc_config *cfg);
struct maintenance_task {
const char *name;
- maintenance_task_fn *fn;
- maintenance_auto_fn *auto_condition;
- unsigned enabled:1;
-
- enum schedule_priority schedule;
- /* -1 if not selected. */
- int selected_order;
-};
+ /*
+ * Work that will be executed before detaching. This should not include
+ * tasks that may run for an extended amount of time as it does cause
+ * auto-maintenance to block until foreground tasks have been run.
+ */
+ maintenance_task_fn foreground;
-enum maintenance_task_label {
- TASK_PREFETCH,
- TASK_LOOSE_OBJECTS,
- TASK_INCREMENTAL_REPACK,
- TASK_GC,
- TASK_COMMIT_GRAPH,
- TASK_PACK_REFS,
- TASK_REFLOG_EXPIRE,
- TASK_WORKTREE_PRUNE,
- TASK_RERERE_GC,
+ /*
+ * Work that will be executed after detaching. When not detaching the
+ * work will be run in the foreground, as well.
+ */
+ maintenance_task_fn background;
- /* Leave as final value */
- TASK__COUNT
+ /*
+ * An auto condition function returns 1 if the task should run and 0 if
+ * the task should NOT run. See needs_to_gc() for an example.
+ */
+ maintenance_auto_fn auto_condition;
};
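
To make the foreground/background split concrete, here is a minimal, hypothetical task definition. It is an illustration only: the example_* names are invented and not part of the patch, and it assumes the typedefs and struct layout introduced just above.

static int example_fg(struct maintenance_run_opts *opts UNUSED,
		      struct gc_config *cfg UNUSED)
{
	return 0;	/* quick work that must finish before detaching */
}

static int example_bg(struct maintenance_run_opts *opts UNUSED,
		      struct gc_config *cfg UNUSED)
{
	return 0;	/* long-running work, run after detaching */
}

static const struct maintenance_task example_task = {
	.name = "example",
	.foreground = example_fg,
	.background = example_bg,
	/* no .auto_condition: with --auto this task would be skipped */
};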
-static struct maintenance_task tasks[] = {
+static const struct maintenance_task tasks[] = {
[TASK_PREFETCH] = {
- "prefetch",
- maintenance_task_prefetch,
+ .name = "prefetch",
+ .background = maintenance_task_prefetch,
},
[TASK_LOOSE_OBJECTS] = {
- "loose-objects",
- maintenance_task_loose_objects,
- loose_object_auto_condition,
+ .name = "loose-objects",
+ .background = maintenance_task_loose_objects,
+ .auto_condition = loose_object_auto_condition,
},
[TASK_INCREMENTAL_REPACK] = {
- "incremental-repack",
- maintenance_task_incremental_repack,
- incremental_repack_auto_condition,
+ .name = "incremental-repack",
+ .background = maintenance_task_incremental_repack,
+ .auto_condition = incremental_repack_auto_condition,
},
[TASK_GC] = {
- "gc",
- maintenance_task_gc,
- need_to_gc,
- 1,
+ .name = "gc",
+ .foreground = maintenance_task_gc_foreground,
+ .background = maintenance_task_gc_background,
+ .auto_condition = need_to_gc,
},
[TASK_COMMIT_GRAPH] = {
- "commit-graph",
- maintenance_task_commit_graph,
- should_write_commit_graph,
+ .name = "commit-graph",
+ .background = maintenance_task_commit_graph,
+ .auto_condition = should_write_commit_graph,
},
[TASK_PACK_REFS] = {
- "pack-refs",
- maintenance_task_pack_refs,
- pack_refs_condition,
+ .name = "pack-refs",
+ .foreground = maintenance_task_pack_refs,
+ .auto_condition = pack_refs_condition,
},
[TASK_REFLOG_EXPIRE] = {
- "reflog-expire",
- maintenance_task_reflog_expire,
- reflog_expire_condition,
+ .name = "reflog-expire",
+ .foreground = maintenance_task_reflog_expire,
+ .auto_condition = reflog_expire_condition,
},
[TASK_WORKTREE_PRUNE] = {
- "worktree-prune",
- maintenance_task_worktree_prune,
- worktree_prune_condition,
+ .name = "worktree-prune",
+ .background = maintenance_task_worktree_prune,
+ .auto_condition = worktree_prune_condition,
},
[TASK_RERERE_GC] = {
- "rerere-gc",
- maintenance_task_rerere_gc,
- rerere_gc_condition,
+ .name = "rerere-gc",
+ .background = maintenance_task_rerere_gc,
+ .auto_condition = rerere_gc_condition,
},
};
-static int compare_tasks_by_selection(const void *a_, const void *b_)
+enum task_phase {
+ TASK_PHASE_FOREGROUND,
+ TASK_PHASE_BACKGROUND,
+};
+
+static int maybe_run_task(const struct maintenance_task *task,
+ struct repository *repo,
+ struct maintenance_run_opts *opts,
+ struct gc_config *cfg,
+ enum task_phase phase)
{
- const struct maintenance_task *a = a_;
- const struct maintenance_task *b = b_;
+ int foreground = (phase == TASK_PHASE_FOREGROUND);
+ maintenance_task_fn fn = foreground ? task->foreground : task->background;
+ const char *region = foreground ? "maintenance foreground" : "maintenance";
+ int ret = 0;
- return b->selected_order - a->selected_order;
+ if (!fn)
+ return 0;
+ if (opts->auto_flag &&
+ (!task->auto_condition || !task->auto_condition(cfg)))
+ return 0;
+
+ trace2_region_enter(region, task->name, repo);
+ if (fn(opts, cfg)) {
+ error(_("task '%s' failed"), task->name);
+ ret = 1;
+ }
+ trace2_region_leave(region, task->name, repo);
+
+ return ret;
}
static int maintenance_run_tasks(struct maintenance_run_opts *opts,
struct gc_config *cfg)
{
- int i, found_selected = 0;
int result = 0;
struct lock_file lk;
struct repository *r = the_repository;
- char *lock_path = xstrfmt("%s/maintenance", r->objects->odb->path);
+ char *lock_path = xstrfmt("%s/maintenance", r->objects->sources->path);
if (hold_lock_file_for_update(&lk, lock_path, LOCK_NO_DEREF) < 0) {
/*
@@ -1631,6 +1680,11 @@ static int maintenance_run_tasks(struct maintenance_run_opts *opts,
}
free(lock_path);
+ for (size_t i = 0; i < opts->tasks_nr; i++)
+ if (maybe_run_task(&tasks[opts->tasks[i]], r, opts, cfg,
+ TASK_PHASE_FOREGROUND))
+ result = 1;
+
/* Failure to daemonize is ok, we'll continue in foreground. */
if (opts->detach > 0) {
trace2_region_enter("maintenance", "detach", the_repository);
@@ -1638,120 +1692,138 @@ static int maintenance_run_tasks(struct maintenance_run_opts *opts,
trace2_region_leave("maintenance", "detach", the_repository);
}
- for (i = 0; !found_selected && i < TASK__COUNT; i++)
- found_selected = tasks[i].selected_order >= 0;
-
- if (found_selected)
- QSORT(tasks, TASK__COUNT, compare_tasks_by_selection);
-
- for (i = 0; i < TASK__COUNT; i++) {
- if (found_selected && tasks[i].selected_order < 0)
- continue;
-
- if (!found_selected && !tasks[i].enabled)
- continue;
-
- if (opts->auto_flag &&
- (!tasks[i].auto_condition ||
- !tasks[i].auto_condition(cfg)))
- continue;
-
- if (opts->schedule && tasks[i].schedule < opts->schedule)
- continue;
-
- trace2_region_enter("maintenance", tasks[i].name, r);
- if (tasks[i].fn(opts, cfg)) {
- error(_("task '%s' failed"), tasks[i].name);
+ for (size_t i = 0; i < opts->tasks_nr; i++)
+ if (maybe_run_task(&tasks[opts->tasks[i]], r, opts, cfg,
+ TASK_PHASE_BACKGROUND))
result = 1;
- }
- trace2_region_leave("maintenance", tasks[i].name, r);
- }
rollback_lock_file(&lk);
return result;
}
-static void initialize_maintenance_strategy(void)
+struct maintenance_strategy {
+ struct {
+ int enabled;
+ enum schedule_priority schedule;
+ } tasks[TASK__COUNT];
+};
+
+static const struct maintenance_strategy none_strategy = { 0 };
+static const struct maintenance_strategy default_strategy = {
+ .tasks = {
+ [TASK_GC].enabled = 1,
+ },
+};
+static const struct maintenance_strategy incremental_strategy = {
+ .tasks = {
+ [TASK_COMMIT_GRAPH].enabled = 1,
+ [TASK_COMMIT_GRAPH].schedule = SCHEDULE_HOURLY,
+ [TASK_PREFETCH].enabled = 1,
+ [TASK_PREFETCH].schedule = SCHEDULE_HOURLY,
+ [TASK_INCREMENTAL_REPACK].enabled = 1,
+ [TASK_INCREMENTAL_REPACK].schedule = SCHEDULE_DAILY,
+ [TASK_LOOSE_OBJECTS].enabled = 1,
+ [TASK_LOOSE_OBJECTS].schedule = SCHEDULE_DAILY,
+ [TASK_PACK_REFS].enabled = 1,
+ [TASK_PACK_REFS].schedule = SCHEDULE_WEEKLY,
+ },
+};
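
A brief aside on the initializer style (editor's illustration, not from the patch): designated initializers leave every task that is not named all-zero, i.e. disabled and unscheduled, which is what makes these compact strategy tables work.

/*
 * Sketch only:
 *   none_strategy.tasks[TASK_GC].enabled          == 0   -> nothing runs
 *   default_strategy.tasks[TASK_GC].enabled       == 1   -> only gc runs
 *   default_strategy.tasks[TASK_PREFETCH].enabled == 0   -> left at zero
 */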
+
+static void initialize_task_config(struct maintenance_run_opts *opts,
+ const struct string_list *selected_tasks)
{
+ struct strbuf config_name = STRBUF_INIT;
+ struct maintenance_strategy strategy;
const char *config_str;
- if (git_config_get_string_tmp("maintenance.strategy", &config_str))
- return;
+ /*
+ * In case the user has asked us to run tasks explicitly we only use
+ * those specified tasks. Specifically, we do _not_ want to consult the
+ * config or maintenance strategy.
+ */
+ if (selected_tasks->nr) {
+ for (size_t i = 0; i < selected_tasks->nr; i++) {
+ enum maintenance_task_label label = (intptr_t)selected_tasks->items[i].util;
+ ALLOC_GROW(opts->tasks, opts->tasks_nr + 1, opts->tasks_alloc);
+ opts->tasks[opts->tasks_nr++] = label;
+ }
- if (!strcasecmp(config_str, "incremental")) {
- tasks[TASK_GC].schedule = SCHEDULE_NONE;
- tasks[TASK_COMMIT_GRAPH].enabled = 1;
- tasks[TASK_COMMIT_GRAPH].schedule = SCHEDULE_HOURLY;
- tasks[TASK_PREFETCH].enabled = 1;
- tasks[TASK_PREFETCH].schedule = SCHEDULE_HOURLY;
- tasks[TASK_INCREMENTAL_REPACK].enabled = 1;
- tasks[TASK_INCREMENTAL_REPACK].schedule = SCHEDULE_DAILY;
- tasks[TASK_LOOSE_OBJECTS].enabled = 1;
- tasks[TASK_LOOSE_OBJECTS].schedule = SCHEDULE_DAILY;
- tasks[TASK_PACK_REFS].enabled = 1;
- tasks[TASK_PACK_REFS].schedule = SCHEDULE_WEEKLY;
+ return;
}
-}
-static void initialize_task_config(int schedule)
-{
- int i;
- struct strbuf config_name = STRBUF_INIT;
+ /*
+ * Otherwise, the strategy depends on whether we run as part of a
+ * scheduled job or not:
+ *
+ * - Scheduled maintenance does not perform any housekeeping by
+ * default, but requires the user to pick a maintenance strategy.
+ *
+ * - Unscheduled maintenance uses our default strategy.
+ *
+ * Both of these are affected by the gitconfig though, which may
+ * override specific aspects of our strategy.
+ */
+ if (opts->schedule) {
+ strategy = none_strategy;
- if (schedule)
- initialize_maintenance_strategy();
+ if (!git_config_get_string_tmp("maintenance.strategy", &config_str)) {
+ if (!strcasecmp(config_str, "incremental"))
+ strategy = incremental_strategy;
+ }
+ } else {
+ strategy = default_strategy;
+ }
- for (i = 0; i < TASK__COUNT; i++) {
+ for (size_t i = 0; i < TASK__COUNT; i++) {
int config_value;
- char *config_str;
strbuf_reset(&config_name);
strbuf_addf(&config_name, "maintenance.%s.enabled",
tasks[i].name);
-
if (!git_config_get_bool(config_name.buf, &config_value))
- tasks[i].enabled = config_value;
-
- strbuf_reset(&config_name);
- strbuf_addf(&config_name, "maintenance.%s.schedule",
- tasks[i].name);
+ strategy.tasks[i].enabled = config_value;
+ if (!strategy.tasks[i].enabled)
+ continue;
- if (!git_config_get_string(config_name.buf, &config_str)) {
- tasks[i].schedule = parse_schedule(config_str);
- free(config_str);
+ if (opts->schedule) {
+ strbuf_reset(&config_name);
+ strbuf_addf(&config_name, "maintenance.%s.schedule",
+ tasks[i].name);
+ if (!git_config_get_string_tmp(config_name.buf, &config_str))
+ strategy.tasks[i].schedule = parse_schedule(config_str);
+ if (strategy.tasks[i].schedule < opts->schedule)
+ continue;
}
+
+ ALLOC_GROW(opts->tasks, opts->tasks_nr + 1, opts->tasks_alloc);
+ opts->tasks[opts->tasks_nr++] = i;
}
strbuf_release(&config_name);
}
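
As a worked example of the explicit --task path described above, here is a hypothetical check; it is not part of the patch and relies only on the behaviour visible in this function.

#include <assert.h>

/* After `git maintenance run --task=commit-graph --task=gc` ... */
static void check_explicit_selection(const struct maintenance_run_opts *opts)
{
	assert(opts->tasks_nr == 2);
	assert(opts->tasks[0] == TASK_COMMIT_GRAPH);	/* command-line order is kept */
	assert(opts->tasks[1] == TASK_GC);
	/* neither maintenance.<task>.enabled nor maintenance.strategy is consulted */
}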
-static int task_option_parse(const struct option *opt UNUSED,
+static int task_option_parse(const struct option *opt,
const char *arg, int unset)
{
- int i, num_selected = 0;
- struct maintenance_task *task = NULL;
+ struct string_list *selected_tasks = opt->value;
+ size_t i;
BUG_ON_OPT_NEG(unset);
- for (i = 0; i < TASK__COUNT; i++) {
- if (tasks[i].selected_order >= 0)
- num_selected++;
- if (!strcasecmp(tasks[i].name, arg)) {
- task = &tasks[i];
- }
- }
-
- if (!task) {
+ for (i = 0; i < TASK__COUNT; i++)
+ if (!strcasecmp(tasks[i].name, arg))
+ break;
+ if (i >= TASK__COUNT) {
error(_("'%s' is not a valid task"), arg);
return 1;
}
- if (task->selected_order >= 0) {
+ if (unsorted_string_list_has_string(selected_tasks, arg)) {
error(_("task '%s' cannot be selected multiple times"), arg);
return 1;
}
- task->selected_order = num_selected + 1;
+ string_list_append(selected_tasks, arg)->util = (void *)(intptr_t)i;
return 0;
}
@@ -1759,8 +1831,8 @@ static int task_option_parse(const struct option *opt UNUSED,
static int maintenance_run(int argc, const char **argv, const char *prefix,
struct repository *repo UNUSED)
{
- int i;
struct maintenance_run_opts opts = MAINTENANCE_RUN_OPTS_INIT;
+ struct string_list selected_tasks = STRING_LIST_INIT_DUP;
struct gc_config cfg = GC_CONFIG_INIT;
struct option builtin_maintenance_run_options[] = {
OPT_BOOL(0, "auto", &opts.auto_flag,
@@ -1772,7 +1844,7 @@ static int maintenance_run(int argc, const char **argv, const char *prefix,
maintenance_opt_schedule),
OPT_BOOL(0, "quiet", &opts.quiet,
N_("do not report progress or other information over stderr")),
- OPT_CALLBACK_F(0, "task", NULL, N_("task"),
+ OPT_CALLBACK_F(0, "task", &selected_tasks, N_("task"),
N_("run a specific task"),
PARSE_OPT_NONEG, task_option_parse),
OPT_END()
@@ -1781,25 +1853,27 @@ static int maintenance_run(int argc, const char **argv, const char *prefix,
opts.quiet = !isatty(2);
- for (i = 0; i < TASK__COUNT; i++)
- tasks[i].selected_order = -1;
-
argc = parse_options(argc, argv, prefix,
builtin_maintenance_run_options,
builtin_maintenance_run_usage,
PARSE_OPT_STOP_AT_NON_OPTION);
- if (opts.auto_flag && opts.schedule)
- die(_("use at most one of --auto and --schedule=<frequency>"));
+ die_for_incompatible_opt2(opts.auto_flag, "--auto",
+ opts.schedule, "--schedule=");
+ die_for_incompatible_opt2(selected_tasks.nr, "--task=",
+ opts.schedule, "--schedule=");
gc_config(&cfg);
- initialize_task_config(opts.schedule);
+ initialize_task_config(&opts, &selected_tasks);
if (argc != 0)
usage_with_options(builtin_maintenance_run_usage,
builtin_maintenance_run_options);
ret = maintenance_run_tasks(&opts, &cfg);
+
+ string_list_clear(&selected_tasks, 0);
+ maintenance_run_opts_release(&opts);
gc_config_release(&cfg);
return ret;
}
@@ -3085,7 +3159,7 @@ static int update_background_schedule(const struct maintenance_start_opts *opts,
unsigned int i;
int result = 0;
struct lock_file lk;
- char *lock_path = xstrfmt("%s/schedule", the_repository->objects->odb->path);
+ char *lock_path = xstrfmt("%s/schedule", the_repository->objects->sources->path);
if (hold_lock_file_for_update(&lk, lock_path, LOCK_NO_DEREF) < 0) {
if (errno == EEXIST)
diff --git a/builtin/grep.c b/builtin/grep.c
index 3ce574a605..39273d9c0f 100644
--- a/builtin/grep.c
+++ b/builtin/grep.c
@@ -26,7 +26,7 @@
#include "submodule-config.h"
#include "object-file.h"
#include "object-name.h"
-#include "object-store.h"
+#include "odb.h"
#include "packfile.h"
#include "pager.h"
#include "path.h"
@@ -462,7 +462,7 @@ static int grep_submodule(struct grep_opt *opt,
/*
* NEEDSWORK: repo_read_gitmodules() might call
- * add_to_alternates_memory() via config_from_gitmodules(). This
+ * odb_add_to_alternates_memory() via config_from_gitmodules(). This
* operation causes a race condition with concurrent object readings
* performed by the worker threads. That's why we need obj_read_lock()
* here. It should be removed once it's no longer necessary to add the
@@ -505,7 +505,8 @@ static int grep_submodule(struct grep_opt *opt,
* lazily registered as alternates when needed (and except in an
* unexpected code interaction, it won't be needed).
*/
- add_submodule_odb_by_path(subrepo->objects->odb->path);
+ odb_add_submodule_source_by_path(the_repository->objects,
+ subrepo->objects->sources->path);
obj_read_unlock();
memcpy(&subopt, opt, sizeof(subopt));
@@ -519,11 +520,9 @@ static int grep_submodule(struct grep_opt *opt,
struct strbuf base = STRBUF_INIT;
obj_read_lock();
- object_type = oid_object_info(subrepo, oid, NULL);
+ object_type = odb_read_object_info(subrepo->objects, oid, NULL);
obj_read_unlock();
- data = read_object_with_reference(subrepo,
- oid, OBJ_TREE,
- &size, NULL);
+ data = odb_read_object_peeled(subrepo->objects, oid, OBJ_TREE, &size, NULL);
if (!data)
die(_("unable to read tree (%s)"), oid_to_hex(oid));
@@ -572,8 +571,8 @@ static int grep_cache(struct grep_opt *opt,
void *data;
unsigned long size;
- data = repo_read_object_file(the_repository, &ce->oid,
- &type, &size);
+ data = odb_read_object(the_repository->objects, &ce->oid,
+ &type, &size);
if (!data)
die(_("unable to read tree %s"), oid_to_hex(&ce->oid));
init_tree_desc(&tree, &ce->oid, data, size);
@@ -665,8 +664,8 @@ static int grep_tree(struct grep_opt *opt, const struct pathspec *pathspec,
void *data;
unsigned long size;
- data = repo_read_object_file(the_repository,
- &entry.oid, &type, &size);
+ data = odb_read_object(the_repository->objects,
+ &entry.oid, &type, &size);
if (!data)
die(_("unable to read tree (%s)"),
oid_to_hex(&entry.oid));
@@ -704,9 +703,8 @@ static int grep_object(struct grep_opt *opt, const struct pathspec *pathspec,
struct strbuf base;
int hit, len;
- data = read_object_with_reference(opt->repo,
- &obj->oid, OBJ_TREE,
- &size, NULL);
+ data = odb_read_object_peeled(opt->repo->objects, &obj->oid,
+ OBJ_TREE, &size, NULL);
if (!data)
die(_("unable to read tree (%s)"), oid_to_hex(&obj->oid));
diff --git a/builtin/hash-object.c b/builtin/hash-object.c
index 6a99ec250d..ddf281413a 100644
--- a/builtin/hash-object.c
+++ b/builtin/hash-object.c
@@ -11,7 +11,7 @@
#include "gettext.h"
#include "hex.h"
#include "object-file.h"
-#include "object-store.h"
+#include "odb.h"
#include "blob.h"
#include "quote.h"
#include "parse-options.h"
@@ -104,7 +104,7 @@ int cmd_hash_object(int argc,
prefix = setup_git_directory_gently(&nongit);
if (nongit && !the_hash_algo)
- repo_set_hash_algo(the_repository, GIT_HASH_SHA1);
+ repo_set_hash_algo(the_repository, GIT_HASH_DEFAULT);
if (vpath && prefix) {
vpath_free = prefix_filename(prefix, vpath);
diff --git a/builtin/index-pack.c b/builtin/index-pack.c
index bb7925bd29..0a5c8a1ac8 100644
--- a/builtin/index-pack.c
+++ b/builtin/index-pack.c
@@ -21,7 +21,7 @@
#include "packfile.h"
#include "pack-revindex.h"
#include "object-file.h"
-#include "object-store.h"
+#include "odb.h"
#include "oid-array.h"
#include "oidset.h"
#include "path.h"
@@ -260,7 +260,8 @@ static unsigned check_object(struct object *obj)
if (!(obj->flags & FLAG_CHECKED)) {
unsigned long size;
- int type = oid_object_info(the_repository, &obj->oid, &size);
+ int type = odb_read_object_info(the_repository->objects,
+ &obj->oid, &size);
if (type <= 0)
die(_("did not receive expected object %s"),
oid_to_hex(&obj->oid));
@@ -362,7 +363,7 @@ static const char *open_pack_file(const char *pack_name)
input_fd = 0;
if (!pack_name) {
struct strbuf tmp_file = STRBUF_INIT;
- output_fd = odb_mkstemp(&tmp_file,
+ output_fd = odb_mkstemp(the_repository->objects, &tmp_file,
"pack/tmp_pack_XXXXXX");
pack_name = strbuf_detach(&tmp_file, NULL);
} else {
@@ -892,8 +893,8 @@ static void sha1_object(const void *data, struct object_entry *obj_entry,
if (startup_info->have_repository) {
read_lock();
- collision_test_needed = has_object(the_repository, oid,
- HAS_OBJECT_FETCH_PROMISOR);
+ collision_test_needed = odb_has_object(the_repository->objects, oid,
+ HAS_OBJECT_FETCH_PROMISOR);
read_unlock();
}
@@ -908,13 +909,13 @@ static void sha1_object(const void *data, struct object_entry *obj_entry,
enum object_type has_type;
unsigned long has_size;
read_lock();
- has_type = oid_object_info(the_repository, oid, &has_size);
+ has_type = odb_read_object_info(the_repository->objects, oid, &has_size);
if (has_type < 0)
die(_("cannot read existing object info %s"), oid_to_hex(oid));
if (has_type != type || has_size != size)
die(_("SHA1 COLLISION FOUND WITH %s !"), oid_to_hex(oid));
- has_data = repo_read_object_file(the_repository, oid,
- &has_type, &has_size);
+ has_data = odb_read_object(the_repository->objects, oid,
+ &has_type, &has_size);
read_unlock();
if (!data)
data = new_data = get_data_from_pack(obj_entry);
@@ -1501,9 +1502,9 @@ static void fix_unresolved_deltas(struct hashfile *f)
struct oid_array to_fetch = OID_ARRAY_INIT;
for (i = 0; i < nr_ref_deltas; i++) {
struct ref_delta_entry *d = sorted_by_pos[i];
- if (!oid_object_info_extended(the_repository, &d->oid,
- NULL,
- OBJECT_INFO_FOR_PREFETCH))
+ if (!odb_read_object_info_extended(the_repository->objects,
+ &d->oid, NULL,
+ OBJECT_INFO_FOR_PREFETCH))
continue;
oid_array_append(&to_fetch, &d->oid);
}
@@ -1520,8 +1521,8 @@ static void fix_unresolved_deltas(struct hashfile *f)
if (objects[d->obj_no].real_type != OBJ_REF_DELTA)
continue;
- data = repo_read_object_file(the_repository, &d->oid, &type,
- &size);
+ data = odb_read_object(the_repository->objects, &d->oid,
+ &type, &size);
if (!data)
continue;
@@ -1829,7 +1830,7 @@ static void repack_local_links(void)
oidset_iter_init(&outgoing_links, &iter);
while ((oid = oidset_iter_next(&iter))) {
struct object_info info = OBJECT_INFO_INIT;
- if (oid_object_info_extended(the_repository, oid, &info, 0))
+ if (odb_read_object_info_extended(the_repository->objects, oid, &info, 0))
/* Missing; assume it is a promisor object */
continue;
if (info.whence == OI_PACKED && info.u.packed.pack->pack_promisor)
@@ -2034,7 +2035,7 @@ int cmd_index_pack(int argc,
* choice but to guess the object hash.
*/
if (!the_repository->hash_algo)
- repo_set_hash_algo(the_repository, GIT_HASH_SHA1);
+ repo_set_hash_algo(the_repository, GIT_HASH_DEFAULT);
opts.flags &= ~(WRITE_REV | WRITE_REV_VERIFY);
if (rev_index) {
diff --git a/builtin/log.c b/builtin/log.c
index b450cd3bde..fb42e094af 100644
--- a/builtin/log.c
+++ b/builtin/log.c
@@ -15,7 +15,7 @@
#include "hex.h"
#include "refs.h"
#include "object-name.h"
-#include "object-store.h"
+#include "odb.h"
#include "pager.h"
#include "color.h"
#include "commit.h"
@@ -113,6 +113,15 @@ struct log_config {
int fmt_patch_name_max;
char *fmt_pretty;
char *default_date_mode;
+
+#ifndef WITH_BREAKING_CHANGES
+ /*
+ * Note: git_log_config() does not touch this member and that
+ * is very deliberate. This member is only to be used to
+ * resurrect whatchanged that is deprecated.
+ */
+ int i_still_use_this;
+#endif
};
static void log_config_init(struct log_config *cfg)
@@ -267,6 +276,10 @@ static void cmd_log_init_finish(int argc, const char **argv, const char *prefix,
OPT__QUIET(&quiet, N_("suppress diff output")),
OPT_BOOL(0, "source", &source, N_("show source")),
OPT_BOOL(0, "use-mailmap", &mailmap, N_("use mail map file")),
+#ifndef WITH_BREAKING_CHANGES
+ OPT_HIDDEN_BOOL(0, "i-still-use-this", &cfg->i_still_use_this,
+ "<use this deprecated command>"),
+#endif
OPT_ALIAS(0, "mailmap", "use-mailmap"),
OPT_CALLBACK_F(0, "clear-decorations", NULL, NULL,
N_("clear all previously-defined decoration filters"),
@@ -378,129 +391,6 @@ static void cmd_log_init(int argc, const char **argv, const char *prefix,
cmd_log_init_finish(argc, argv, prefix, rev, opt, cfg);
}
-/*
- * This gives a rough estimate for how many commits we
- * will print out in the list.
- */
-static int estimate_commit_count(struct commit_list *list)
-{
- int n = 0;
-
- while (list) {
- struct commit *commit = list->item;
- unsigned int flags = commit->object.flags;
- list = list->next;
- if (!(flags & (TREESAME | UNINTERESTING)))
- n++;
- }
- return n;
-}
-
-static void show_early_header(struct rev_info *rev, const char *stage, int nr)
-{
- if (rev->shown_one) {
- rev->shown_one = 0;
- if (rev->commit_format != CMIT_FMT_ONELINE)
- putchar(rev->diffopt.line_termination);
- }
- fprintf(rev->diffopt.file, _("Final output: %d %s\n"), nr, stage);
-}
-
-static struct itimerval early_output_timer;
-
-static void log_show_early(struct rev_info *revs, struct commit_list *list)
-{
- int i = revs->early_output;
- int show_header = 1;
- int no_free = revs->diffopt.no_free;
-
- revs->diffopt.no_free = 0;
- sort_in_topological_order(&list, revs->sort_order);
- while (list && i) {
- struct commit *commit = list->item;
- switch (simplify_commit(revs, commit)) {
- case commit_show:
- if (show_header) {
- int n = estimate_commit_count(list);
- show_early_header(revs, "incomplete", n);
- show_header = 0;
- }
- log_tree_commit(revs, commit);
- i--;
- break;
- case commit_ignore:
- break;
- case commit_error:
- revs->diffopt.no_free = no_free;
- diff_free(&revs->diffopt);
- return;
- }
- list = list->next;
- }
-
- /* Did we already get enough commits for the early output? */
- if (!i) {
- revs->diffopt.no_free = 0;
- diff_free(&revs->diffopt);
- return;
- }
-
- /*
- * ..if no, then repeat it twice a second until we
- * do.
- *
- * NOTE! We don't use "it_interval", because if the
- * reader isn't listening, we want our output to be
- * throttled by the writing, and not have the timer
- * trigger every second even if we're blocked on a
- * reader!
- */
- early_output_timer.it_value.tv_sec = 0;
- early_output_timer.it_value.tv_usec = 500000;
- setitimer(ITIMER_REAL, &early_output_timer, NULL);
-}
-
-static void early_output(int signal UNUSED)
-{
- show_early_output = log_show_early;
-}
-
-static void setup_early_output(void)
-{
- struct sigaction sa;
-
- /*
- * Set up the signal handler, minimally intrusively:
- * we only set a single volatile integer word (not
- * using sigatomic_t - trying to avoid unnecessary
- * system dependencies and headers), and using
- * SA_RESTART.
- */
- memset(&sa, 0, sizeof(sa));
- sa.sa_handler = early_output;
- sigemptyset(&sa.sa_mask);
- sa.sa_flags = SA_RESTART;
- sigaction(SIGALRM, &sa, NULL);
-
- /*
- * If we can get the whole output in less than a
- * tenth of a second, don't even bother doing the
- * early-output thing..
- *
- * This is a one-time-only trigger.
- */
- early_output_timer.it_value.tv_sec = 0;
- early_output_timer.it_value.tv_usec = 100000;
- setitimer(ITIMER_REAL, &early_output_timer, NULL);
-}
-
-static void finish_early_output(struct rev_info *rev)
-{
- int n = estimate_commit_count(rev->commits);
- signal(SIGALRM, SIG_IGN);
- show_early_header(rev, "done", n);
-}
-
static int cmd_log_walk_no_free(struct rev_info *rev)
{
struct commit *commit;
@@ -508,15 +398,9 @@ static int cmd_log_walk_no_free(struct rev_info *rev)
int saved_dcctc = 0;
int result;
- if (rev->early_output)
- setup_early_output();
-
if (prepare_revision_walk(rev))
die(_("revision walk setup failed"));
- if (rev->early_output)
- finish_early_output(rev);
-
/*
* For --check and --exit-code, the exit code is based on CHECK_FAILED
* and HAS_CHANGES being accumulated in rev->diffopt, so be careful to
@@ -633,6 +517,7 @@ static int git_log_config(const char *var, const char *value,
return git_diff_ui_config(var, value, ctx, cb);
}
+#ifndef WITH_BREAKING_CHANGES
int cmd_whatchanged(int argc,
const char **argv,
const char *prefix,
@@ -656,6 +541,10 @@ int cmd_whatchanged(int argc,
opt.def = "HEAD";
opt.revarg_opt = REVARG_COMMITTISH;
cmd_log_init(argc, argv, prefix, &rev, &opt, &cfg);
+
+ if (!cfg.i_still_use_this)
+ you_still_use_that("git whatchanged");
+
if (!rev.diffopt.output_format)
rev.diffopt.output_format = DIFF_FORMAT_RAW;
@@ -665,6 +554,7 @@ int cmd_whatchanged(int argc,
log_config_release(&cfg);
return ret;
}
+#endif
static void show_tagger(const char *buf, struct rev_info *rev)
{
@@ -714,7 +604,7 @@ static int show_tag_object(const struct object_id *oid, struct rev_info *rev)
{
unsigned long size;
enum object_type type;
- char *buf = repo_read_object_file(the_repository, oid, &type, &size);
+ char *buf = odb_read_object(the_repository->objects, oid, &type, &size);
unsigned long offset = 0;
if (!buf)
diff --git a/builtin/ls-files.c b/builtin/ls-files.c
index be74f0a03b..ff975e7be0 100644
--- a/builtin/ls-files.c
+++ b/builtin/ls-files.c
@@ -25,7 +25,7 @@
#include "setup.h"
#include "sparse-index.h"
#include "submodule.h"
-#include "object-store.h"
+#include "odb.h"
#include "hex.h"
@@ -251,7 +251,7 @@ static void expand_objectsize(struct repository *repo, struct strbuf *line,
{
if (type == OBJ_BLOB) {
unsigned long size;
- if (oid_object_info(repo, oid, &size) < 0)
+ if (odb_read_object_info(repo->objects, oid, &size) < 0)
die(_("could not get object info about '%s'"),
oid_to_hex(oid));
if (padded)
diff --git a/builtin/ls-remote.c b/builtin/ls-remote.c
index 01a4d4daa1..df09000b30 100644
--- a/builtin/ls-remote.c
+++ b/builtin/ls-remote.c
@@ -112,7 +112,7 @@ int cmd_ls_remote(int argc,
* depending on what object hash the remote uses.
*/
if (!the_repository->hash_algo)
- repo_set_hash_algo(the_repository, GIT_HASH_SHA1);
+ repo_set_hash_algo(the_repository, GIT_HASH_DEFAULT);
packet_trace_identity("ls-remote");
diff --git a/builtin/ls-tree.c b/builtin/ls-tree.c
index 8aafc30ca4..4d616dd528 100644
--- a/builtin/ls-tree.c
+++ b/builtin/ls-tree.c
@@ -10,7 +10,7 @@
#include "gettext.h"
#include "hex.h"
#include "object-name.h"
-#include "object-store.h"
+#include "odb.h"
#include "tree.h"
#include "path.h"
#include "quote.h"
@@ -27,7 +27,7 @@ static void expand_objectsize(struct strbuf *line, const struct object_id *oid,
{
if (type == OBJ_BLOB) {
unsigned long size;
- if (oid_object_info(the_repository, oid, &size) < 0)
+ if (odb_read_object_info(the_repository->objects, oid, &size) < 0)
die(_("could not get object info about '%s'"),
oid_to_hex(oid));
if (padded)
@@ -217,7 +217,7 @@ static int show_tree_long(const struct object_id *oid, struct strbuf *base,
if (type == OBJ_BLOB) {
unsigned long size;
- if (oid_object_info(the_repository, oid, &size) == OBJ_BAD)
+ if (odb_read_object_info(the_repository->objects, oid, &size) == OBJ_BAD)
xsnprintf(size_text, sizeof(size_text), "BAD");
else
xsnprintf(size_text, sizeof(size_text),
diff --git a/builtin/merge-file.c b/builtin/merge-file.c
index 2b16b10d2c..9464f27562 100644
--- a/builtin/merge-file.c
+++ b/builtin/merge-file.c
@@ -7,7 +7,7 @@
#include "hex.h"
#include "object-file.h"
#include "object-name.h"
-#include "object-store.h"
+#include "odb.h"
#include "config.h"
#include "gettext.h"
#include "setup.h"
diff --git a/builtin/merge-tree.c b/builtin/merge-tree.c
index 7f41665dfd..cf8b06cadc 100644
--- a/builtin/merge-tree.c
+++ b/builtin/merge-tree.c
@@ -10,7 +10,7 @@
#include "commit-reach.h"
#include "merge-ort.h"
#include "object-name.h"
-#include "object-store.h"
+#include "odb.h"
#include "parse-options.h"
#include "blob.h"
#include "merge-blobs.h"
@@ -75,9 +75,9 @@ static void *result(struct merge_list *entry, unsigned long *size)
const char *path = entry->path;
if (!entry->stage)
- return repo_read_object_file(the_repository,
- &entry->blob->object.oid, &type,
- size);
+ return odb_read_object(the_repository->objects,
+ &entry->blob->object.oid, &type,
+ size);
base = NULL;
if (entry->stage == 1) {
base = entry->blob;
@@ -100,9 +100,9 @@ static void *origin(struct merge_list *entry, unsigned long *size)
enum object_type type;
while (entry) {
if (entry->stage == 2)
- return repo_read_object_file(the_repository,
- &entry->blob->object.oid,
- &type, size);
+ return odb_read_object(the_repository->objects,
+ &entry->blob->object.oid,
+ &type, size);
entry = entry->link;
}
return NULL;
diff --git a/builtin/merge.c b/builtin/merge.c
index ce90e52fe4..18b22c0a26 100644
--- a/builtin/merge.c
+++ b/builtin/merge.c
@@ -69,7 +69,10 @@ static const char * const builtin_merge_usage[] = {
NULL
};
-static int show_diffstat = 1, shortlog_len = -1, squash;
+#define MERGE_SHOW_DIFFSTAT 1
+#define MERGE_SHOW_COMPACTSUMMARY 2
+
+static int show_diffstat = MERGE_SHOW_DIFFSTAT, shortlog_len = -1, squash;
static int option_commit = -1;
static int option_edit = -1;
static int allow_trivial = 1, have_message, verify_signatures;
@@ -243,12 +246,28 @@ static int option_parse_strategy(const struct option *opt UNUSED,
return 0;
}
+static int option_parse_compact_summary(const struct option *opt,
+ const char *name UNUSED, int unset)
+{
+ int *setting = opt->value;
+
+ if (unset)
+ *setting = 0;
+ else
+ *setting = MERGE_SHOW_COMPACTSUMMARY;
+ return 0;
+}
+
static struct option builtin_merge_options[] = {
OPT_SET_INT('n', NULL, &show_diffstat,
N_("do not show a diffstat at the end of the merge"), 0),
OPT_BOOL(0, "stat", &show_diffstat,
N_("show a diffstat at the end of the merge")),
OPT_BOOL(0, "summary", &show_diffstat, N_("(synonym to --stat)")),
+ OPT_CALLBACK_F(0, "compact-summary", &show_diffstat, N_("compact-summary"),
+ N_("show a compact-summary at the end of the merge"),
+ PARSE_OPT_NOARG,
+ option_parse_compact_summary),
{
.type = OPTION_INTEGER,
.long_name = "log",
@@ -494,8 +513,19 @@ static void finish(struct commit *head_commit,
struct diff_options opts;
repo_diff_setup(the_repository, &opts);
init_diffstat_widths(&opts);
- opts.output_format |=
- DIFF_FORMAT_SUMMARY | DIFF_FORMAT_DIFFSTAT;
+
+ switch (show_diffstat) {
+ case MERGE_SHOW_DIFFSTAT: /* 1 */
+ opts.output_format |=
+ DIFF_FORMAT_SUMMARY | DIFF_FORMAT_DIFFSTAT;
+ break;
+ case MERGE_SHOW_COMPACTSUMMARY: /* 2 */
+ opts.output_format |= DIFF_FORMAT_DIFFSTAT;
+ opts.flags.stat_with_summary = 1;
+ break;
+ default:
+ break;
+ }
opts.detect_rename = DIFF_DETECT_RENAME;
diff_setup_done(&opts);
diff_tree_oid(head, new_head, "", &opts);
@@ -643,7 +673,35 @@ static int git_merge_config(const char *k, const char *v,
}
if (!strcmp(k, "merge.diffstat") || !strcmp(k, "merge.stat")) {
- show_diffstat = git_config_bool(k, v);
+ int val = git_parse_maybe_bool_text(v);
+ switch (val) {
+ case 0:
+ show_diffstat = 0;
+ break;
+ case 1:
+ show_diffstat = MERGE_SHOW_DIFFSTAT;
+ break;
+ default:
+ if (!strcmp(v, "compact"))
+ show_diffstat = MERGE_SHOW_COMPACTSUMMARY;
+ /*
+ * We do not need to have an explicit
+ *
+ * else if (!strcmp(v, "diffstat"))
+ * show_diffstat = MERGE_SHOW_DIFFSTAT;
+ *
+ * here, because the catch-all uses the
+ * diffstat style anyway.
+ */
+ else
+ /*
+ * A setting from the future? It is not an
+ * error grave enough to fail the command;
+ * proceed using the default one.
+ */
+ show_diffstat = MERGE_SHOW_DIFFSTAT;
+ break;
+ }
} else if (!strcmp(k, "merge.verifysignatures")) {
verify_signatures = git_config_bool(k, v);
} else if (!strcmp(k, "pull.twohead")) {
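
Summarizing the new parsing in the merge.c hunk above (editor's reading, not text from the patch):

/*
 * merge.stat / merge.diffstat value  ->  show_diffstat
 *   false-ish ("false", "no", "0")       0                           (no stat)
 *   true-ish  ("true", "yes", "1")       MERGE_SHOW_DIFFSTAT         (classic diffstat)
 *   "compact"                            MERGE_SHOW_COMPACTSUMMARY   (like --compact-summary)
 *   anything else                        MERGE_SHOW_DIFFSTAT         (silent fallback)
 */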
diff --git a/builtin/mktag.c b/builtin/mktag.c
index 7ac11c46d5..27e649736c 100644
--- a/builtin/mktag.c
+++ b/builtin/mktag.c
@@ -6,7 +6,7 @@
#include "strbuf.h"
#include "replace-object.h"
#include "object-file.h"
-#include "object-store.h"
+#include "odb.h"
#include "fsck.h"
#include "config.h"
@@ -41,7 +41,7 @@ static int mktag_fsck_error_func(struct fsck_options *o UNUSED,
fprintf_ln(stderr, _("error: tag input does not pass fsck: %s"), message);
return 1;
default:
- BUG(_("%d (FSCK_IGNORE?) should never trigger this callback"),
+ BUG("%d (FSCK_IGNORE?) should never trigger this callback",
msg_type);
}
}
@@ -54,8 +54,8 @@ static int verify_object_in_tag(struct object_id *tagged_oid, int *tagged_type)
void *buffer;
const struct object_id *repl;
- buffer = repo_read_object_file(the_repository, tagged_oid, &type,
- &size);
+ buffer = odb_read_object(the_repository->objects, tagged_oid,
+ &type, &size);
if (!buffer)
die(_("could not read tagged object '%s'"),
oid_to_hex(tagged_oid));
diff --git a/builtin/mktree.c b/builtin/mktree.c
index 4b47803467..81df7f6099 100644
--- a/builtin/mktree.c
+++ b/builtin/mktree.c
@@ -12,7 +12,7 @@
#include "tree.h"
#include "parse-options.h"
#include "object-file.h"
-#include "object-store.h"
+#include "odb.h"
static struct treeent {
unsigned mode;
@@ -124,10 +124,10 @@ static void mktree_line(char *buf, int nul_term_line, int allow_missing)
/* Check the type of object identified by oid without fetching objects */
oi.typep = &obj_type;
- if (oid_object_info_extended(the_repository, &oid, &oi,
- OBJECT_INFO_LOOKUP_REPLACE |
- OBJECT_INFO_QUICK |
- OBJECT_INFO_SKIP_FETCH_OBJECT) < 0)
+ if (odb_read_object_info_extended(the_repository->objects, &oid, &oi,
+ OBJECT_INFO_LOOKUP_REPLACE |
+ OBJECT_INFO_QUICK |
+ OBJECT_INFO_SKIP_FETCH_OBJECT) < 0)
obj_type = -1;
if (obj_type < 0) {
diff --git a/builtin/multi-pack-index.c b/builtin/multi-pack-index.c
index 69a9750732..aa25b06f9d 100644
--- a/builtin/multi-pack-index.c
+++ b/builtin/multi-pack-index.c
@@ -7,7 +7,7 @@
#include "midx.h"
#include "strbuf.h"
#include "trace2.h"
-#include "object-store.h"
+#include "odb.h"
#include "replace-object.h"
#include "repository.h"
@@ -294,8 +294,8 @@ int cmd_multi_pack_index(int argc,
if (the_repository &&
the_repository->objects &&
- the_repository->objects->odb)
- opts.object_dir = xstrdup(the_repository->objects->odb->path);
+ the_repository->objects->sources)
+ opts.object_dir = xstrdup(the_repository->objects->sources->path);
argc = parse_options(argc, argv, prefix, options,
builtin_multi_pack_index_usage, 0);
diff --git a/builtin/notes.c b/builtin/notes.c
index a3f433ca4c..a9529b1696 100644
--- a/builtin/notes.c
+++ b/builtin/notes.c
@@ -16,7 +16,7 @@
#include "notes.h"
#include "object-file.h"
#include "object-name.h"
-#include "object-store.h"
+#include "odb.h"
#include "path.h"
#include "pretty.h"
@@ -152,7 +152,7 @@ static void copy_obj_to_fd(int fd, const struct object_id *oid)
{
unsigned long size;
enum object_type type;
- char *buf = repo_read_object_file(the_repository, oid, &type, &size);
+ char *buf = odb_read_object(the_repository->objects, oid, &type, &size);
if (buf) {
if (size)
write_or_die(fd, buf, size);
@@ -319,7 +319,7 @@ static int parse_reuse_arg(const struct option *opt, const char *arg, int unset)
strbuf_init(&msg->buf, 0);
if (repo_get_oid(the_repository, arg, &object))
die(_("failed to resolve '%s' as a valid ref."), arg);
- if (!(value = repo_read_object_file(the_repository, &object, &type, &len)))
+ if (!(value = odb_read_object(the_repository->objects, &object, &type, &len)))
die(_("failed to read object '%s'."), arg);
if (type != OBJ_BLOB) {
strbuf_release(&msg->buf);
@@ -722,7 +722,7 @@ static int append_edit(int argc, const char **argv, const char *prefix,
unsigned long size;
enum object_type type;
struct strbuf buf = STRBUF_INIT;
- char *prev_buf = repo_read_object_file(the_repository, note, &type, &size);
+ char *prev_buf = odb_read_object(the_repository->objects, note, &type, &size);
if (!prev_buf)
die(_("unable to read %s"), oid_to_hex(note));
diff --git a/builtin/pack-objects.c b/builtin/pack-objects.c
index 8b33edc2ff..067b9e322a 100644
--- a/builtin/pack-objects.c
+++ b/builtin/pack-objects.c
@@ -32,7 +32,7 @@
#include "list.h"
#include "packfile.h"
#include "object-file.h"
-#include "object-store.h"
+#include "odb.h"
#include "replace-object.h"
#include "dir.h"
#include "midx.h"
@@ -41,6 +41,10 @@
#include "promisor-remote.h"
#include "pack-mtimes.h"
#include "parse-options.h"
+#include "blob.h"
+#include "tree.h"
+#include "path-walk.h"
+#include "trace2.h"
/*
* Objects we are going to pack are collected in the `to_pack` structure.
@@ -184,8 +188,14 @@ static inline void oe_set_delta_size(struct packing_data *pack,
#define SET_DELTA_SIBLING(obj, val) oe_set_delta_sibling(&to_pack, obj, val)
static const char *const pack_usage[] = {
- N_("git pack-objects --stdout [<options>] [< <ref-list> | < <object-list>]"),
- N_("git pack-objects [<options>] <base-name> [< <ref-list> | < <object-list>]"),
+ N_("git pack-objects [-q | --progress | --all-progress] [--all-progress-implied]\n"
+ " [--no-reuse-delta] [--delta-base-offset] [--non-empty]\n"
+ " [--local] [--incremental] [--window=<n>] [--depth=<n>]\n"
+ " [--revs [--unpacked | --all]] [--keep-pack=<pack-name>]\n"
+ " [--cruft] [--cruft-expiration=<time>]\n"
+ " [--stdout [--filter=<filter-spec>] | <base-name>]\n"
+ " [--shallow] [--keep-true-parents] [--[no-]sparse]\n"
+ " [--name-hash-version=<n>] [--path-walk] < <object-list>"),
NULL
};
@@ -200,6 +210,7 @@ static int keep_unreachable, unpack_unreachable, include_tag;
static timestamp_t unpack_unreachable_expiration;
static int pack_loose_unreachable;
static int cruft;
+static int shallow = 0;
static timestamp_t cruft_expiration;
static int local;
static int have_non_local_packs;
@@ -218,6 +229,7 @@ static int delta_search_threads;
static int pack_to_stdout;
static int sparse;
static int thin;
+static int path_walk = -1;
static int num_preferred_base;
static struct progress *progress_state;
@@ -272,6 +284,12 @@ static struct oidmap configured_exclusions;
static struct oidset excluded_by_config;
static int name_hash_version = -1;
+enum stdin_packs_mode {
+ STDIN_PACKS_MODE_NONE,
+ STDIN_PACKS_MODE_STANDARD,
+ STDIN_PACKS_MODE_FOLLOW,
+};
+
/**
* Check whether the name_hash_version chosen by user input is appropriate,
* and also validate whether it is compatible with other features.
@@ -337,13 +355,13 @@ static void *get_delta(struct object_entry *entry)
void *buf, *base_buf, *delta_buf;
enum object_type type;
- buf = repo_read_object_file(the_repository, &entry->idx.oid, &type,
- &size);
+ buf = odb_read_object(the_repository->objects, &entry->idx.oid,
+ &type, &size);
if (!buf)
die(_("unable to read %s"), oid_to_hex(&entry->idx.oid));
- base_buf = repo_read_object_file(the_repository,
- &DELTA(entry)->idx.oid, &type,
- &base_size);
+ base_buf = odb_read_object(the_repository->objects,
+ &DELTA(entry)->idx.oid, &type,
+ &base_size);
if (!base_buf)
die("unable to read %s",
oid_to_hex(&DELTA(entry)->idx.oid));
@@ -506,9 +524,9 @@ static unsigned long write_no_reuse_object(struct hashfile *f, struct object_ent
&size, NULL)) != NULL)
buf = NULL;
else {
- buf = repo_read_object_file(the_repository,
- &entry->idx.oid, &type,
- &size);
+ buf = odb_read_object(the_repository->objects,
+ &entry->idx.oid, &type,
+ &size);
if (!buf)
die(_("unable to read %s"),
oid_to_hex(&entry->idx.oid));
@@ -1895,7 +1913,7 @@ static struct pbase_tree_cache *pbase_tree_get(const struct object_id *oid)
/* Did not find one. Either we got a bogus request or
* we need to read and perhaps cache.
*/
- data = repo_read_object_file(the_repository, oid, &type, &size);
+ data = odb_read_object(the_repository->objects, oid, &type, &size);
if (!data)
return NULL;
if (type != OBJ_TREE) {
@@ -2055,8 +2073,8 @@ static void add_preferred_base(struct object_id *oid)
if (window <= num_preferred_base++)
return;
- data = read_object_with_reference(the_repository, oid,
- OBJ_TREE, &size, &tree_oid);
+ data = odb_read_object_peeled(the_repository->objects, oid,
+ OBJ_TREE, &size, &tree_oid);
if (!data)
return;
@@ -2154,10 +2172,10 @@ static void prefetch_to_pack(uint32_t object_index_start) {
for (i = object_index_start; i < to_pack.nr_objects; i++) {
struct object_entry *entry = to_pack.objects + i;
- if (!oid_object_info_extended(the_repository,
- &entry->idx.oid,
- NULL,
- OBJECT_INFO_FOR_PREFETCH))
+ if (!odb_read_object_info_extended(the_repository->objects,
+ &entry->idx.oid,
+ NULL,
+ OBJECT_INFO_FOR_PREFETCH))
continue;
oid_array_append(&to_fetch, &entry->idx.oid);
}
@@ -2298,19 +2316,19 @@ static void check_object(struct object_entry *entry, uint32_t object_index)
/*
* No choice but to fall back to the recursive delta walk
- * with oid_object_info() to find about the object type
+ * with odb_read_object_info() to find about the object type
* at this point...
*/
give_up:
unuse_pack(&w_curs);
}
- if (oid_object_info_extended(the_repository, &entry->idx.oid, &oi,
- OBJECT_INFO_SKIP_FETCH_OBJECT | OBJECT_INFO_LOOKUP_REPLACE) < 0) {
+ if (odb_read_object_info_extended(the_repository->objects, &entry->idx.oid, &oi,
+ OBJECT_INFO_SKIP_FETCH_OBJECT | OBJECT_INFO_LOOKUP_REPLACE) < 0) {
if (repo_has_promisor_remote(the_repository)) {
prefetch_to_pack(object_index);
- if (oid_object_info_extended(the_repository, &entry->idx.oid, &oi,
- OBJECT_INFO_SKIP_FETCH_OBJECT | OBJECT_INFO_LOOKUP_REPLACE) < 0)
+ if (odb_read_object_info_extended(the_repository->objects, &entry->idx.oid, &oi,
+ OBJECT_INFO_SKIP_FETCH_OBJECT | OBJECT_INFO_LOOKUP_REPLACE) < 0)
type = -1;
} else {
type = -1;
@@ -2384,12 +2402,13 @@ static void drop_reused_delta(struct object_entry *entry)
if (packed_object_info(the_repository, IN_PACK(entry), entry->in_pack_offset, &oi) < 0) {
/*
* We failed to get the info from this pack for some reason;
- * fall back to oid_object_info, which may find another copy.
+ * fall back to odb_read_object_info, which may find another copy.
* And if that fails, the error will be recorded in oe_type(entry)
* and dealt with in prepare_pack().
*/
oe_set_type(entry,
- oid_object_info(the_repository, &entry->idx.oid, &size));
+ odb_read_object_info(the_repository->objects,
+ &entry->idx.oid, &size));
} else {
oe_set_type(entry, type);
}
@@ -2677,7 +2696,8 @@ unsigned long oe_get_size_slow(struct packing_data *pack,
if (e->type_ != OBJ_OFS_DELTA && e->type_ != OBJ_REF_DELTA) {
packing_data_lock(&to_pack);
- if (oid_object_info(the_repository, &e->idx.oid, &size) < 0)
+ if (odb_read_object_info(the_repository->objects,
+ &e->idx.oid, &size) < 0)
die(_("unable to get size of %s"),
oid_to_hex(&e->idx.oid));
packing_data_unlock(&to_pack);
@@ -2760,9 +2780,9 @@ static int try_delta(struct unpacked *trg, struct unpacked *src,
/* Load data if not already done */
if (!trg->data) {
packing_data_lock(&to_pack);
- trg->data = repo_read_object_file(the_repository,
- &trg_entry->idx.oid, &type,
- &sz);
+ trg->data = odb_read_object(the_repository->objects,
+ &trg_entry->idx.oid, &type,
+ &sz);
packing_data_unlock(&to_pack);
if (!trg->data)
die(_("object %s cannot be read"),
@@ -2775,9 +2795,9 @@ static int try_delta(struct unpacked *trg, struct unpacked *src,
}
if (!src->data) {
packing_data_lock(&to_pack);
- src->data = repo_read_object_file(the_repository,
- &src_entry->idx.oid, &type,
- &sz);
+ src->data = odb_read_object(the_repository->objects,
+ &src_entry->idx.oid, &type,
+ &sz);
packing_data_unlock(&to_pack);
if (!src->data) {
if (src_entry->preferred_base) {
@@ -3041,6 +3061,7 @@ static void find_deltas(struct object_entry **list, unsigned *list_size,
struct thread_params {
pthread_t thread;
struct object_entry **list;
+ struct packing_region *regions;
unsigned list_size;
unsigned remaining;
int window;
@@ -3283,6 +3304,242 @@ static int add_ref_tag(const char *tag UNUSED, const char *referent UNUSED, cons
return 0;
}
+static int should_attempt_deltas(struct object_entry *entry)
+{
+ if (DELTA(entry))
+ /* This happens if we decided to reuse existing
+ * delta from a pack. "reuse_delta &&" is implied.
+ */
+ return 0;
+
+ if (!entry->type_valid ||
+ oe_size_less_than(&to_pack, entry, 50))
+ return 0;
+
+ if (entry->no_try_delta)
+ return 0;
+
+ if (!entry->preferred_base) {
+ if (oe_type(entry) < 0)
+ die(_("unable to get type of object %s"),
+ oid_to_hex(&entry->idx.oid));
+ } else if (oe_type(entry) < 0) {
+ /*
+ * This object is not found, but we
+ * don't have to include it anyway.
+ */
+ return 0;
+ }
+
+ return 1;
+}
+
+static void find_deltas_for_region(struct object_entry *list,
+ struct packing_region *region,
+ unsigned int *processed)
+{
+ struct object_entry **delta_list;
+ unsigned int delta_list_nr = 0;
+
+ ALLOC_ARRAY(delta_list, region->nr);
+ for (size_t i = 0; i < region->nr; i++) {
+ struct object_entry *entry = list + region->start + i;
+ if (should_attempt_deltas(entry))
+ delta_list[delta_list_nr++] = entry;
+ }
+
+ QSORT(delta_list, delta_list_nr, type_size_sort);
+ find_deltas(delta_list, &delta_list_nr, window, depth, processed);
+ free(delta_list);
+}
+
+static void find_deltas_by_region(struct object_entry *list,
+ struct packing_region *regions,
+ size_t start, size_t nr)
+{
+ unsigned int processed = 0;
+ size_t progress_nr;
+
+ if (!nr)
+ return;
+
+ progress_nr = regions[nr - 1].start + regions[nr - 1].nr;
+
+ if (progress)
+ progress_state = start_progress(the_repository,
+ _("Compressing objects by path"),
+ progress_nr);
+
+ while (nr--)
+ find_deltas_for_region(list,
+ &regions[start++],
+ &processed);
+
+ display_progress(progress_state, progress_nr);
+ stop_progress(&progress_state);
+}
+
+static void *threaded_find_deltas_by_path(void *arg)
+{
+ struct thread_params *me = arg;
+
+ progress_lock();
+ while (me->remaining) {
+ while (me->remaining) {
+ progress_unlock();
+ find_deltas_for_region(to_pack.objects,
+ me->regions,
+ me->processed);
+ progress_lock();
+ me->remaining--;
+ me->regions++;
+ }
+
+ me->working = 0;
+ pthread_cond_signal(&progress_cond);
+ progress_unlock();
+
+ /*
+ * We must not set ->data_ready before we wait on the
+ * condition because the main thread may have set it to 1
+ * before we get here. In order to be sure that new
+ * work is available if we see 1 in ->data_ready, it
+ * was initialized to 0 before this thread was spawned
+ * and we reset it to 0 right away.
+ */
+ pthread_mutex_lock(&me->mutex);
+ while (!me->data_ready)
+ pthread_cond_wait(&me->cond, &me->mutex);
+ me->data_ready = 0;
+ pthread_mutex_unlock(&me->mutex);
+
+ progress_lock();
+ }
+ progress_unlock();
+ /* leave ->working 1 so that this doesn't get more work assigned */
+ return NULL;
+}
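
The data_ready comment above describes a standard condition-variable handshake. A self-contained sketch of the same pattern follows; it is an illustration only, the handoff names are invented and nothing here is taken from the patch.

#include <pthread.h>

struct handoff {
	pthread_mutex_t mutex;
	pthread_cond_t cond;
	int data_ready;
};

/* producer: publish the work, then signal under the mutex */
static void handoff_post(struct handoff *h)
{
	pthread_mutex_lock(&h->mutex);
	h->data_ready = 1;
	pthread_cond_signal(&h->cond);
	pthread_mutex_unlock(&h->mutex);
}

/* consumer: the flag guards against lost and spurious wakeups */
static void handoff_wait(struct handoff *h)
{
	pthread_mutex_lock(&h->mutex);
	while (!h->data_ready)
		pthread_cond_wait(&h->cond, &h->mutex);
	h->data_ready = 0;
	pthread_mutex_unlock(&h->mutex);
}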
+
+static void ll_find_deltas_by_region(struct object_entry *list,
+ struct packing_region *regions,
+ uint32_t start, uint32_t nr)
+{
+ struct thread_params *p;
+ int i, ret, active_threads = 0;
+ unsigned int processed = 0;
+ uint32_t progress_nr;
+ init_threaded_search();
+
+ if (!nr)
+ return;
+
+ progress_nr = regions[nr - 1].start + regions[nr - 1].nr;
+ if (delta_search_threads <= 1) {
+ find_deltas_by_region(list, regions, start, nr);
+ cleanup_threaded_search();
+ return;
+ }
+
+ if (progress > pack_to_stdout)
+ fprintf_ln(stderr,
+ Q_("Path-based delta compression using up to %d thread",
+ "Path-based delta compression using up to %d threads",
+ delta_search_threads),
+ delta_search_threads);
+ CALLOC_ARRAY(p, delta_search_threads);
+
+ if (progress)
+ progress_state = start_progress(the_repository,
+ _("Compressing objects by path"),
+ progress_nr);
+ /* Partition the work amongst work threads. */
+ for (i = 0; i < delta_search_threads; i++) {
+ unsigned sub_size = nr / (delta_search_threads - i);
+
+ p[i].window = window;
+ p[i].depth = depth;
+ p[i].processed = &processed;
+ p[i].working = 1;
+ p[i].data_ready = 0;
+
+ p[i].regions = regions;
+ p[i].list_size = sub_size;
+ p[i].remaining = sub_size;
+
+ regions += sub_size;
+ nr -= sub_size;
+ }
+
+ /* Start work threads. */
+ for (i = 0; i < delta_search_threads; i++) {
+ if (!p[i].list_size)
+ continue;
+ pthread_mutex_init(&p[i].mutex, NULL);
+ pthread_cond_init(&p[i].cond, NULL);
+ ret = pthread_create(&p[i].thread, NULL,
+ threaded_find_deltas_by_path, &p[i]);
+ if (ret)
+ die(_("unable to create thread: %s"), strerror(ret));
+ active_threads++;
+ }
+
+ /*
+ * Now let's wait for work completion. Each time a thread is done
+ * with its work, we steal half of the remaining work from the
+ * thread with the largest number of unprocessed objects and give
+ * it to that newly idle thread. This ensures good load balancing
+ * until the remaining object list segments are simply too short
+ * to be worth splitting anymore.
+ */
+ while (active_threads) {
+ struct thread_params *target = NULL;
+ struct thread_params *victim = NULL;
+ unsigned sub_size = 0;
+
+ progress_lock();
+ for (;;) {
+ for (i = 0; !target && i < delta_search_threads; i++)
+ if (!p[i].working)
+ target = &p[i];
+ if (target)
+ break;
+ pthread_cond_wait(&progress_cond, &progress_mutex);
+ }
+
+ for (i = 0; i < delta_search_threads; i++)
+ if (p[i].remaining > 2*window &&
+ (!victim || victim->remaining < p[i].remaining))
+ victim = &p[i];
+ if (victim) {
+ sub_size = victim->remaining / 2;
+ target->regions = victim->regions + victim->remaining - sub_size;
+ victim->list_size -= sub_size;
+ victim->remaining -= sub_size;
+ }
+ target->list_size = sub_size;
+ target->remaining = sub_size;
+ target->working = 1;
+ progress_unlock();
+
+ pthread_mutex_lock(&target->mutex);
+ target->data_ready = 1;
+ pthread_cond_signal(&target->cond);
+ pthread_mutex_unlock(&target->mutex);
+
+ if (!sub_size) {
+ pthread_join(target->thread, NULL);
+ pthread_cond_destroy(&target->cond);
+ pthread_mutex_destroy(&target->mutex);
+ active_threads--;
+ }
+ }
+ cleanup_threaded_search();
+ free(p);
+
+ display_progress(progress_state, progress_nr);
+ stop_progress(&progress_state);
+}
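
The work-stealing comment above is easier to follow with the split spelled out. A minimal sketch of the arithmetic (invented names, not part of the patch):

#include <stddef.h>

struct span { size_t start, len; };

/* Hand the idle thread the upper half of the victim's remaining regions. */
static struct span steal_upper_half(struct span *victim)
{
	struct span stolen;

	stolen.len = victim->len / 2;		/* e.g. 10 remaining -> steal 5 */
	stolen.start = victim->start + victim->len - stolen.len;
	victim->len -= stolen.len;		/* victim keeps the lower half */
	return stolen;
}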
+
static void prepare_pack(int window, int depth)
{
struct object_entry **delta_list;
@@ -3307,39 +3564,21 @@ static void prepare_pack(int window, int depth)
if (!to_pack.nr_objects || !window || !depth)
return;
+ if (path_walk)
+ ll_find_deltas_by_region(to_pack.objects, to_pack.regions,
+ 0, to_pack.nr_regions);
+
ALLOC_ARRAY(delta_list, to_pack.nr_objects);
nr_deltas = n = 0;
for (i = 0; i < to_pack.nr_objects; i++) {
struct object_entry *entry = to_pack.objects + i;
- if (DELTA(entry))
- /* This happens if we decided to reuse existing
- * delta from a pack. "reuse_delta &&" is implied.
- */
- continue;
-
- if (!entry->type_valid ||
- oe_size_less_than(&to_pack, entry, 50))
+ if (!should_attempt_deltas(entry))
continue;
- if (entry->no_try_delta)
- continue;
-
- if (!entry->preferred_base) {
+ if (!entry->preferred_base)
nr_deltas++;
- if (oe_type(entry) < 0)
- die(_("unable to get type of object %s"),
- oid_to_hex(&entry->idx.oid));
- } else {
- if (oe_type(entry) < 0) {
- /*
- * This object is not found, but we
- * don't have to include it anyway.
- */
- continue;
- }
- }
delta_list[n++] = entry;
}
@@ -3494,7 +3733,6 @@ static int add_object_entry_from_pack(const struct object_id *oid,
return 0;
if (p) {
- struct rev_info *revs = _data;
struct object_info oi = OBJECT_INFO_INIT;
oi.typep = &type;
@@ -3502,6 +3740,7 @@ static int add_object_entry_from_pack(const struct object_id *oid,
die(_("could not get type of object %s in pack %s"),
oid_to_hex(oid), p->pack_name);
} else if (type == OBJ_COMMIT) {
+ struct rev_info *revs = _data;
/*
* commits in included packs are used as starting points for the
* subsequent revision walk
@@ -3517,32 +3756,48 @@ static int add_object_entry_from_pack(const struct object_id *oid,
return 0;
}
-static void show_commit_pack_hint(struct commit *commit UNUSED,
- void *data UNUSED)
+static void show_object_pack_hint(struct object *object, const char *name,
+ void *data)
{
- /* nothing to do; commits don't have a namehash */
+ enum stdin_packs_mode mode = *(enum stdin_packs_mode *)data;
+ if (mode == STDIN_PACKS_MODE_FOLLOW) {
+ if (object->type == OBJ_BLOB &&
+ !has_object(the_repository, &object->oid, 0))
+ return;
+ add_object_entry(&object->oid, object->type, name, 0);
+ } else {
+ struct object_entry *oe = packlist_find(&to_pack, &object->oid);
+ if (!oe)
+ return;
+
+ /*
+ * Our 'to_pack' list was constructed by iterating all
+ * objects packed in included packs, and so doesn't have
+ * a non-zero hash field that you would typically pick
+ * up during a reachability traversal.
+ *
+ * Make a best-effort attempt to fill in the ->hash and
+ * ->no_try_delta fields here in order to perhaps
+ * improve the delta selection process.
+ */
+ oe->hash = pack_name_hash_fn(name);
+ oe->no_try_delta = name && no_try_delta(name);
+
+ stdin_packs_hints_nr++;
+ }
}
-static void show_object_pack_hint(struct object *object, const char *name,
- void *data UNUSED)
+static void show_commit_pack_hint(struct commit *commit, void *data)
{
- struct object_entry *oe = packlist_find(&to_pack, &object->oid);
- if (!oe)
+ enum stdin_packs_mode mode = *(enum stdin_packs_mode *)data;
+
+ if (mode == STDIN_PACKS_MODE_FOLLOW) {
+ show_object_pack_hint((struct object *)commit, "", data);
return;
+ }
- /*
- * Our 'to_pack' list was constructed by iterating all objects packed in
- * included packs, and so doesn't have a non-zero hash field that you
- * would typically pick up during a reachability traversal.
- *
- * Make a best-effort attempt to fill in the ->hash and ->no_try_delta
- * here using a now in order to perhaps improve the delta selection
- * process.
- */
- oe->hash = pack_name_hash_fn(name);
- oe->no_try_delta = name && no_try_delta(name);
+ /* nothing to do; commits don't have a namehash */
- stdin_packs_hints_nr++;
}
static int pack_mtime_cmp(const void *_a, const void *_b)
@@ -3562,7 +3817,7 @@ static int pack_mtime_cmp(const void *_a, const void *_b)
return 0;
}
-static void read_packs_list_from_stdin(void)
+static void read_packs_list_from_stdin(struct rev_info *revs)
{
struct strbuf buf = STRBUF_INIT;
struct string_list include_packs = STRING_LIST_INIT_DUP;
@@ -3570,24 +3825,6 @@ static void read_packs_list_from_stdin(void)
struct string_list_item *item = NULL;
struct packed_git *p;
- struct rev_info revs;
-
- repo_init_revisions(the_repository, &revs, NULL);
- /*
- * Use a revision walk to fill in the namehash of objects in the include
- * packs. To save time, we'll avoid traversing through objects that are
- * in excluded packs.
- *
- * That may cause us to avoid populating all of the namehash fields of
- * all included objects, but our goal is best-effort, since this is only
- * an optimization during delta selection.
- */
- revs.no_kept_objects = 1;
- revs.keep_pack_cache_flags |= IN_CORE_KEEP_PACKS;
- revs.blob_objects = 1;
- revs.tree_objects = 1;
- revs.tag_objects = 1;
- revs.ignore_missing_links = 1;
while (strbuf_getline(&buf, stdin) != EOF) {
if (!buf.len)
@@ -3657,25 +3894,55 @@ static void read_packs_list_from_stdin(void)
struct packed_git *p = item->util;
for_each_object_in_pack(p,
add_object_entry_from_pack,
- &revs,
+ revs,
FOR_EACH_OBJECT_PACK_ORDER);
}
+ strbuf_release(&buf);
+ string_list_clear(&include_packs, 0);
+ string_list_clear(&exclude_packs, 0);
+}
+
+static void add_unreachable_loose_objects(struct rev_info *revs);
+
+static void read_stdin_packs(enum stdin_packs_mode mode, int rev_list_unpacked)
+{
+ struct rev_info revs;
+
+ repo_init_revisions(the_repository, &revs, NULL);
+ /*
+ * Use a revision walk to fill in the namehash of objects in the include
+ * packs. To save time, we'll avoid traversing through objects that are
+ * in excluded packs.
+ *
+ * That may cause us to avoid populating all of the namehash fields of
+ * all included objects, but our goal is best-effort, since this is only
+ * an optimization during delta selection.
+ */
+ revs.no_kept_objects = 1;
+ revs.keep_pack_cache_flags |= IN_CORE_KEEP_PACKS;
+ revs.blob_objects = 1;
+ revs.tree_objects = 1;
+ revs.tag_objects = 1;
+ revs.ignore_missing_links = 1;
+
+ /* avoids adding objects in excluded packs */
+ ignore_packed_keep_in_core = 1;
+ read_packs_list_from_stdin(&revs);
+ if (rev_list_unpacked)
+ add_unreachable_loose_objects(&revs);
+
if (prepare_revision_walk(&revs))
die(_("revision walk setup failed"));
traverse_commit_list(&revs,
show_commit_pack_hint,
show_object_pack_hint,
- NULL);
+ &mode);
trace2_data_intmax("pack-objects", the_repository, "stdin_packs_found",
stdin_packs_found_nr);
trace2_data_intmax("pack-objects", the_repository, "stdin_packs_hints",
stdin_packs_hints_nr);
-
- strbuf_release(&buf);
- string_list_clear(&include_packs, 0);
- string_list_clear(&exclude_packs, 0);
}
static void add_cruft_object_entry(const struct object_id *oid, enum object_type type,
@@ -3773,7 +4040,6 @@ static void mark_pack_kept_in_core(struct string_list *packs, unsigned keep)
}
}
-static void add_unreachable_loose_objects(void);
static void add_objects_in_unpacked_packs(void);
static void enumerate_cruft_objects(void)
@@ -3783,7 +4049,7 @@ static void enumerate_cruft_objects(void)
_("Enumerating cruft objects"), 0);
add_objects_in_unpacked_packs();
- add_unreachable_loose_objects();
+ add_unreachable_loose_objects(NULL);
stop_progress(&progress_state);
}
@@ -3966,7 +4232,7 @@ static void show_object__ma_allow_any(struct object *obj, const char *name, void
* Quietly ignore ALL missing objects. This avoids problems with
* staging them now and getting an odd error later.
*/
- if (!has_object(the_repository, &obj->oid, 0))
+ if (!odb_has_object(the_repository->objects, &obj->oid, 0))
return;
show_object(obj, name, data);
@@ -3980,7 +4246,7 @@ static void show_object__ma_allow_promisor(struct object *obj, const char *name,
* Quietly ignore EXPECTED missing objects. This avoids problems with
* staging them now and getting an odd error later.
*/
- if (!has_object(the_repository, &obj->oid, 0) &&
+ if (!odb_has_object(the_repository->objects, &obj->oid, 0) &&
is_promisor_object(to_pack.repo, &obj->oid))
return;
@@ -4061,9 +4327,10 @@ static void add_objects_in_unpacked_packs(void)
}
static int add_loose_object(const struct object_id *oid, const char *path,
- void *data UNUSED)
+ void *data)
{
- enum object_type type = oid_object_info(the_repository, oid, NULL);
+ struct rev_info *revs = data;
+ enum object_type type = odb_read_object_info(the_repository->objects, oid, NULL);
if (type < 0) {
warning(_("loose object at %s could not be examined"), path);
@@ -4083,6 +4350,10 @@ static int add_loose_object(const struct object_id *oid, const char *path,
} else {
add_object_entry(oid, type, "", 0);
}
+
+ if (revs && type == OBJ_COMMIT)
+ add_pending_oid(revs, NULL, oid, 0);
+
return 0;
}
@@ -4091,11 +4362,10 @@ static int add_loose_object(const struct object_id *oid, const char *path,
* add_object_entry will weed out duplicates, so we just add every
* loose object we find.
*/
-static void add_unreachable_loose_objects(void)
+static void add_unreachable_loose_objects(struct rev_info *revs)
{
for_each_loose_file_in_objdir(repo_get_object_directory(the_repository),
- add_loose_object,
- NULL, NULL, NULL);
+ add_loose_object, NULL, NULL, revs);
}
static int has_sha1_pack_kept_or_nonlocal(const struct object_id *oid)
@@ -4272,6 +4542,93 @@ static void mark_bitmap_preferred_tips(void)
}
}
+static inline int is_oid_uninteresting(struct repository *repo,
+ struct object_id *oid)
+{
+ struct object *o = lookup_object(repo, oid);
+ return !o || (o->flags & UNINTERESTING);
+}
+
+static int add_objects_by_path(const char *path,
+ struct oid_array *oids,
+ enum object_type type,
+ void *data)
+{
+ size_t oe_start = to_pack.nr_objects;
+ size_t oe_end;
+ unsigned int *processed = data;
+
+ /*
+ * First, add all objects to the packing data, including the ones
+ * marked UNINTERESTING (translated to 'exclude') as they can be
+ * used as delta bases.
+ */
+ for (size_t i = 0; i < oids->nr; i++) {
+ int exclude;
+ struct object_info oi = OBJECT_INFO_INIT;
+ struct object_id *oid = &oids->oid[i];
+
+ /* Skip objects that do not exist locally. */
+ if ((exclude_promisor_objects || arg_missing_action != MA_ERROR) &&
+ oid_object_info_extended(the_repository, oid, &oi,
+ OBJECT_INFO_FOR_PREFETCH) < 0)
+ continue;
+
+ exclude = is_oid_uninteresting(the_repository, oid);
+
+ if (exclude && !thin)
+ continue;
+
+ add_object_entry(oid, type, path, exclude);
+ }
+
+ oe_end = to_pack.nr_objects;
+
+ /* We can skip delta calculations if it is a no-op. */
+ if (oe_end == oe_start || !window)
+ return 0;
+
+ ALLOC_GROW(to_pack.regions,
+ to_pack.nr_regions + 1,
+ to_pack.nr_regions_alloc);
+
+ to_pack.regions[to_pack.nr_regions].start = oe_start;
+ to_pack.regions[to_pack.nr_regions].nr = oe_end - oe_start;
+ to_pack.nr_regions++;
+
+ *processed += oids->nr;
+ display_progress(progress_state, *processed);
+
+ return 0;
+}
+
+static void get_object_list_path_walk(struct rev_info *revs)
+{
+ struct path_walk_info info = PATH_WALK_INFO_INIT;
+ unsigned int processed = 0;
+ int result;
+
+ info.revs = revs;
+ info.path_fn = add_objects_by_path;
+ info.path_fn_data = &processed;
+
+ /*
+ * Allow the --[no-]sparse option to be interesting here, if only
+ * for testing purposes. Paths with no interesting objects will not
+ * contribute to the resulting pack, but only create noisy preferred
+ * base objects.
+ */
+ info.prune_all_uninteresting = sparse;
+ info.edge_aggressive = shallow;
+
+ trace2_region_enter("pack-objects", "path-walk", revs->repo);
+ result = walk_objects_by_path(&info);
+ trace2_region_leave("pack-objects", "path-walk", revs->repo);
+
+ if (result)
+ die(_("failed to pack objects via path-walk"));
+}
+
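
As a quick orientation to the path-walk API used by `get_object_list_path_walk()` above: `walk_objects_by_path()` calls `path_fn` once per path with the batch of object IDs found there. A minimal callback with the same signature as `add_objects_by_path()` might look like the sketch below (assumes the same internal headers as builtin/pack-objects.c; illustrative only):

/* Sketch only: assumes the includes already used by builtin/pack-objects.c. */
static int count_objects_by_path(const char *path UNUSED,
                                 struct oid_array *oids,
                                 enum object_type type UNUSED,
                                 void *data)
{
    size_t *total = data;

    /* One invocation per path: every oid in 'oids' was found at 'path'. */
    *total += oids->nr;
    return 0; /* returning non-zero aborts the walk */
}

static size_t count_all_objects(struct rev_info *revs)
{
    struct path_walk_info info = PATH_WALK_INFO_INIT;
    size_t total = 0;

    info.revs = revs; /* a prepared struct rev_info, as above */
    info.path_fn = count_objects_by_path;
    info.path_fn_data = &total;

    if (walk_objects_by_path(&info))
        die("path walk failed");
    return total;
}
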
static void get_object_list(struct rev_info *revs, int ac, const char **av)
{
struct setup_revision_opt s_r_opt = {
@@ -4327,15 +4684,19 @@ static void get_object_list(struct rev_info *revs, int ac, const char **av)
if (write_bitmap_index)
mark_bitmap_preferred_tips();
- if (prepare_revision_walk(revs))
- die(_("revision walk setup failed"));
- mark_edges_uninteresting(revs, show_edge, sparse);
-
if (!fn_show_object)
fn_show_object = show_object;
- traverse_commit_list(revs,
- show_commit, fn_show_object,
- NULL);
+
+ if (path_walk) {
+ get_object_list_path_walk(revs);
+ } else {
+ if (prepare_revision_walk(revs))
+ die(_("revision walk setup failed"));
+ mark_edges_uninteresting(revs, show_edge, sparse);
+ traverse_commit_list(revs,
+ show_commit, fn_show_object,
+ NULL);
+ }
if (unpack_unreachable_expiration) {
revs->ignore_missing_links = 1;
@@ -4351,7 +4712,7 @@ static void get_object_list(struct rev_info *revs, int ac, const char **av)
if (keep_unreachable)
add_objects_in_unpacked_packs();
if (pack_loose_unreachable)
- add_unreachable_loose_objects();
+ add_unreachable_loose_objects(NULL);
if (unpack_unreachable)
loosen_unused_packed_objects();
@@ -4449,7 +4810,7 @@ static int option_parse_cruft_expiration(const struct option *opt UNUSED,
static int is_not_in_promisor_pack_obj(struct object *obj, void *data UNUSED)
{
struct object_info info = OBJECT_INFO_INIT;
- if (oid_object_info_extended(the_repository, &obj->oid, &info, 0))
+ if (odb_read_object_info_extended(the_repository->objects, &obj->oid, &info, 0))
BUG("should_include_obj should only be called on existing objects");
return info.whence != OI_PACKED || !info.u.packed.pack->pack_promisor;
}
@@ -4458,18 +4819,34 @@ static int is_not_in_promisor_pack(struct commit *commit, void *data) {
return is_not_in_promisor_pack_obj((struct object *) commit, data);
}
+static int parse_stdin_packs_mode(const struct option *opt, const char *arg,
+ int unset)
+{
+ enum stdin_packs_mode *mode = opt->value;
+
+ if (unset)
+ *mode = STDIN_PACKS_MODE_NONE;
+ else if (!arg || !*arg)
+ *mode = STDIN_PACKS_MODE_STANDARD;
+ else if (!strcmp(arg, "follow"))
+ *mode = STDIN_PACKS_MODE_FOLLOW;
+ else
+ die(_("invalid value for '%s': '%s'"), opt->long_name, arg);
+
+ return 0;
+}
+
int cmd_pack_objects(int argc,
const char **argv,
const char *prefix,
struct repository *repo UNUSED)
{
int use_internal_rev_list = 0;
- int shallow = 0;
int all_progress_implied = 0;
struct strvec rp = STRVEC_INIT;
int rev_list_unpacked = 0, rev_list_all = 0, rev_list_reflog = 0;
int rev_list_index = 0;
- int stdin_packs = 0;
+ enum stdin_packs_mode stdin_packs = STDIN_PACKS_MODE_NONE;
struct string_list keep_pack_list = STRING_LIST_INIT_NODUP;
struct list_objects_filter_options filter_options =
LIST_OBJECTS_FILTER_INIT;
@@ -4524,6 +4901,9 @@ int cmd_pack_objects(int argc,
OPT_SET_INT_F(0, "indexed-objects", &rev_list_index,
N_("include objects referred to by the index"),
1, PARSE_OPT_NONEG),
+ OPT_CALLBACK_F(0, "stdin-packs", &stdin_packs, N_("mode"),
+ N_("read packs from stdin"),
+ PARSE_OPT_OPTARG, parse_stdin_packs_mode),
OPT_BOOL(0, "stdin-packs", &stdin_packs,
N_("read packs from stdin")),
OPT_BOOL(0, "stdout", &pack_to_stdout,
@@ -4545,6 +4925,8 @@ int cmd_pack_objects(int argc,
N_("use the sparse reachability algorithm")),
OPT_BOOL(0, "thin", &thin,
N_("create thin packs")),
+ OPT_BOOL(0, "path-walk", &path_walk,
+ N_("use the path-walk API to walk objects when possible")),
OPT_BOOL(0, "shallow", &shallow,
N_("create packs suitable for shallow fetches")),
OPT_BOOL(0, "honor-pack-keep", &ignore_packed_keep_on_disk,
@@ -4614,6 +4996,17 @@ int cmd_pack_objects(int argc,
if (pack_to_stdout != !base_name || argc)
usage_with_options(pack_usage, pack_objects_options);
+ if (path_walk < 0) {
+ if (use_bitmap_index > 0 ||
+ !use_internal_rev_list)
+ path_walk = 0;
+ else if (the_repository->gitdir &&
+ the_repository->settings.pack_use_path_walk)
+ path_walk = 1;
+ else
+ path_walk = git_env_bool("GIT_TEST_PACK_PATH_WALK", 0);
+ }
+
if (depth < 0)
depth = 0;
if (depth >= (1 << OE_DEPTH_BITS)) {
@@ -4630,7 +5023,28 @@ int cmd_pack_objects(int argc,
window = 0;
strvec_push(&rp, "pack-objects");
- if (thin) {
+
+ if (path_walk) {
+ const char *option = NULL;
+ if (filter_options.choice)
+ option = "--filter";
+ else if (use_delta_islands)
+ option = "--delta-islands";
+
+ if (option) {
+ warning(_("cannot use %s with %s"),
+ option, "--path-walk");
+ path_walk = 0;
+ }
+ }
+ if (path_walk) {
+ strvec_push(&rp, "--boundary");
+ /*
+ * We must disable the bitmaps because we are removing
+ * the --objects / --objects-edge[-aggressive] options.
+ */
+ use_bitmap_index = 0;
+ } else if (thin) {
use_internal_rev_list = 1;
strvec_push(&rp, shallow
? "--objects-edge-aggressive"
@@ -4655,9 +5069,10 @@ int cmd_pack_objects(int argc,
strvec_push(&rp, "--unpacked");
}
- if (exclude_promisor_objects && exclude_promisor_objects_best_effort)
- die(_("options '%s' and '%s' cannot be used together"),
- "--exclude-promisor-objects", "--exclude-promisor-objects-best-effort");
+ die_for_incompatible_opt2(exclude_promisor_objects,
+ "--exclude-promisor-objects",
+ exclude_promisor_objects_best_effort,
+ "--exclude-promisor-objects-best-effort");
if (exclude_promisor_objects) {
use_internal_rev_list = 1;
fetch_if_missing = 0;
@@ -4695,13 +5110,14 @@ int cmd_pack_objects(int argc,
if (!pack_to_stdout && thin)
die(_("--thin cannot be used to build an indexable pack"));
- if (keep_unreachable && unpack_unreachable)
- die(_("options '%s' and '%s' cannot be used together"), "--keep-unreachable", "--unpack-unreachable");
+ die_for_incompatible_opt2(keep_unreachable, "--keep-unreachable",
+ unpack_unreachable, "--unpack-unreachable");
if (!rev_list_all || !rev_list_reflog || !rev_list_index)
unpack_unreachable_expiration = 0;
- if (stdin_packs && filter_options.choice)
- die(_("cannot use --filter with --stdin-packs"));
+ die_for_incompatible_opt2(stdin_packs, "--stdin-packs",
+ filter_options.choice, "--filter");
+
if (stdin_packs && use_internal_rev_list)
die(_("cannot use internal rev list with --stdin-packs"));
@@ -4709,8 +5125,8 @@ int cmd_pack_objects(int argc,
if (cruft) {
if (use_internal_rev_list)
die(_("cannot use internal rev list with --cruft"));
- if (stdin_packs)
- die(_("cannot use --stdin-packs with --cruft"));
+ die_for_incompatible_opt2(stdin_packs, "--stdin-packs",
+ cruft, "--cruft");
}
/*
@@ -4778,11 +5194,7 @@ int cmd_pack_objects(int argc,
progress_state = start_progress(the_repository,
_("Enumerating objects"), 0);
if (stdin_packs) {
- /* avoids adding objects in excluded packs */
- ignore_packed_keep_in_core = 1;
- read_packs_list_from_stdin();
- if (rev_list_unpacked)
- add_unreachable_loose_objects();
+ read_stdin_packs(stdin_packs, rev_list_unpacked);
} else if (cruft) {
read_cruft_objects();
} else if (!use_internal_rev_list) {
diff --git a/builtin/pack-redundant.c b/builtin/pack-redundant.c
index 5d1fc78176..fe81c293e3 100644
--- a/builtin/pack-redundant.c
+++ b/builtin/pack-redundant.c
@@ -13,7 +13,7 @@
#include "hex.h"
#include "packfile.h"
-#include "object-store.h"
+#include "odb.h"
#include "strbuf.h"
#define BLKSIZE 512
@@ -625,14 +625,8 @@ int cmd_pack_redundant(int argc, const char **argv, const char *prefix UNUSED, s
break;
}
- if (!i_still_use_this) {
- fputs(_("'git pack-redundant' is nominated for removal.\n"
- "If you still use this command, please add an extra\n"
- "option, '--i-still-use-this', on the command line\n"
- "and let us know you still use it by sending an e-mail\n"
- "to <git@vger.kernel.org>. Thanks.\n"), stderr);
- die(_("refusing to run without --i-still-use-this"));
- }
+ if (!i_still_use_this)
+ you_still_use_that("git pack-redundant");
if (load_all_packs)
load_all();
diff --git a/builtin/patch-id.c b/builtin/patch-id.c
index cdef2ec10a..26f04b0335 100644
--- a/builtin/patch-id.c
+++ b/builtin/patch-id.c
@@ -254,7 +254,7 @@ int cmd_patch_id(int argc,
* the code that computes patch IDs to always use SHA1.
*/
if (!the_hash_algo)
- repo_set_hash_algo(the_repository, GIT_HASH_SHA1);
+ repo_set_hash_algo(the_repository, GIT_HASH_DEFAULT);
generate_id_list(opts ? opts > 1 : config.stable,
opts ? opts == 3 : config.verbatim);
diff --git a/builtin/prune.c b/builtin/prune.c
index e930caa0c0..d1c0ee1419 100644
--- a/builtin/prune.c
+++ b/builtin/prune.c
@@ -1,4 +1,3 @@
-#define USE_THE_REPOSITORY_VARIABLE
#define DISABLE_SIGN_COMPARE_WARNINGS
#include "builtin.h"
@@ -17,7 +16,7 @@
#include "replace-object.h"
#include "object-file.h"
#include "object-name.h"
-#include "object-store.h"
+#include "odb.h"
#include "shallow.h"
static const char * const prune_usage[] = {
@@ -64,7 +63,7 @@ static void perform_reachability_traversal(struct rev_info *revs)
return;
if (show_progress)
- progress = start_delayed_progress(the_repository,
+ progress = start_delayed_progress(revs->repo,
_("Checking connectivity"), 0);
mark_reachable_objects(revs, 1, expire, progress);
stop_progress(&progress);
@@ -78,7 +77,7 @@ static int is_object_reachable(const struct object_id *oid,
perform_reachability_traversal(revs);
- obj = lookup_object(the_repository, oid);
+ obj = lookup_object(revs->repo, oid);
return obj && (obj->flags & SEEN);
}
@@ -99,8 +98,8 @@ static int prune_object(const struct object_id *oid, const char *fullpath,
if (st.st_mtime > expire)
return 0;
if (show_only || verbose) {
- enum object_type type = oid_object_info(the_repository, oid,
- NULL);
+ enum object_type type =
+ odb_read_object_info(revs->repo->objects, oid, NULL);
printf("%s %s\n", oid_to_hex(oid),
(type > 0) ? type_name(type) : "unknown");
}
@@ -154,7 +153,7 @@ static void remove_temporary_files(const char *path)
int cmd_prune(int argc,
const char **argv,
const char *prefix,
- struct repository *repo UNUSED)
+ struct repository *repo)
{
struct rev_info revs;
int exclude_promisor_objects = 0;
@@ -173,20 +172,19 @@ int cmd_prune(int argc,
expire = TIME_MAX;
save_commit_buffer = 0;
disable_replace_refs();
- repo_init_revisions(the_repository, &revs, prefix);
argc = parse_options(argc, argv, prefix, options, prune_usage, 0);
- if (repository_format_precious_objects)
+ repo_init_revisions(repo, &revs, prefix);
+ if (repo->repository_format_precious_objects)
die(_("cannot prune in a precious-objects repo"));
while (argc--) {
struct object_id oid;
const char *name = *argv++;
- if (!repo_get_oid(the_repository, name, &oid)) {
- struct object *object = parse_object_or_die(the_repository, &oid,
- name);
+ if (!repo_get_oid(repo, name, &oid)) {
+ struct object *object = parse_object_or_die(repo, &oid, name);
add_pending_object(&revs, object, "");
}
else
@@ -200,16 +198,16 @@ int cmd_prune(int argc,
revs.exclude_promisor_objects = 1;
}
- for_each_loose_file_in_objdir(repo_get_object_directory(the_repository),
+ for_each_loose_file_in_objdir(repo_get_object_directory(repo),
prune_object, prune_cruft, prune_subdir, &revs);
prune_packed_objects(show_only ? PRUNE_PACKED_DRY_RUN : 0);
- remove_temporary_files(repo_get_object_directory(the_repository));
- s = mkpathdup("%s/pack", repo_get_object_directory(the_repository));
+ remove_temporary_files(repo_get_object_directory(repo));
+ s = mkpathdup("%s/pack", repo_get_object_directory(repo));
remove_temporary_files(s);
free(s);
- if (is_repository_shallow(the_repository)) {
+ if (is_repository_shallow(repo)) {
perform_reachability_traversal(&revs);
prune_shallow(show_only ? PRUNE_SHOW_ONLY : 0);
}
diff --git a/builtin/pull.c b/builtin/pull.c
index a1ebc6ad33..c593f324fe 100644
--- a/builtin/pull.c
+++ b/builtin/pull.c
@@ -143,6 +143,9 @@ static struct option pull_options[] = {
OPT_PASSTHRU(0, "summary", &opt_diffstat, NULL,
N_("(synonym to --stat)"),
PARSE_OPT_NOARG | PARSE_OPT_HIDDEN),
+ OPT_PASSTHRU(0, "compact-summary", &opt_diffstat, NULL,
+ N_("show a compact-summary at the end of the merge"),
+ PARSE_OPT_NOARG),
OPT_PASSTHRU(0, "log", &opt_log, N_("n"),
N_("add (at most <n>) entries from shortlog to merge commit message"),
PARSE_OPT_OPTARG),
@@ -487,7 +490,7 @@ static void NORETURN die_no_merge_candidates(const char *repo, const char **refs
} else
fprintf_ln(stderr, _("Your configuration specifies to merge with the ref '%s'\n"
"from the remote, but no such ref was fetched."),
- *curr_branch->merge_name);
+ curr_branch->merge[0]->src);
exit(1);
}
diff --git a/builtin/rebase.c b/builtin/rebase.c
index 2e8c4ee678..e90562a3b8 100644
--- a/builtin/rebase.c
+++ b/builtin/rebase.c
@@ -1128,6 +1128,7 @@ int cmd_rebase(int argc,
.short_name = 'n',
.long_name = "no-stat",
.value = &options.flags,
+ .precision = sizeof(options.flags),
.help = N_("do not show diffstat of what changed upstream"),
.flags = PARSE_OPT_NOARG,
.defval = REBASE_DIFFSTAT,
diff --git a/builtin/receive-pack.c b/builtin/receive-pack.c
index a317d6c278..7974d157eb 100644
--- a/builtin/receive-pack.c
+++ b/builtin/receive-pack.c
@@ -33,7 +33,7 @@
#include "packfile.h"
#include "object-file.h"
#include "object-name.h"
-#include "object-store.h"
+#include "odb.h"
#include "path.h"
#include "protocol.h"
#include "commit-reach.h"
@@ -359,7 +359,8 @@ static void write_head_info(void)
refs_for_each_fullref_in(get_main_ref_store(the_repository), "",
exclude_patterns, show_ref_cb, &seen);
- for_each_alternate_ref(show_one_alternate_ref, &seen);
+ odb_for_each_alternate_ref(the_repository->objects,
+ show_one_alternate_ref, &seen);
oidset_clear(&seen);
strvec_clear(&excludes_vector);
@@ -1508,8 +1509,8 @@ static const char *update(struct command *cmd, struct shallow_info *si)
}
if (!is_null_oid(new_oid) &&
- !has_object(the_repository, new_oid,
- HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR)) {
+ !odb_has_object(the_repository->objects, new_oid,
+ HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR)) {
error("unpack should have generated %s, "
"but I can't find it!", oid_to_hex(new_oid));
ret = "bad pack";
@@ -1846,36 +1847,102 @@ static void BUG_if_skipped_connectivity_check(struct command *commands,
BUG_if_bug("connectivity check skipped???");
}
+static void ref_transaction_rejection_handler(const char *refname,
+ const struct object_id *old_oid UNUSED,
+ const struct object_id *new_oid UNUSED,
+ const char *old_target UNUSED,
+ const char *new_target UNUSED,
+ enum ref_transaction_error err,
+ void *cb_data)
+{
+ struct strmap *failed_refs = cb_data;
+
+ strmap_put(failed_refs, refname, (char *)ref_transaction_error_msg(err));
+}
+
static void execute_commands_non_atomic(struct command *commands,
struct shallow_info *si)
{
struct command *cmd;
struct strbuf err = STRBUF_INIT;
+ const char *reported_error = NULL;
+ struct strmap failed_refs = STRMAP_INIT;
- for (cmd = commands; cmd; cmd = cmd->next) {
- if (!should_process_cmd(cmd) || cmd->run_proc_receive)
- continue;
+ /*
+ * Reference updates where D/F conflicts shouldn't arise, because
+ * one reference is being deleted while the other is being created,
+ * are still treated as conflicts in batched updates, since we don't
+ * do conflict resolution inside a transaction. To mitigate this,
+ * delete references in a separate batch.
+ *
+ * NEEDSWORK: Add conflict resolution between deletion and creation
+ * of reference updates within a transaction. With that, we can
+ * combine the two phases.
+ */
+ enum processing_phase {
+ PHASE_DELETIONS,
+ PHASE_OTHERS
+ };
- transaction = ref_store_transaction_begin(get_main_ref_store(the_repository),
- 0, &err);
- if (!transaction) {
- rp_error("%s", err.buf);
- strbuf_reset(&err);
- cmd->error_string = "transaction failed to start";
- continue;
+ for (enum processing_phase phase = PHASE_DELETIONS; phase <= PHASE_OTHERS; phase++) {
+ for (cmd = commands; cmd; cmd = cmd->next) {
+ if (!should_process_cmd(cmd) || cmd->run_proc_receive)
+ continue;
+
+ if (phase == PHASE_DELETIONS && !is_null_oid(&cmd->new_oid))
+ continue;
+ else if (phase == PHASE_OTHERS && is_null_oid(&cmd->new_oid))
+ continue;
+
+ /*
+ * Lazily create a transaction only when we know there are
+ * updates to be added.
+ */
+ if (!transaction) {
+ transaction = ref_store_transaction_begin(get_main_ref_store(the_repository),
+ REF_TRANSACTION_ALLOW_FAILURE, &err);
+ if (!transaction) {
+ rp_error("%s", err.buf);
+ strbuf_reset(&err);
+ reported_error = "transaction failed to start";
+ goto failure;
+ }
+ }
+
+ cmd->error_string = update(cmd, si);
}
- cmd->error_string = update(cmd, si);
+ /* No transaction, so nothing to commit */
+ if (!transaction)
+ goto cleanup;
- if (!cmd->error_string
- && ref_transaction_commit(transaction, &err)) {
+ if (ref_transaction_commit(transaction, &err)) {
rp_error("%s", err.buf);
- strbuf_reset(&err);
- cmd->error_string = "failed to update ref";
+ reported_error = "failed to update refs";
+ goto failure;
}
+
+ ref_transaction_for_each_rejected_update(transaction,
+ ref_transaction_rejection_handler,
+ &failed_refs);
+
+ if (strmap_empty(&failed_refs))
+ goto cleanup;
+
+ failure:
+ for (cmd = commands; cmd; cmd = cmd->next) {
+ if (reported_error)
+ cmd->error_string = reported_error;
+ else if (strmap_contains(&failed_refs, cmd->ref_name))
+ cmd->error_string = strmap_get(&failed_refs, cmd->ref_name);
+ }
+
+ cleanup:
ref_transaction_free(transaction);
+ transaction = NULL;
+ strmap_clear(&failed_refs, 0);
+ strbuf_release(&err);
}
- strbuf_release(&err);
}
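
To make the two-phase loop above concrete: a push that deletes `refs/heads/topic` while creating `refs/heads/topic/a` is a D/F conflict if both updates sit in the same transaction, which is why deletions are flushed in their own batch first. A self-contained sketch of the partitioning (demo types only, not Git's `struct command`):

#include <stdio.h>

/* Demo stand-in for a queued ref update (not Git's struct command). */
struct demo_update {
    const char *refname;
    int is_delete; /* in the code above: is_null_oid(&cmd->new_oid) */
};

/* Process all deletions first, then the remaining updates. */
static void demo_two_phase(const struct demo_update *updates, size_t nr)
{
    for (int deletions = 1; deletions >= 0; deletions--) {
        printf("batch: %s\n", deletions ? "deletions" : "others");
        for (size_t i = 0; i < nr; i++) {
            if (updates[i].is_delete != deletions)
                continue;
            printf("  %s %s\n",
                   deletions ? "delete" : "update",
                   updates[i].refname);
        }
    }
}

int main(void)
{
    struct demo_update updates[] = {
        { "refs/heads/topic", 1 },   /* remove the old ref ...     */
        { "refs/heads/topic/a", 0 }, /* ... then create beneath it */
    };
    demo_two_phase(updates, 2);
    return 0;
}
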
static void execute_commands_atomic(struct command *commands,
@@ -2136,7 +2203,7 @@ static struct command *read_head_info(struct packet_reader *reader,
use_push_options = 1;
hash = parse_feature_value(feature_list, "object-format", &len, NULL);
if (!hash) {
- hash = hash_algos[GIT_HASH_SHA1].name;
+ hash = hash_algos[GIT_HASH_SHA1_LEGACY].name;
len = strlen(hash);
}
if (xstrncmpz(the_hash_algo->name, hash, len))
diff --git a/builtin/reflog.c b/builtin/reflog.c
index 3acaf3e32c..d4da41aaea 100644
--- a/builtin/reflog.c
+++ b/builtin/reflog.c
@@ -283,6 +283,9 @@ static int cmd_reflog_expire(int argc, const char **argv, const char *prefix,
&cb);
free(ref);
}
+
+ reflog_clear_expire_config(&opts);
+
return status;
}
diff --git a/builtin/remote.c b/builtin/remote.c
index 0d6755bcb7..5dd6cbbaee 100644
--- a/builtin/remote.c
+++ b/builtin/remote.c
@@ -14,7 +14,7 @@
#include "rebase.h"
#include "refs.h"
#include "refspec.h"
-#include "object-store.h"
+#include "odb.h"
#include "strvec.h"
#include "commit-reach.h"
#include "progress.h"
@@ -157,6 +157,21 @@ static int parse_mirror_opt(const struct option *opt, const char *arg, int not)
return 0;
}
+static int check_remote_collision(struct remote *remote, void *data)
+{
+ const char *name = data;
+ const char *p;
+
+ if (skip_prefix(name, remote->name, &p) && *p == '/')
+ die(_("remote name '%s' is a subset of existing remote '%s'"),
+ name, remote->name);
+ if (skip_prefix(remote->name, name, &p) && *p == '/')
+ die(_("remote name '%s' is a superset of existing remote '%s'"),
+ name, remote->name);
+
+ return 0;
+}
+
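
The new `check_remote_collision()` refuses a remote whose name nests with an existing one (for example `foo` vs. `foo/bar`), presumably because the shorter name's tracking-ref and configuration namespace would contain the longer one's. The same prefix test, restated as a standalone sketch with plain `strncmp` instead of Git's `skip_prefix()`:

#include <stdio.h>
#include <string.h>

/* Returns 1 if one name equals the other plus a trailing "/..." part. */
static int names_nest(const char *a, const char *b)
{
    size_t la = strlen(a), lb = strlen(b);

    if (la < lb)
        return names_nest(b, a);
    /* here la >= lb: is a == b followed by '/' and more? */
    return !strncmp(a, b, lb) && a[lb] == '/';
}

int main(void)
{
    printf("%d\n", names_nest("foo/bar", "foo")); /* 1: collision   */
    printf("%d\n", names_nest("foobar", "foo"));  /* 0: no conflict */
    return 0;
}
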
static int add(int argc, const char **argv, const char *prefix,
struct repository *repo UNUSED)
{
@@ -208,6 +223,8 @@ static int add(int argc, const char **argv, const char *prefix,
if (!valid_remote_name(name))
die(_("'%s' is not a valid remote name"), name);
+ for_each_remote(check_remote_collision, (void *)name);
+
strbuf_addf(&buf, "remote.%s.url", name);
git_config_set(buf.buf, url);
@@ -454,8 +471,8 @@ static int get_push_ref_states(const struct ref *remote_refs,
info->status = PUSH_STATUS_UPTODATE;
else if (is_null_oid(&ref->old_oid))
info->status = PUSH_STATUS_CREATE;
- else if (has_object(the_repository, &ref->old_oid,
- HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR) &&
+ else if (odb_has_object(the_repository->objects, &ref->old_oid,
+ HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR) &&
ref_newer(&ref->new_oid, &ref->old_oid))
info->status = PUSH_STATUS_FASTFORWARD;
else
@@ -1521,9 +1538,6 @@ static int prune_remote(const char *remote, int dry_run)
struct ref_states states = REF_STATES_INIT;
struct string_list refs_to_prune = STRING_LIST_INIT_NODUP;
struct string_list_item *item;
- const char *dangling_msg = dry_run
- ? _(" %s will become dangling!")
- : _(" %s has become dangling!");
get_remote_ref_states(remote, &states, GET_REF_STATES);
@@ -1555,7 +1569,7 @@ static int prune_remote(const char *remote, int dry_run)
}
refs_warn_dangling_symrefs(get_main_ref_store(the_repository),
- stdout, dangling_msg, &refs_to_prune);
+ stdout, " ", dry_run, &refs_to_prune);
string_list_clear(&refs_to_prune, 0);
free_remote_ref_states(&states);
diff --git a/builtin/repack.c b/builtin/repack.c
index 59214dbdfd..75158d7776 100644
--- a/builtin/repack.c
+++ b/builtin/repack.c
@@ -17,7 +17,7 @@
#include "midx.h"
#include "packfile.h"
#include "prune-packed.h"
-#include "object-store.h"
+#include "odb.h"
#include "promisor-remote.h"
#include "shallow.h"
#include "pack.h"
@@ -39,11 +39,12 @@ static int write_bitmaps = -1;
static int use_delta_islands;
static int run_update_server_info = 1;
static char *packdir, *packtmp_name, *packtmp;
+static int midx_must_contain_cruft = 1;
static const char *const git_repack_usage[] = {
N_("git repack [-a] [-A] [-d] [-f] [-F] [-l] [-n] [-q] [-b] [-m]\n"
"[--window=<n>] [--depth=<n>] [--threads=<n>] [--keep-pack=<pack-name>]\n"
- "[--write-midx] [--name-hash-version=<n>]"),
+ "[--write-midx] [--name-hash-version=<n>] [--path-walk]"),
NULL
};
@@ -63,6 +64,7 @@ struct pack_objects_args {
int quiet;
int local;
int name_hash_version;
+ int path_walk;
struct list_objects_filter_options filter_options;
};
@@ -107,6 +109,10 @@ static int repack_config(const char *var, const char *value,
free(cruft_po_args->threads);
return git_config_string(&cruft_po_args->threads, var, value);
}
+ if (!strcmp(var, "repack.midxmustcontaincruft")) {
+ midx_must_contain_cruft = git_config_bool(var, value);
+ return 0;
+ }
return git_default_config(var, value, ctx, cb);
}
@@ -313,6 +319,8 @@ static void prepare_pack_objects(struct child_process *cmd,
strvec_pushf(&cmd->args, "--no-reuse-object");
if (args->name_hash_version)
strvec_pushf(&cmd->args, "--name-hash-version=%d", args->name_hash_version);
+ if (args->path_walk)
+ strvec_pushf(&cmd->args, "--path-walk");
if (args->local)
strvec_push(&cmd->args, "--local");
if (args->quiet)
@@ -687,6 +695,77 @@ static void free_pack_geometry(struct pack_geometry *geometry)
free(geometry->pack);
}
+static int midx_has_unknown_packs(char **midx_pack_names,
+ size_t midx_pack_names_nr,
+ struct string_list *include,
+ struct pack_geometry *geometry,
+ struct existing_packs *existing)
+{
+ size_t i;
+
+ string_list_sort(include);
+
+ for (i = 0; i < midx_pack_names_nr; i++) {
+ const char *pack_name = midx_pack_names[i];
+
+ /*
+ * Determine whether or not each MIDX'd pack from the existing
+ * MIDX (if any) is represented in the new MIDX. For each pack
+ * in the MIDX, it must either be:
+ *
+ * - In the "include" list of packs to be included in the new
+ * MIDX. Note this function is called before the include
+ * list is populated with any cruft pack(s).
+ *
+ * - Below the geometric split line (if using pack geometry),
+ * indicating that the pack won't be included in the new
+ * MIDX, but its contents were rolled up as part of the
+ * geometric repack.
+ *
+ * - In the existing non-kept packs list (if not using pack
+ * geometry), and marked as non-deleted.
+ */
+ if (string_list_has_string(include, pack_name)) {
+ continue;
+ } else if (geometry) {
+ struct strbuf buf = STRBUF_INIT;
+ uint32_t j;
+
+ for (j = 0; j < geometry->split; j++) {
+ strbuf_reset(&buf);
+ strbuf_addstr(&buf, pack_basename(geometry->pack[j]));
+ strbuf_strip_suffix(&buf, ".pack");
+ strbuf_addstr(&buf, ".idx");
+
+ if (!strcmp(pack_name, buf.buf)) {
+ strbuf_release(&buf);
+ break;
+ }
+ }
+
+ strbuf_release(&buf);
+
+ if (j < geometry->split)
+ continue;
+ } else {
+ struct string_list_item *item;
+
+ item = string_list_lookup(&existing->non_kept_packs,
+ pack_name);
+ if (item && !pack_is_marked_for_deletion(item))
+ continue;
+ }
+
+ /*
+ * If we got to this point, the MIDX includes some pack that we
+ * don't know about.
+ */
+ return 1;
+ }
+
+ return 0;
+}
+
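
One detail in `midx_has_unknown_packs()` above is worth spelling out: the names the MIDX records (and the ones added to `include`) end in `.idx`, while the geometry array carries `.pack` basenames, hence the strip-and-append dance before comparing. A standalone sketch of that normalization (hypothetical helper, not part of the patch):

#include <stdio.h>
#include <string.h>

/*
 * Turn "pack-<hash>.pack" into "pack-<hash>.idx" so it can be compared
 * against MIDX-style names. 'out' must hold strlen(pack_name) + 5 bytes.
 */
static void pack_to_idx_name(const char *pack_name, char *out)
{
    size_t len = strlen(pack_name);
    const char *suffix = ".pack";
    size_t slen = strlen(suffix);

    if (len > slen && !strcmp(pack_name + len - slen, suffix))
        len -= slen;
    memcpy(out, pack_name, len);
    strcpy(out + len, ".idx");
}

int main(void)
{
    char buf[64];

    pack_to_idx_name("pack-1234abcd.pack", buf);
    printf("%s\n", buf); /* pack-1234abcd.idx */
    return 0;
}
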
struct midx_snapshot_ref_data {
struct tempfile *f;
struct oidset seen;
@@ -707,7 +786,7 @@ static int midx_snapshot_ref_one(const char *refname UNUSED,
if (oidset_insert(&data->seen, oid))
return 0; /* already seen */
- if (oid_object_info(the_repository, oid, NULL) != OBJ_COMMIT)
+ if (odb_read_object_info(the_repository->objects, oid, NULL) != OBJ_COMMIT)
return 0;
fprintf(data->f->fp, "%s%s\n", data->preferred ? "+" : "",
@@ -755,6 +834,8 @@ static void midx_snapshot_refs(struct tempfile *f)
static void midx_included_packs(struct string_list *include,
struct existing_packs *existing,
+ char **midx_pack_names,
+ size_t midx_pack_names_nr,
struct string_list *names,
struct pack_geometry *geometry)
{
@@ -808,26 +889,56 @@ static void midx_included_packs(struct string_list *include,
}
}
- for_each_string_list_item(item, &existing->cruft_packs) {
+ if (midx_must_contain_cruft ||
+ midx_has_unknown_packs(midx_pack_names, midx_pack_names_nr,
+ include, geometry, existing)) {
/*
- * When doing a --geometric repack, there is no need to check
- * for deleted packs, since we're by definition not doing an
- * ALL_INTO_ONE repack (hence no packs will be deleted).
- * Otherwise we must check for and exclude any packs which are
- * enqueued for deletion.
+ * If there are one or more unknown pack(s) present (see
+ * midx_has_unknown_packs() for what makes a pack
+ * "unknown") in the MIDX before the repack, keep them
+ * as they may be required to form a reachability
+ * closure if the MIDX is bitmapped.
*
- * So we could omit the conditional below in the --geometric
- * case, but doing so is unnecessary since no packs are marked
- * as pending deletion (since we only call
- * `mark_packs_for_deletion()` when doing an all-into-one
- * repack).
+ * For example, a cruft pack can be required to form a
+ * reachability closure if the MIDX is bitmapped and one
+ * or more of the bitmap's selected commits reaches a
+ * once-cruft object that was later made reachable.
*/
- if (pack_is_marked_for_deletion(item))
- continue;
+ for_each_string_list_item(item, &existing->cruft_packs) {
+ /*
+ * When doing a --geometric repack, there is no
+ * need to check for deleted packs, since we're
+ * by definition not doing an ALL_INTO_ONE
+ * repack (hence no packs will be deleted).
+ * Otherwise we must check for and exclude any
+ * packs which are enqueued for deletion.
+ *
+ * So we could omit the conditional below in the
+ * --geometric case, but doing so is unnecessary
+ * since no packs are marked as pending
+ * deletion (since we only call
+ * `mark_packs_for_deletion()` when doing an
+ * all-into-one repack).
+ */
+ if (pack_is_marked_for_deletion(item))
+ continue;
- strbuf_reset(&buf);
- strbuf_addf(&buf, "%s.idx", item->string);
- string_list_insert(include, buf.buf);
+ strbuf_reset(&buf);
+ strbuf_addf(&buf, "%s.idx", item->string);
+ string_list_insert(include, buf.buf);
+ }
+ } else {
+ /*
+ * Modern versions of Git (with the appropriate
+ * configuration setting) will write new copies of
+ * once-cruft objects when doing a --geometric repack.
+ *
+ * If the MIDX has no cruft pack, new packs written
+ * during a --geometric repack will not rely on the
+ * cruft pack to form a reachability closure, so we can
+ * avoid including them in the MIDX in that case.
+ */
+ ;
}
strbuf_release(&buf);
@@ -1142,6 +1253,8 @@ int cmd_repack(int argc,
struct tempfile *refs_snapshot = NULL;
int i, ext, ret;
int show_progress;
+ char **midx_pack_names = NULL;
+ size_t midx_pack_names_nr = 0;
/* variables to be filled by option parsing */
int delete_redundant = 0;
@@ -1184,6 +1297,8 @@ int cmd_repack(int argc,
N_("pass --no-reuse-object to git-pack-objects")),
OPT_INTEGER(0, "name-hash-version", &po_args.name_hash_version,
N_("specify the name hash version to use for grouping similar objects by path")),
+ OPT_BOOL(0, "path-walk", &po_args.path_walk,
+ N_("pass --path-walk to git-pack-objects")),
OPT_NEGBIT('n', NULL, &run_update_server_info,
N_("do not run git-update-server-info"), 1),
OPT__QUIET(&po_args.quiet, N_("be quiet")),
@@ -1235,7 +1350,7 @@ int cmd_repack(int argc,
po_args.depth = xstrdup_or_null(opt_depth);
po_args.threads = xstrdup_or_null(opt_threads);
- if (delete_redundant && repository_format_precious_objects)
+ if (delete_redundant && the_repository->repository_format_precious_objects)
die(_("cannot delete packs in a precious-objects repo"));
die_for_incompatible_opt3(unpack_unreachable || (pack_everything & LOOSEN_UNREACHABLE), "-A",
@@ -1256,7 +1371,8 @@ int cmd_repack(int argc,
if (write_bitmaps && !(pack_everything & ALL_INTO_ONE) && !write_midx)
die(_(incremental_bitmap_conflict_error));
- if (write_bitmaps && po_args.local && has_alt_odb(the_repository)) {
+ if (write_bitmaps && po_args.local &&
+ odb_has_alternates(the_repository->objects)) {
/*
* When asked to do a local repack, but we have
* packfiles that are inherited from an alternate, then
@@ -1356,7 +1472,10 @@ int cmd_repack(int argc,
!(pack_everything & PACK_CRUFT))
strvec_push(&cmd.args, "--pack-loose-unreachable");
} else if (geometry.split_factor) {
- strvec_push(&cmd.args, "--stdin-packs");
+ if (midx_must_contain_cruft)
+ strvec_push(&cmd.args, "--stdin-packs");
+ else
+ strvec_push(&cmd.args, "--stdin-packs=follow");
strvec_push(&cmd.args, "--unpacked");
} else {
strvec_push(&cmd.args, "--unpacked");
@@ -1396,8 +1515,25 @@ int cmd_repack(int argc,
if (ret)
goto cleanup;
- if (!names.nr && !po_args.quiet)
- printf_ln(_("Nothing new to pack."));
+ if (!names.nr) {
+ if (!po_args.quiet)
+ printf_ln(_("Nothing new to pack."));
+ /*
+ * If we didn't write any new packs, the non-cruft packs
+ * may refer to once-unreachable objects in the cruft
+ * pack(s).
+ *
+ * If there isn't already a MIDX, the one we write
+ * must include the cruft pack(s), in case the
+ * non-cruft pack(s) refer to once-cruft objects.
+ *
+ * If there is already a MIDX, we can punt here, since
+ * midx_has_unknown_packs() will make the decision for
+ * us.
+ */
+ if (!get_local_multi_pack_index(the_repository))
+ midx_must_contain_cruft = 1;
+ }
if (pack_everything & PACK_CRUFT) {
const char *pack_prefix = find_pack_prefix(packdir, packtmp);
@@ -1478,6 +1614,19 @@ int cmd_repack(int argc,
string_list_sort(&names);
+ if (get_local_multi_pack_index(the_repository)) {
+ struct multi_pack_index *m =
+ get_local_multi_pack_index(the_repository);
+
+ ALLOC_ARRAY(midx_pack_names,
+ m->num_packs + m->num_packs_in_base);
+
+ for (; m; m = m->base_midx)
+ for (uint32_t i = 0; i < m->num_packs; i++)
+ midx_pack_names[midx_pack_names_nr++] =
+ xstrdup(m->pack_names[i]);
+ }
+
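
As the allocation above suggests, an incremental (chained) MIDX is walked layer by layer: `num_packs` counts the packs in the current layer, `num_packs_in_base` those in all layers below, and `base_midx` points at the next lower layer. A simplified stand-alone sketch of that traversal (demo types, not Git's `struct multi_pack_index`):

#include <stdio.h>

/* Demo stand-in for one layer of a chained multi-pack-index. */
struct demo_midx {
    const char **pack_names;
    unsigned num_packs;          /* packs in this layer only   */
    unsigned num_packs_in_base;  /* packs in all lower layers  */
    struct demo_midx *base_midx; /* next lower layer, or NULL  */
};

static void demo_collect(const struct demo_midx *m)
{
    unsigned total = m->num_packs + m->num_packs_in_base;

    printf("expecting %u pack name(s) overall\n", total);
    for (; m; m = m->base_midx)
        for (unsigned i = 0; i < m->num_packs; i++)
            printf("  %s\n", m->pack_names[i]);
}

int main(void)
{
    const char *base_names[] = { "pack-base.idx" };
    const char *tip_names[] = { "pack-tip.idx" };
    struct demo_midx base = { base_names, 1, 0, NULL };
    struct demo_midx tip = { tip_names, 1, 1, &base };

    demo_collect(&tip);
    return 0;
}
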
close_object_store(the_repository->objects);
/*
@@ -1519,7 +1668,8 @@ int cmd_repack(int argc,
if (write_midx) {
struct string_list include = STRING_LIST_INIT_DUP;
- midx_included_packs(&include, &existing, &names, &geometry);
+ midx_included_packs(&include, &existing, midx_pack_names,
+ midx_pack_names_nr, &names, &geometry);
ret = write_midx_included_packs(&include, &geometry, &names,
refs_snapshot ? get_tempfile_path(refs_snapshot) : NULL,
@@ -1570,6 +1720,9 @@ cleanup:
string_list_clear(&names, 1);
existing_packs_release(&existing);
free_pack_geometry(&geometry);
+ for (size_t i = 0; i < midx_pack_names_nr; i++)
+ free(midx_pack_names[i]);
+ free(midx_pack_names);
pack_objects_args_release(&po_args);
pack_objects_args_release(&cruft_po_args);
diff --git a/builtin/replace.c b/builtin/replace.c
index 48c7c6a2d5..5ff2ab723c 100644
--- a/builtin/replace.c
+++ b/builtin/replace.c
@@ -19,7 +19,7 @@
#include "run-command.h"
#include "object-file.h"
#include "object-name.h"
-#include "object-store.h"
+#include "odb.h"
#include "replace-object.h"
#include "tag.h"
#include "wildmatch.h"
@@ -65,8 +65,8 @@ static int show_reference(const char *refname,
if (repo_get_oid(data->repo, refname, &object))
return error(_("failed to resolve '%s' as a valid ref"), refname);
- obj_type = oid_object_info(data->repo, &object, NULL);
- repl_type = oid_object_info(data->repo, oid, NULL);
+ obj_type = odb_read_object_info(data->repo->objects, &object, NULL);
+ repl_type = odb_read_object_info(data->repo->objects, oid, NULL);
printf("%s (%s) -> %s (%s)\n", refname, type_name(obj_type),
oid_to_hex(oid), type_name(repl_type));
@@ -185,8 +185,8 @@ static int replace_object_oid(const char *object_ref,
struct strbuf err = STRBUF_INIT;
int res = 0;
- obj_type = oid_object_info(the_repository, object, NULL);
- repl_type = oid_object_info(the_repository, repl, NULL);
+ obj_type = odb_read_object_info(the_repository->objects, object, NULL);
+ repl_type = odb_read_object_info(the_repository->objects, repl, NULL);
if (!force && obj_type != repl_type)
return error(_("Objects must be of the same type.\n"
"'%s' points to a replaced object of type '%s'\n"
@@ -334,7 +334,7 @@ static int edit_and_replace(const char *object_ref, int force, int raw)
if (repo_get_oid(the_repository, object_ref, &old_oid) < 0)
return error(_("not a valid object name: '%s'"), object_ref);
- type = oid_object_info(the_repository, &old_oid, NULL);
+ type = odb_read_object_info(the_repository->objects, &old_oid, NULL);
if (type < 0)
return error(_("unable to get object type for %s"),
oid_to_hex(&old_oid));
diff --git a/builtin/replay.c b/builtin/replay.c
index 225cef0880..6172c8aacc 100644
--- a/builtin/replay.c
+++ b/builtin/replay.c
@@ -84,6 +84,7 @@ static struct commit *create_commit(struct repository *repo,
obj = parse_object(repo, &ret);
out:
+ repo_unuse_commit_buffer(the_repository, based_on, message);
free_commit_extra_headers(extra);
free_commit_list(parents);
strbuf_release(&msg);
diff --git a/builtin/rev-list.c b/builtin/rev-list.c
index 0984b607bf..97a3af36ec 100644
--- a/builtin/rev-list.c
+++ b/builtin/rev-list.c
@@ -14,7 +14,7 @@
#include "object.h"
#include "object-name.h"
#include "object-file.h"
-#include "object-store.h"
+#include "odb.h"
#include "pack-bitmap.h"
#include "parse-options.h"
#include "log-tree.h"
@@ -28,6 +28,14 @@
#include "quote.h"
#include "strbuf.h"
+struct rev_list_info {
+ struct rev_info *revs;
+ int flags;
+ int show_timestamp;
+ int hdr_termination;
+ const char *header_prefix;
+};
+
static const char rev_list_usage[] =
"git rev-list [<options>] <commit>... [--] [<path>...]\n"
"\n"
@@ -110,7 +118,8 @@ static off_t get_object_disk_usage(struct object *obj)
off_t size;
struct object_info oi = OBJECT_INFO_INIT;
oi.disk_sizep = &size;
- if (oid_object_info_extended(the_repository, &obj->oid, &oi, 0) < 0)
+ if (odb_read_object_info_extended(the_repository->objects,
+ &obj->oid, &oi, 0) < 0)
die(_("unable to get disk usage of %s"), oid_to_hex(&obj->oid));
return size;
}
@@ -346,7 +355,8 @@ static void show_commit(struct commit *commit, void *data)
static int finish_object(struct object *obj, const char *name, void *cb_data)
{
struct rev_list_info *info = cb_data;
- if (oid_object_info_extended(the_repository, &obj->oid, NULL, 0) < 0) {
+ if (odb_read_object_info_extended(the_repository->objects,
+ &obj->oid, NULL, 0) < 0) {
finish_object__ma(obj, name);
return 1;
}
@@ -650,17 +660,21 @@ int cmd_rev_list(int argc,
*
* Let "--missing" to conditionally set fetch_if_missing.
*/
+
/*
- * NEEDSWORK: These loops that attempt to find presence of
- * options without understanding that the options they are
- * skipping are broken (e.g., it would not know "--grep
+ * NEEDSWORK: The next loop is utterly broken. It tries to
+ * notice an option is used, but without understanding if each
+ * option takes an argument, which fundamentally would not
+ * work. It would not know "--grep
* --exclude-promisor-objects" is not triggering
- * "--exclude-promisor-objects" option). We really need
- * setup_revisions() to have a mechanism to allow and disallow
- * some sets of options for different commands (like rev-list,
- * replay, etc). Such a mechanism should do an early parsing
- * of options and be able to manage the `--missing=...` and
- * `--exclude-promisor-objects` options below.
+ * "--exclude-promisor-objects" option, for example.
+ *
+ * We really need setup_revisions() to have a mechanism to
+ * allow and disallow some sets of options for different
+ * commands (like rev-list, replay, etc). Such a mechanism
+ * should do an early parsing of options and be able to manage
+ * the `--missing=...` and `--exclude-promisor-objects`
+ * options below.
*/
for (i = 1; i < argc; i++) {
const char *arg = argv[i];
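
The rewritten NEEDSWORK comment above is easy to demonstrate with a tiny standalone example of why scanning `argv` without knowing which options take arguments misfires:

#include <stdio.h>
#include <string.h>

int main(void)
{
    /* Here the string is only the pattern argument of --grep ... */
    const char *argv[] = { "rev-list", "--grep",
                           "--exclude-promisor-objects", "HEAD", NULL };
    int found = 0;

    /* ... yet a naive scan still "detects" the option. */
    for (int i = 1; argv[i]; i++)
        if (!strcmp(argv[i], "--exclude-promisor-objects"))
            found = 1;

    printf("%s\n", found ? "found" : "not found"); /* prints "found" */
    return 0;
}
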
diff --git a/builtin/send-pack.c b/builtin/send-pack.c
index c6e0e9d051..28b69d26b4 100644
--- a/builtin/send-pack.c
+++ b/builtin/send-pack.c
@@ -304,9 +304,10 @@ int cmd_send_pack(int argc,
flags |= MATCH_REFS_MIRROR;
/* match them up */
- if (match_push_refs(local_refs, &remote_refs, &rs, flags))
- return -1;
-
+ if (match_push_refs(local_refs, &remote_refs, &rs, flags)) {
+ ret = -1;
+ goto cleanup;
+ }
if (!is_empty_cas(&cas))
apply_push_cas(&cas, remote, remote_refs);
@@ -339,10 +340,12 @@ int cmd_send_pack(int argc,
/* stable plumbing output; do not modify or localize */
fprintf(stderr, "Everything up-to-date\n");
+cleanup:
string_list_clear(&push_options, 0);
free_refs(remote_refs);
free_refs(local_refs);
refspec_clear(&rs);
+ oid_array_clear(&extra_have);
oid_array_clear(&shallow);
clear_cas_option(&cas);
return ret;
diff --git a/builtin/shortlog.c b/builtin/shortlog.c
index 30075b67be..60adc5e7a5 100644
--- a/builtin/shortlog.c
+++ b/builtin/shortlog.c
@@ -187,7 +187,7 @@ static void insert_records_from_trailers(struct shortlog *log,
ctx->output_encoding);
body = strstr(commit_buffer, "\n\n");
if (!body)
- return;
+ goto out;
trailer_iterator_init(&iter, body);
while (trailer_iterator_advance(&iter)) {
@@ -206,6 +206,7 @@ static void insert_records_from_trailers(struct shortlog *log,
}
trailer_iterator_release(&iter);
+out:
strbuf_release(&ident);
repo_unuse_commit_buffer(the_repository, commit, commit_buffer);
}
@@ -418,7 +419,7 @@ int cmd_shortlog(int argc,
* git/nongit so that we do not have to do this.
*/
if (nongit && !the_hash_algo)
- repo_set_hash_algo(the_repository, GIT_HASH_SHA1);
+ repo_set_hash_algo(the_repository, GIT_HASH_DEFAULT);
git_config(git_default_config, NULL);
shortlog_init(&log);
diff --git a/builtin/show-index.c b/builtin/show-index.c
index 9d4ecf5e7b..2c3e2940ce 100644
--- a/builtin/show-index.c
+++ b/builtin/show-index.c
@@ -47,7 +47,7 @@ int cmd_show_index(int argc,
* the index file passed in and use that instead.
*/
if (!the_hash_algo)
- repo_set_hash_algo(the_repository, GIT_HASH_SHA1);
+ repo_set_hash_algo(the_repository, GIT_HASH_DEFAULT);
hashsz = the_hash_algo->rawsz;
diff --git a/builtin/show-ref.c b/builtin/show-ref.c
index 623a52a45f..117709cb07 100644
--- a/builtin/show-ref.c
+++ b/builtin/show-ref.c
@@ -5,7 +5,7 @@
#include "hex.h"
#include "refs/refs-internal.h"
#include "object-name.h"
-#include "object-store.h"
+#include "odb.h"
#include "object.h"
#include "string-list.h"
#include "parse-options.h"
@@ -35,8 +35,8 @@ static void show_one(const struct show_one_options *opts,
const char *hex;
struct object_id peeled;
- if (!has_object(the_repository, oid,
- HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR))
+ if (!odb_has_object(the_repository->objects, oid,
+ HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR))
die("git show-ref: bad ref %s (%s)", refname,
oid_to_hex(oid));
diff --git a/builtin/stash.c b/builtin/stash.c
index cfbd92852a..e2f95cc2eb 100644
--- a/builtin/stash.c
+++ b/builtin/stash.c
@@ -28,7 +28,10 @@
#include "log-tree.h"
#include "diffcore.h"
#include "reflog.h"
+#include "reflog-walk.h"
#include "add-interactive.h"
+#include "oid-array.h"
+#include "commit.h"
#define INCLUDE_ALL_FILES 2
@@ -56,6 +59,10 @@
" [-u | --include-untracked] [-a | --all] [<message>]")
#define BUILTIN_STASH_CREATE_USAGE \
N_("git stash create [<message>]")
+#define BUILTIN_STASH_EXPORT_USAGE \
+ N_("git stash export (--print | --to-ref <ref>) [<stash>...]")
+#define BUILTIN_STASH_IMPORT_USAGE \
+ N_("git stash import <commit>")
#define BUILTIN_STASH_CLEAR_USAGE \
"git stash clear"
@@ -71,6 +78,8 @@ static const char * const git_stash_usage[] = {
BUILTIN_STASH_CLEAR_USAGE,
BUILTIN_STASH_CREATE_USAGE,
BUILTIN_STASH_STORE_USAGE,
+ BUILTIN_STASH_EXPORT_USAGE,
+ BUILTIN_STASH_IMPORT_USAGE,
NULL
};
@@ -124,6 +133,16 @@ static const char * const git_stash_save_usage[] = {
NULL
};
+static const char * const git_stash_export_usage[] = {
+ BUILTIN_STASH_EXPORT_USAGE,
+ NULL
+};
+
+static const char * const git_stash_import_usage[] = {
+ BUILTIN_STASH_IMPORT_USAGE,
+ NULL
+};
+
static const char ref_stash[] = "refs/stash";
static struct strbuf stash_index_path = STRBUF_INIT;
@@ -132,6 +151,7 @@ static struct strbuf stash_index_path = STRBUF_INIT;
* b_commit is set to the base commit
* i_commit is set to the commit containing the index tree
* u_commit is set to the commit containing the untracked files tree
+ * c_commit is set to the first parent (chain commit) when importing and is otherwise unset
* w_tree is set to the working tree
* b_tree is set to the base tree
* i_tree is set to the index tree
@@ -142,6 +162,7 @@ struct stash_info {
struct object_id b_commit;
struct object_id i_commit;
struct object_id u_commit;
+ struct object_id c_commit;
struct object_id w_tree;
struct object_id b_tree;
struct object_id i_tree;
@@ -160,6 +181,33 @@ static void free_stash_info(struct stash_info *info)
strbuf_release(&info->revision);
}
+static int check_stash_topology(struct repository *r, struct commit *stash)
+{
+ struct commit *p1, *p2, *p3 = NULL;
+
+ /* stash must have two or three parents */
+ if (!stash->parents || !stash->parents->next ||
+ (stash->parents->next->next && stash->parents->next->next->next))
+ return -1;
+ p1 = stash->parents->item;
+ p2 = stash->parents->next->item;
+ if (stash->parents->next->next)
+ p3 = stash->parents->next->next->item;
+ if (repo_parse_commit(r, p1) || repo_parse_commit(r, p2) ||
+ (p3 && repo_parse_commit(r, p3)))
+ return -1;
+ /* p2 must have a single parent, p3 must have no parents */
+ if (!p2->parents || p2->parents->next || (p3 && p3->parents))
+ return -1;
+ if (repo_parse_commit(r, p2->parents->item))
+ return -1;
+ /* p2^1 must equal p1 */
+ if (!oideq(&p1->object.oid, &p2->parents->item->object.oid))
+ return -1;
+
+ return 0;
+}
+
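
For orientation, `check_stash_topology()` encodes the shape every stash commit must have: first parent B (the HEAD the stash was made on), second parent I (the index state, whose only parent is B), and an optional third parent U (untracked files, parentless). The sketch below restates the check with simplified demo types (not Git's `struct commit`):

#include <stddef.h>

/* Demo stand-in for a commit with up to three parents. */
struct demo_commit {
    struct demo_commit *parents[3];
    size_t nr_parents;
};

static int demo_check_stash_topology(const struct demo_commit *w)
{
    const struct demo_commit *b, *i, *u = NULL;

    if (w->nr_parents < 2 || w->nr_parents > 3)
        return -1;
    b = w->parents[0];              /* base: HEAD at stash time  */
    i = w->parents[1];              /* index commit              */
    if (w->nr_parents == 3)
        u = w->parents[2];          /* optional untracked commit */
    if (i->nr_parents != 1 || i->parents[0] != b)
        return -1;                  /* I^1 must be B             */
    if (u && u->nr_parents != 0)
        return -1;                  /* U must be a root commit   */
    return 0;
}

int main(void)
{
    struct demo_commit b = { { NULL }, 0 };
    struct demo_commit i = { { &b }, 1 };
    struct demo_commit w = { { &b, &i }, 2 };

    return demo_check_stash_topology(&w) ? 1 : 0; /* exits 0: valid */
}
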
static void assert_stash_like(struct stash_info *info, const char *revision)
{
if (get_oidf(&info->b_commit, "%s^1", revision) ||
@@ -169,6 +217,25 @@ static void assert_stash_like(struct stash_info *info, const char *revision)
die(_("'%s' is not a stash-like commit"), revision);
}
+static int parse_stash_revision(struct strbuf *revision, const char *commit, int quiet)
+{
+ strbuf_reset(revision);
+ if (!commit) {
+ if (!refs_ref_exists(get_main_ref_store(the_repository), ref_stash)) {
+ if (!quiet)
+ fprintf_ln(stderr, _("No stash entries found."));
+ return -1;
+ }
+
+ strbuf_addf(revision, "%s@{0}", ref_stash);
+ } else if (strspn(commit, "0123456789") == strlen(commit)) {
+ strbuf_addf(revision, "%s@{%s}", ref_stash, commit);
+ } else {
+ strbuf_addstr(revision, commit);
+ }
+ return 0;
+}
+
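
`parse_stash_revision()` centralizes how a user-supplied selector becomes a revision: no argument means the newest entry, a bare number N becomes `refs/stash@{N}`, and anything else is taken verbatim. A standalone restatement with plain C string handling instead of Git's strbuf (illustrative only):

#include <stdio.h>
#include <string.h>

/* Map a user-supplied stash selector onto a revision string. */
static void demo_stash_revision(const char *arg, char *out, size_t outlen)
{
    if (!arg)
        snprintf(out, outlen, "refs/stash@{0}");
    else if (strspn(arg, "0123456789") == strlen(arg))
        snprintf(out, outlen, "refs/stash@{%s}", arg);
    else
        snprintf(out, outlen, "%s", arg);
}

int main(void)
{
    char buf[64];

    demo_stash_revision(NULL, buf, sizeof(buf));
    printf("%s\n", buf); /* refs/stash@{0} */
    demo_stash_revision("2", buf, sizeof(buf));
    printf("%s\n", buf); /* refs/stash@{2} */
    demo_stash_revision("stash@{1}", buf, sizeof(buf));
    printf("%s\n", buf); /* stash@{1}      */
    return 0;
}
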
static int get_stash_info(struct stash_info *info, int argc, const char **argv)
{
int ret;
@@ -196,17 +263,9 @@ static int get_stash_info(struct stash_info *info, int argc, const char **argv)
if (argc == 1)
commit = argv[0];
- if (!commit) {
- if (!refs_ref_exists(get_main_ref_store(the_repository), ref_stash)) {
- fprintf_ln(stderr, _("No stash entries found."));
- return -1;
- }
-
- strbuf_addf(&info->revision, "%s@{0}", ref_stash);
- } else if (strspn(commit, "0123456789") == strlen(commit)) {
- strbuf_addf(&info->revision, "%s@{%s}", ref_stash, commit);
- } else {
- strbuf_addstr(&info->revision, commit);
+ strbuf_init(&info->revision, 0);
+ if (parse_stash_revision(&info->revision, commit, 0)) {
+ return -1;
}
revision = info->revision.buf;
@@ -1372,6 +1431,7 @@ static int do_create_stash(const struct pathspec *ps, struct strbuf *stash_msg_b
const char *head_short_sha1 = NULL;
const char *branch_ref = NULL;
const char *branch_name = "(no branch)";
+ char *branch_name_buf = NULL;
struct commit *head_commit = NULL;
struct commit_list *parents = NULL;
struct strbuf msg = STRBUF_INIT;
@@ -1404,8 +1464,12 @@ static int do_create_stash(const struct pathspec *ps, struct strbuf *stash_msg_b
branch_ref = refs_resolve_ref_unsafe(get_main_ref_store(the_repository),
"HEAD", 0, NULL, &flags);
- if (flags & REF_ISSYMREF)
- skip_prefix(branch_ref, "refs/heads/", &branch_name);
+
+ if (flags & REF_ISSYMREF) {
+ if (skip_prefix(branch_ref, "refs/heads/", &branch_name))
+ branch_name = branch_name_buf = xstrdup(branch_name);
+ }
+
head_short_sha1 = repo_find_unique_abbrev(the_repository,
&head_commit->object.oid,
DEFAULT_ABBREV);
@@ -1495,6 +1559,7 @@ done:
strbuf_release(&msg);
strbuf_release(&untracked_files);
free_commit_list(parents);
+ free(branch_name_buf);
return ret;
}
@@ -1789,11 +1854,15 @@ static int push_stash(int argc, const char **argv, const char *prefix,
int ret;
if (argc) {
- force_assume = !strcmp(argv[0], "-p");
+ int flags = PARSE_OPT_KEEP_DASHDASH;
+
+ if (push_assumed)
+ flags |= PARSE_OPT_STOP_AT_NON_OPTION;
+
argc = parse_options(argc, argv, prefix, options,
push_assumed ? git_stash_usage :
- git_stash_push_usage,
- PARSE_OPT_KEEP_DASHDASH);
+ git_stash_push_usage, flags);
+ force_assume |= patch_mode;
}
if (argc) {
@@ -1884,6 +1953,383 @@ static int save_stash(int argc, const char **argv, const char *prefix,
return ret;
}
+static int write_commit_with_parents(struct repository *r,
+ struct object_id *out,
+ const struct object_id *oid,
+ struct commit_list *parents)
+{
+ size_t author_len, committer_len;
+ struct commit *this;
+ const char *orig_author, *orig_committer;
+ char *author = NULL, *committer = NULL;
+ const char *buffer;
+ unsigned long bufsize;
+ const char *p;
+ struct strbuf msg = STRBUF_INIT;
+ int ret = 0;
+ struct ident_split id;
+
+ this = lookup_commit_reference(r, oid);
+ buffer = repo_get_commit_buffer(r, this, &bufsize);
+ orig_author = find_commit_header(buffer, "author", &author_len);
+ orig_committer = find_commit_header(buffer, "committer", &committer_len);
+
+ if (!orig_author || !orig_committer) {
+ ret = error(_("cannot parse commit %s"), oid_to_hex(oid));
+ goto out;
+ }
+
+ if (split_ident_line(&id, orig_author, author_len) < 0 ||
+ split_ident_line(&id, orig_committer, committer_len) < 0) {
+ ret = error(_("invalid author or committer for %s"), oid_to_hex(oid));
+ goto out;
+ }
+
+ p = strstr(buffer, "\n\n");
+ strbuf_addstr(&msg, "git stash: ");
+
+ if (p)
+ strbuf_add(&msg, p + 2, bufsize - (p + 2 - buffer));
+ strbuf_complete_line(&msg);
+
+ author = xmemdupz(orig_author, author_len);
+ committer = xmemdupz(orig_committer, committer_len);
+
+ if (commit_tree_extended(msg.buf, msg.len,
+ r->hash_algo->empty_tree, parents,
+ out, author, committer,
+ NULL, NULL)) {
+ ret = error(_("could not write commit"));
+ goto out;
+ }
+out:
+ strbuf_release(&msg);
+ repo_unuse_commit_buffer(r, this, buffer);
+ free(author);
+ free(committer);
+ return ret;
+}
+
+static int do_import_stash(struct repository *r, const char *rev)
+{
+ struct object_id chain;
+ int res = 0;
+ const char *buffer = NULL;
+ unsigned long bufsize;
+ struct commit *this = NULL;
+ struct commit_list *items = NULL, *cur;
+ char *msg = NULL;
+
+ if (repo_get_oid(r, rev, &chain))
+ return error(_("not a valid revision: %s"), rev);
+
+ this = lookup_commit_reference(r, &chain);
+ if (!this)
+ return error(_("not a commit: %s"), rev);
+
+ /*
+ * Walk the commit history, finding each stash entry, and load data into
+ * the array.
+ */
+ for (;;) {
+ const char *author, *committer;
+ size_t author_len, committer_len;
+ const char *p;
+ const char *expected = "git stash <git@stash> 1000684800 +0000";
+ const char *prefix = "git stash: ";
+ struct commit *stash;
+ struct tree *tree = repo_get_commit_tree(r, this);
+
+ if (!tree ||
+ !oideq(&tree->object.oid, r->hash_algo->empty_tree) ||
+ (this->parents &&
+ (!this->parents->next || this->parents->next->next))) {
+ res = error(_("%s is not a valid exported stash commit"),
+ oid_to_hex(&this->object.oid));
+ goto out;
+ }
+
+ buffer = repo_get_commit_buffer(r, this, &bufsize);
+
+ if (!this->parents) {
+ /*
+ * We don't have any parents. Make sure this is our
+ * root commit.
+ */
+ author = find_commit_header(buffer, "author", &author_len);
+ committer = find_commit_header(buffer, "committer", &committer_len);
+
+ if (!author || !committer) {
+ error(_("cannot parse commit %s"), oid_to_hex(&this->object.oid));
+ goto out;
+ }
+
+ if (author_len != strlen(expected) ||
+ committer_len != strlen(expected) ||
+ memcmp(author, expected, author_len) ||
+ memcmp(committer, expected, committer_len)) {
+ res = error(_("found root commit %s with invalid data"), oid_to_hex(&this->object.oid));
+ goto out;
+ }
+ break;
+ }
+
+ p = strstr(buffer, "\n\n");
+ if (!p) {
+ res = error(_("cannot parse commit %s"), oid_to_hex(&this->object.oid));
+ goto out;
+ }
+
+ p += 2;
+ if (((size_t)(bufsize - (p - buffer)) < strlen(prefix)) ||
+ memcmp(prefix, p, strlen(prefix))) {
+ res = error(_("found stash commit %s without expected prefix"), oid_to_hex(&this->object.oid));
+ goto out;
+ }
+
+ stash = this->parents->next->item;
+
+ if (repo_parse_commit(r, this->parents->item) ||
+ repo_parse_commit(r, stash)) {
+ res = error(_("cannot parse parents of commit: %s"),
+ oid_to_hex(&this->object.oid));
+ goto out;
+ }
+
+ if (check_stash_topology(r, stash)) {
+ res = error(_("%s does not look like a stash commit"),
+ oid_to_hex(&stash->object.oid));
+ goto out;
+ }
+
+ repo_unuse_commit_buffer(r, this, buffer);
+ buffer = NULL;
+ items = commit_list_insert(stash, &items);
+ this = this->parents->item;
+ }
+
+ /*
+ * Now, walk each entry, adding it to the stash as a normal stash
+ * commit.
+ */
+ for (cur = items; cur; cur = cur->next) {
+ const char *p;
+ struct object_id *oid;
+
+ this = cur->item;
+ oid = &this->object.oid;
+ buffer = repo_get_commit_buffer(r, this, &bufsize);
+ if (!buffer) {
+ res = error(_("cannot read commit buffer for %s"), oid_to_hex(oid));
+ goto out;
+ }
+
+ p = strstr(buffer, "\n\n");
+ if (!p) {
+ res = error(_("cannot parse commit %s"), oid_to_hex(oid));
+ goto out;
+ }
+
+ p += 2;
+ msg = xmemdupz(p, bufsize - (p - buffer));
+ repo_unuse_commit_buffer(r, this, buffer);
+ buffer = NULL;
+
+ if (do_store_stash(oid, msg, 1)) {
+ res = error(_("cannot save the stash for %s"), oid_to_hex(oid));
+ goto out;
+ }
+ FREE_AND_NULL(msg);
+ }
+out:
+ if (this && buffer)
+ repo_unuse_commit_buffer(r, this, buffer);
+ free_commit_list(items);
+ free(msg);
+
+ return res;
+}
+
+static int import_stash(int argc, const char **argv, const char *prefix,
+ struct repository *repo)
+{
+ struct option options[] = {
+ OPT_END()
+ };
+
+ argc = parse_options(argc, argv, prefix, options,
+ git_stash_import_usage,
+ PARSE_OPT_KEEP_DASHDASH);
+
+ if (argc != 1)
+ usage_msg_opt("a revision is required", git_stash_import_usage, options);
+
+ return do_import_stash(repo, argv[0]);
+}
+
+struct stash_entry_data {
+ struct repository *r;
+ struct commit_list **items;
+ size_t count;
+};
+
+static int collect_stash_entries(struct object_id *old_oid UNUSED,
+ struct object_id *new_oid,
+ const char *committer UNUSED,
+ timestamp_t timestamp UNUSED,
+ int tz UNUSED, const char *msg UNUSED,
+ void *cb_data)
+{
+ struct stash_entry_data *data = cb_data;
+ struct commit *stash;
+
+ data->count++;
+ stash = lookup_commit_reference(data->r, new_oid);
+ if (!stash || check_stash_topology(data->r, stash)) {
+ return error(_("%s does not look like a stash commit"),
+ oid_to_hex(new_oid));
+ }
+ data->items = commit_list_append(stash, data->items);
+ return 0;
+}
+
+static int do_export_stash(struct repository *r,
+ const char *ref,
+ int argc,
+ const char **argv)
+{
+ struct object_id base;
+ struct object_context unused;
+ struct commit *prev;
+ struct commit_list *items = NULL, **iter = &items, *cur;
+ int res = 0;
+ int i;
+ struct strbuf revision = STRBUF_INIT;
+ const char *author, *committer;
+
+ /*
+ * This is an arbitrary, fixed date, specifically the one used by git
+ * format-patch. The goal is merely to produce reproducible output.
+ */
+ prepare_fallback_ident("git stash", "git@stash");
+ author = fmt_ident("git stash", "git@stash", WANT_BLANK_IDENT,
+ "2001-09-17T00:00:00Z", 0);
+ committer = fmt_ident("git stash", "git@stash", WANT_BLANK_IDENT,
+ "2001-09-17T00:00:00Z", 0);
+
+ /* First, we create a single empty commit. */
+ if (commit_tree_extended("", 0, r->hash_algo->empty_tree, NULL,
+ &base, author, committer, NULL, NULL))
+ return error(_("unable to write base commit"));
+
+ prev = lookup_commit_reference(r, &base);
+
+ if (argc) {
+ /*
+ * Find each specified stash, and load data into the array.
+ */
+ for (i = 0; i < argc; i++) {
+ struct object_id oid;
+ struct commit *stash;
+
+ if (parse_stash_revision(&revision, argv[i], 1) ||
+ get_oid_with_context(r, revision.buf,
+ GET_OID_QUIETLY | GET_OID_GENTLY,
+ &oid, &unused)) {
+ res = error(_("unable to find stash entry %s"), argv[i]);
+ goto out;
+ }
+
+ stash = lookup_commit_reference(r, &oid);
+ if (!stash || check_stash_topology(r, stash)) {
+ res = error(_("%s does not look like a stash commit"),
+ revision.buf);
+ goto out;
+ }
+ iter = commit_list_append(stash, iter);
+ }
+ } else {
+ /*
+ * Walk the reflog, finding each stash entry, and load data into the
+ * array.
+ */
+ struct stash_entry_data cb_data = {
+ .r = r, .items = iter,
+ };
+ if (refs_for_each_reflog_ent_reverse(get_main_ref_store(r),
+ "refs/stash",
+ collect_stash_entries,
+ &cb_data) && cb_data.count)
+ goto out;
+ }
+
+ /*
+ * Now, create a set of commits identical to the regular stash commits,
+ * but where their first parents form a chain to our original empty
+ * base commit.
+ */
+ items = reverse_commit_list(items);
+ for (cur = items; cur; cur = cur->next) {
+ struct commit_list *parents = NULL;
+ struct commit_list **next = &parents;
+ struct object_id out;
+ struct commit *stash = cur->item;
+
+ next = commit_list_append(prev, next);
+ next = commit_list_append(stash, next);
+ res = write_commit_with_parents(r, &out, &stash->object.oid, parents);
+ free_commit_list(parents);
+ if (res)
+ goto out;
+ prev = lookup_commit_reference(r, &out);
+ }
+ if (ref)
+ refs_update_ref(get_main_ref_store(r), NULL, ref,
+ &prev->object.oid, NULL, 0, UPDATE_REFS_DIE_ON_ERR);
+ else
+ puts(oid_to_hex(&prev->object.oid));
+out:
+ strbuf_release(&revision);
+ free_commit_list(items);
+
+ return res;
+}
+
+enum export_action {
+ ACTION_NONE,
+ ACTION_PRINT,
+ ACTION_TO_REF,
+};
+
+static int export_stash(int argc,
+ const char **argv,
+ const char *prefix,
+ struct repository *repo)
+{
+ const char *ref = NULL;
+ enum export_action action = ACTION_NONE;
+ struct option options[] = {
+ OPT_CMDMODE(0, "print", &action,
+ N_("print the object ID instead of writing it to a ref"),
+ ACTION_PRINT),
+ OPT_STRING(0, "to-ref", &ref, "ref",
+ N_("save the data to the given ref")),
+ OPT_END()
+ };
+
+ argc = parse_options(argc, argv, prefix, options,
+ git_stash_export_usage,
+ PARSE_OPT_KEEP_DASHDASH);
+
+ if (ref && action == ACTION_NONE)
+ action = ACTION_TO_REF;
+
+ if (action == ACTION_NONE || (ref && action == ACTION_PRINT))
+ return error(_("exactly one of --print and --to-ref is required"));
+
+ return do_export_stash(repo, ref, argc, argv);
+}
+
int cmd_stash(int argc,
const char **argv,
const char *prefix,
@@ -1904,6 +2350,8 @@ int cmd_stash(int argc,
OPT_SUBCOMMAND("store", &fn, store_stash),
OPT_SUBCOMMAND("create", &fn, create_stash),
OPT_SUBCOMMAND("push", &fn, push_stash_unassumed),
+ OPT_SUBCOMMAND("export", &fn, export_stash),
+ OPT_SUBCOMMAND("import", &fn, import_stash),
OPT_SUBCOMMAND_F("save", &fn, save_stash, PARSE_OPT_NOCOMPLETE),
OPT_END()
};
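
For reference, each commit in an exported chain carries an empty tree, links the previous chain commit as its first parent and the real stash commit as its second, and stores the stash subject after the commit header, separated by a blank line and prefixed with "git stash: ". The standalone sketch below (plain C; the raw buffer and the helper name are made up and this is not part of the patch) mirrors the header/body split and prefix check that do_import_stash() performs:

#include <stdio.h>
#include <string.h>

/*
 * Standalone illustration (not part of the patch): given a raw commit
 * object "header\n\nbody", return the stash subject if the body carries
 * the "git stash: " prefix that the exporter writes, or NULL otherwise.
 * The buffer below is made up; do_import_stash() works on real commits.
 */
static const char *stash_subject(const char *raw)
{
	static const char prefix[] = "git stash: ";
	const char *body = strstr(raw, "\n\n");

	if (!body)
		return NULL;			/* no header/body separator */
	body += 2;
	if (strncmp(body, prefix, strlen(prefix)))
		return NULL;			/* not an exported stash commit */
	return body + strlen(prefix);		/* original stash subject */
}

int main(void)
{
	const char *raw =
		"tree 4b825dc642cb6eb9a060e54bf8d69288fbee4904\n"
		"author git stash <git@stash> 1000684800 +0000\n"
		"\n"
		"git stash: WIP on main: example subject\n";
	const char *subject = stash_subject(raw);

	puts(subject ? subject : "not a stash chain commit");
	return 0;
}
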
diff --git a/builtin/submodule--helper.c b/builtin/submodule--helper.c
index 53da2116dd..d8a6fa47e5 100644
--- a/builtin/submodule--helper.c
+++ b/builtin/submodule--helper.c
@@ -28,7 +28,7 @@
#include "diff.h"
#include "object-file.h"
#include "object-name.h"
-#include "object-store.h"
+#include "odb.h"
#include "advice.h"
#include "branch.h"
#include "list-objects-filter-options.h"
@@ -41,61 +41,9 @@
typedef void (*each_submodule_fn)(const struct cache_entry *list_item,
void *cb_data);
-static int repo_get_default_remote(struct repository *repo, char **default_remote)
-{
- char *dest = NULL;
- struct strbuf sb = STRBUF_INIT;
- struct ref_store *store = get_main_ref_store(repo);
- const char *refname = refs_resolve_ref_unsafe(store, "HEAD", 0, NULL,
- NULL);
-
- if (!refname)
- return die_message(_("No such ref: %s"), "HEAD");
-
- /* detached HEAD */
- if (!strcmp(refname, "HEAD")) {
- *default_remote = xstrdup("origin");
- return 0;
- }
-
- if (!skip_prefix(refname, "refs/heads/", &refname))
- return die_message(_("Expecting a full ref name, got %s"),
- refname);
-
- strbuf_addf(&sb, "branch.%s.remote", refname);
- if (repo_config_get_string(repo, sb.buf, &dest))
- *default_remote = xstrdup("origin");
- else
- *default_remote = dest;
-
- strbuf_release(&sb);
- return 0;
-}
-
-static int get_default_remote_submodule(const char *module_path, char **default_remote)
-{
- struct repository subrepo;
- int ret;
-
- if (repo_submodule_init(&subrepo, the_repository, module_path,
- null_oid(the_hash_algo)) < 0)
- return die_message(_("could not get a repository handle for submodule '%s'"),
- module_path);
- ret = repo_get_default_remote(&subrepo, default_remote);
- repo_clear(&subrepo);
-
- return ret;
-}
-
static char *get_default_remote(void)
{
- char *default_remote;
- int code = repo_get_default_remote(the_repository, &default_remote);
-
- if (code)
- exit(code);
-
- return default_remote;
+ return xstrdup(repo_default_remote(the_repository));
}
static char *resolve_relative_url(const char *rel_url, const char *up_path, int quiet)
@@ -122,6 +70,46 @@ static char *resolve_relative_url(const char *rel_url, const char *up_path, int
return resolved_url;
}
+static int get_default_remote_submodule(const char *module_path, char **default_remote)
+{
+ const struct submodule *sub;
+ struct repository subrepo;
+ const char *remote_name = NULL;
+ char *url = NULL;
+
+ sub = submodule_from_path(the_repository, null_oid(the_hash_algo), module_path);
+ if (sub && sub->url) {
+ url = xstrdup(sub->url);
+
+ /* Possibly a url relative to parent */
+ if (starts_with_dot_dot_slash(url) ||
+ starts_with_dot_slash(url)) {
+ char *oldurl = url;
+
+ url = resolve_relative_url(oldurl, NULL, 1);
+ free(oldurl);
+ }
+ }
+
+ if (repo_submodule_init(&subrepo, the_repository, module_path,
+ null_oid(the_hash_algo)) < 0)
+ return die_message(_("could not get a repository handle for submodule '%s'"),
+ module_path);
+
+ /* Look up by URL first */
+ if (url)
+ remote_name = repo_remote_from_url(&subrepo, url);
+ if (!remote_name)
+ remote_name = repo_default_remote(&subrepo);
+
+ *default_remote = xstrdup(remote_name);
+
+ repo_clear(&subrepo);
+ free(url);
+
+ return 0;
+}
+
/* the result should be freed by the caller. */
static char *get_submodule_displaypath(const char *path, const char *prefix,
const char *super_prefix)
@@ -303,7 +291,7 @@ static void runcommand_in_submodule_cb(const struct cache_entry *list_item,
char *displaypath;
if (validate_submodule_path(path) < 0)
- exit(128);
+ die(NULL);
displaypath = get_submodule_displaypath(path, info->prefix,
info->super_prefix);
@@ -438,18 +426,6 @@ cleanup:
return ret;
}
-static int starts_with_dot_slash(const char *const path)
-{
- return path_match_flags(path, PATH_MATCH_STARTS_WITH_DOT_SLASH |
- PATH_MATCH_XPLATFORM);
-}
-
-static int starts_with_dot_dot_slash(const char *const path)
-{
- return path_match_flags(path, PATH_MATCH_STARTS_WITH_DOT_DOT_SLASH |
- PATH_MATCH_XPLATFORM);
-}
-
struct init_cb {
const char *prefix;
const char *super_prefix;
@@ -643,7 +619,7 @@ static void status_submodule(const char *path, const struct object_id *ce_oid,
};
if (validate_submodule_path(path) < 0)
- exit(128);
+ die(NULL);
if (!submodule_from_path(the_repository, null_oid(the_hash_algo), path))
die(_("no submodule mapping found in .gitmodules for path '%s'"),
@@ -1257,7 +1233,7 @@ static void sync_submodule(const char *path, const char *prefix,
return;
if (validate_submodule_path(path) < 0)
- exit(128);
+ die(NULL);
sub = submodule_from_path(the_repository, null_oid(the_hash_algo), path);
@@ -1402,7 +1378,7 @@ static void deinit_submodule(const char *path, const char *prefix,
char *sub_git_dir = xstrfmt("%s/.git", path);
if (validate_submodule_path(path) < 0)
- exit(128);
+ die(NULL);
sub = submodule_from_path(the_repository, null_oid(the_hash_algo), path);
@@ -1582,7 +1558,7 @@ static const char alternate_error_advice[] = N_(
);
static int add_possible_reference_from_superproject(
- struct object_directory *odb, void *sas_cb)
+ struct odb_source *alt_odb, void *sas_cb)
{
struct submodule_alternate_setup *sas = sas_cb;
size_t len;
@@ -1591,12 +1567,12 @@ static int add_possible_reference_from_superproject(
* If the alternate object store is another repository, try the
* standard layout with .git/(modules/<name>)+/objects
*/
- if (strip_suffix(odb->path, "/objects", &len)) {
+ if (strip_suffix(alt_odb->path, "/objects", &len)) {
struct repository alternate;
char *sm_alternate;
struct strbuf sb = STRBUF_INIT;
struct strbuf err = STRBUF_INIT;
- strbuf_add(&sb, odb->path, len);
+ strbuf_add(&sb, alt_odb->path, len);
if (repo_init(&alternate, sb.buf, NULL) < 0)
die(_("could not get a repository handle for gitdir '%s'"),
@@ -1668,7 +1644,8 @@ static void prepare_possible_alternates(const char *sm_name,
die(_("Value '%s' for submodule.alternateErrorStrategy is not recognized"), error_strategy);
if (!strcmp(sm_alternate, "superproject"))
- foreach_alt_odb(add_possible_reference_from_superproject, &sas);
+ odb_for_each_alternate(the_repository->objects,
+ add_possible_reference_from_superproject, &sas);
else if (!strcmp(sm_alternate, "no"))
; /* do nothing */
else
@@ -1724,7 +1701,7 @@ static int clone_submodule(const struct module_clone_data *clone_data,
char *to_free = NULL;
if (validate_submodule_path(clone_data_path) < 0)
- exit(128);
+ die(NULL);
if (!is_absolute_path(clone_data->path))
clone_data_path = to_free = xstrfmt("%s/%s", repo_get_work_tree(the_repository),
@@ -2660,8 +2637,10 @@ static int update_submodule(struct update_data *update_data)
if (code)
return code;
code = remote_submodule_branch(update_data->sm_path, &branch);
- if (code)
+ if (code) {
+ free(remote_name);
return code;
+ }
remote_ref = xstrfmt("refs/remotes/%s/%s", remote_name, branch);
free(remote_name);
@@ -3524,7 +3503,7 @@ static int module_add(int argc, const char **argv, const char *prefix,
strip_dir_trailing_slashes(add_data.sm_path);
if (validate_submodule_path(add_data.sm_path) < 0)
- exit(128);
+ die(NULL);
die_on_index_match(add_data.sm_path, force);
die_on_repo_without_commits(add_data.sm_path);
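
The reworked get_default_remote_submodule() prefers a remote whose URL matches the submodule's configured URL (after resolving ./ and ../ forms) and only then falls back to the repository's default remote. A minimal standalone sketch of that selection order, using a made-up remote table rather than git's real remote API:

#include <stdio.h>
#include <string.h>

struct remote_entry { const char *name; const char *url; };

/*
 * Standalone sketch: prefer a remote whose URL matches the submodule's
 * configured URL, fall back to the default remote otherwise. The table
 * and helper name are invented for illustration only.
 */
static const char *pick_remote(const struct remote_entry *remotes, size_t nr,
			       const char *sub_url, const char *fallback)
{
	for (size_t i = 0; i < nr; i++)
		if (sub_url && !strcmp(remotes[i].url, sub_url))
			return remotes[i].name;
	return fallback;
}

int main(void)
{
	const struct remote_entry remotes[] = {
		{ "origin",   "https://example.com/super.git" },
		{ "upstream", "https://example.com/lib.git" },
	};

	puts(pick_remote(remotes, 2, "https://example.com/lib.git", "origin"));
	return 0;
}
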
diff --git a/builtin/tag.c b/builtin/tag.c
index 4742b27d16..46cbf892e3 100644
--- a/builtin/tag.c
+++ b/builtin/tag.c
@@ -19,7 +19,7 @@
#include "refs.h"
#include "object-file.h"
#include "object-name.h"
-#include "object-store.h"
+#include "odb.h"
#include "path.h"
#include "tag.h"
#include "parse-options.h"
@@ -244,7 +244,7 @@ static void write_tag_body(int fd, const struct object_id *oid)
struct strbuf payload = STRBUF_INIT;
struct strbuf signature = STRBUF_INIT;
- orig = buf = repo_read_object_file(the_repository, oid, &type, &size);
+ orig = buf = odb_read_object(the_repository->objects, oid, &type, &size);
if (!buf)
return;
if (parse_signature(buf, size, &payload, &signature)) {
@@ -304,7 +304,7 @@ static void create_tag(const struct object_id *object, const char *object_ref,
struct strbuf header = STRBUF_INIT;
int should_edit;
- type = oid_object_info(the_repository, object, NULL);
+ type = odb_read_object_info(the_repository->objects, object, NULL);
if (type <= OBJ_NONE)
die(_("bad object type."));
@@ -401,13 +401,13 @@ static void create_reflog_msg(const struct object_id *oid, struct strbuf *sb)
}
strbuf_addstr(sb, " (");
- type = oid_object_info(the_repository, oid, NULL);
+ type = odb_read_object_info(the_repository->objects, oid, NULL);
switch (type) {
default:
strbuf_addstr(sb, "object of unknown type");
break;
case OBJ_COMMIT:
- if ((buf = repo_read_object_file(the_repository, oid, &type, &size))) {
+ if ((buf = odb_read_object(the_repository->objects, oid, &type, &size))) {
subject_len = find_commit_subject(buf, &subject_start);
strbuf_insert(sb, sb->len, subject_start, subject_len);
} else {
diff --git a/builtin/unpack-file.c b/builtin/unpack-file.c
index e33acfc4ee..4360872ae0 100644
--- a/builtin/unpack-file.c
+++ b/builtin/unpack-file.c
@@ -4,7 +4,7 @@
#include "hex.h"
#include "object-file.h"
#include "object-name.h"
-#include "object-store.h"
+#include "odb.h"
static char *create_temp_file(struct object_id *oid)
{
@@ -14,7 +14,7 @@ static char *create_temp_file(struct object_id *oid)
unsigned long size;
int fd;
- buf = repo_read_object_file(the_repository, oid, &type, &size);
+ buf = odb_read_object(the_repository->objects, oid, &type, &size);
if (!buf || type != OBJ_BLOB)
die("unable to read blob object %s", oid_to_hex(oid));
diff --git a/builtin/unpack-objects.c b/builtin/unpack-objects.c
index e905d5f4e1..a69d59eb50 100644
--- a/builtin/unpack-objects.c
+++ b/builtin/unpack-objects.c
@@ -9,7 +9,7 @@
#include "git-zlib.h"
#include "hex.h"
#include "object-file.h"
-#include "object-store.h"
+#include "odb.h"
#include "object.h"
#include "delta.h"
#include "pack.h"
@@ -232,7 +232,7 @@ static int check_object(struct object *obj, enum object_type type,
if (!(obj->flags & FLAG_OPEN)) {
unsigned long size;
- int type = oid_object_info(the_repository, &obj->oid, &size);
+ int type = odb_read_object_info(the_repository->objects, &obj->oid, &size);
if (type != obj->type || type <= 0)
die("object of unexpected type");
obj->flags |= FLAG_WRITTEN;
@@ -449,8 +449,8 @@ static void unpack_delta_entry(enum object_type type, unsigned long delta_size,
delta_data = get_data(delta_size);
if (!delta_data)
return;
- if (has_object(the_repository, &base_oid,
- HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR))
+ if (odb_has_object(the_repository->objects, &base_oid,
+ HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR))
; /* Ok we have this one */
else if (resolve_against_held(nr, &base_oid,
delta_data, delta_size))
@@ -516,8 +516,8 @@ static void unpack_delta_entry(enum object_type type, unsigned long delta_size,
if (resolve_against_held(nr, &base_oid, delta_data, delta_size))
return;
- base = repo_read_object_file(the_repository, &base_oid, &type,
- &base_size);
+ base = odb_read_object(the_repository->objects, &base_oid,
+ &type, &base_size);
if (!base) {
error("failed to read delta-pack base object %s",
oid_to_hex(&base_oid));
diff --git a/builtin/update-index.c b/builtin/update-index.c
index 538b619ba4..0c1d4ed55b 100644
--- a/builtin/update-index.c
+++ b/builtin/update-index.c
@@ -981,6 +981,7 @@ int cmd_update_index(int argc,
.type = OPTION_SET_INT,
.long_name = "assume-unchanged",
.value = &mark_valid_only,
+ .precision = sizeof(mark_valid_only),
.help = N_("mark files as \"not changing\""),
.flags = PARSE_OPT_NOARG | PARSE_OPT_NONEG,
.defval = MARK_FLAG,
@@ -989,6 +990,7 @@ int cmd_update_index(int argc,
.type = OPTION_SET_INT,
.long_name = "no-assume-unchanged",
.value = &mark_valid_only,
+ .precision = sizeof(mark_valid_only),
.help = N_("clear assumed-unchanged bit"),
.flags = PARSE_OPT_NOARG | PARSE_OPT_NONEG,
.defval = UNMARK_FLAG,
@@ -997,6 +999,7 @@ int cmd_update_index(int argc,
.type = OPTION_SET_INT,
.long_name = "skip-worktree",
.value = &mark_skip_worktree_only,
+ .precision = sizeof(mark_skip_worktree_only),
.help = N_("mark files as \"index-only\""),
.flags = PARSE_OPT_NOARG | PARSE_OPT_NONEG,
.defval = MARK_FLAG,
@@ -1005,6 +1008,7 @@ int cmd_update_index(int argc,
.type = OPTION_SET_INT,
.long_name = "no-skip-worktree",
.value = &mark_skip_worktree_only,
+ .precision = sizeof(mark_skip_worktree_only),
.help = N_("clear skip-worktree bit"),
.flags = PARSE_OPT_NOARG | PARSE_OPT_NONEG,
.defval = UNMARK_FLAG,
@@ -1079,6 +1083,7 @@ int cmd_update_index(int argc,
.type = OPTION_SET_INT,
.long_name = "fsmonitor-valid",
.value = &mark_fsmonitor_only,
+ .precision = sizeof(mark_fsmonitor_only),
.help = N_("mark files as fsmonitor valid"),
.flags = PARSE_OPT_NOARG | PARSE_OPT_NONEG,
.defval = MARK_FLAG,
@@ -1087,6 +1092,7 @@ int cmd_update_index(int argc,
.type = OPTION_SET_INT,
.long_name = "no-fsmonitor-valid",
.value = &mark_fsmonitor_only,
+ .precision = sizeof(mark_fsmonitor_only),
.help = N_("clear fsmonitor valid bit"),
.flags = PARSE_OPT_NOARG | PARSE_OPT_NONEG,
.defval = UNMARK_FLAG,
diff --git a/builtin/update-ref.c b/builtin/update-ref.c
index 2b1e336ba1..1e6131e04a 100644
--- a/builtin/update-ref.c
+++ b/builtin/update-ref.c
@@ -575,30 +575,7 @@ static void print_rejected_refs(const char *refname,
void *cb_data UNUSED)
{
struct strbuf sb = STRBUF_INIT;
- const char *reason = "";
-
- switch (err) {
- case REF_TRANSACTION_ERROR_NAME_CONFLICT:
- reason = "refname conflict";
- break;
- case REF_TRANSACTION_ERROR_CREATE_EXISTS:
- reason = "reference already exists";
- break;
- case REF_TRANSACTION_ERROR_NONEXISTENT_REF:
- reason = "reference does not exist";
- break;
- case REF_TRANSACTION_ERROR_INCORRECT_OLD_VALUE:
- reason = "incorrect old value provided";
- break;
- case REF_TRANSACTION_ERROR_INVALID_NEW_VALUE:
- reason = "invalid new value provided";
- break;
- case REF_TRANSACTION_ERROR_EXPECTED_SYMREF:
- reason = "expected symref but found regular ref";
- break;
- default:
- reason = "unkown failure";
- }
+ const char *reason = ref_transaction_error_msg(err);
strbuf_addf(&sb, "rejected %s %s %s %s\n", refname,
new_oid ? oid_to_hex(new_oid) : new_target,
diff --git a/builtin/worktree.c b/builtin/worktree.c
index 88a36ea9f8..2dceeeed8b 100644
--- a/builtin/worktree.c
+++ b/builtin/worktree.c
@@ -621,7 +621,7 @@ static void print_preparing_worktree_line(int detach,
else {
struct commit *commit = lookup_commit_reference_by_name(branch);
if (!commit)
- BUG(_("unreachable: invalid reference: %s"), branch);
+ BUG("unreachable: invalid reference: %s", branch);
fprintf_ln(stderr, _("Preparing worktree (detached HEAD %s)"),
repo_find_unique_abbrev(the_repository, &commit->object.oid, DEFAULT_ABBREV));
}
diff --git a/builtin/write-tree.c b/builtin/write-tree.c
index 5a8dc377ec..cfec044710 100644
--- a/builtin/write-tree.c
+++ b/builtin/write-tree.c
@@ -35,6 +35,7 @@ int cmd_write_tree(int argc,
.type = OPTION_BIT,
.long_name = "ignore-cache-tree",
.value = &flags,
+ .precision = sizeof(flags),
.help = N_("only useful for debugging"),
.flags = PARSE_OPT_HIDDEN | PARSE_OPT_NOARG,
.defval = WRITE_TREE_IGNORE_CACHE_TREE,
diff --git a/bulk-checkin.c b/bulk-checkin.c
index 678e2ecc2c..16df86c0ba 100644
--- a/bulk-checkin.c
+++ b/bulk-checkin.c
@@ -17,7 +17,7 @@
#include "tmp-objdir.h"
#include "packfile.h"
#include "object-file.h"
-#include "object-store.h"
+#include "odb.h"
static int odb_transaction_nesting;
@@ -130,8 +130,8 @@ static void flush_batch_fsync(void)
static int already_written(struct bulk_checkin_packfile *state, struct object_id *oid)
{
/* The object may already exist in the repository */
- if (has_object(the_repository, oid,
- HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR))
+ if (odb_has_object(the_repository->objects, oid,
+ HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR))
return 1;
/* Might want to keep the list sorted */
diff --git a/bundle-uri.c b/bundle-uri.c
index 9accf157b4..57cccfc6b8 100644
--- a/bundle-uri.c
+++ b/bundle-uri.c
@@ -14,7 +14,7 @@
#include "fetch-pack.h"
#include "remote.h"
#include "trace2.h"
-#include "object-store.h"
+#include "odb.h"
static struct {
enum bundle_list_heuristic heuristic;
@@ -122,7 +122,7 @@ void print_bundle_list(FILE *fp, struct bundle_list *list)
int i;
for (i = 0; i < BUNDLE_HEURISTIC__COUNT; i++) {
if (heuristics[i].heuristic == list->heuristic) {
- printf("\theuristic = %s\n",
+ fprintf(fp, "\theuristic = %s\n",
heuristics[list->heuristic].name);
break;
}
@@ -278,7 +278,8 @@ static char *find_temp_filename(void)
* Find a temporary filename that is available. This is briefly
* racy, but unlikely to collide.
*/
- fd = odb_mkstemp(&name, "bundles/tmp_uri_XXXXXX");
+ fd = odb_mkstemp(the_repository->objects, &name,
+ "bundles/tmp_uri_XXXXXX");
if (fd < 0) {
warning(_("failed to create temporary file"));
return NULL;
@@ -297,6 +298,28 @@ static int download_https_uri_to_file(const char *file, const char *uri)
struct strbuf line = STRBUF_INIT;
int found_get = 0;
+ /*
+ * The protocol we speak with git-remote-https(1) uses a space to
+ * separate between URI and file, so the URI itself must not contain a
+ * space. If it did, an adversary could change the location where the
+ * downloaded file is being written to.
+ *
+ * Similarly, we use newlines to separate commands from one another.
+ * Consequently, neither the URI nor the file must contain a newline or
+ * otherwise an adversary could inject arbitrary commands.
+ *
+ * TODO: Restricting newlines in the target paths may break valid
+ * usecases, even if those are a bit more on the esoteric side.
+ * If this ever becomes a problem we should probably think about
+ * alternatives. One alternative could be to use NUL-delimited
+ * requests in git-remote-http(1). Another alternative could be
+ * to use URL quoting.
+ */
+ if (strpbrk(uri, " \n"))
+ return error("bundle-uri: URI is malformed: '%s'", file);
+ if (strchr(file, '\n'))
+ return error("bundle-uri: filename is malformed: '%s'", file);
+
strvec_pushl(&cp.args, "git-remote-https", uri, NULL);
cp.err = -1;
cp.in = -1;
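
Because the helper protocol separates the URI and the target file with a space and terminates commands with a newline, both fields must be rejected if they could smuggle extra tokens. A standalone sketch of the same validation (hypothetical values; not the in-tree function):

#include <stdio.h>
#include <string.h>

/*
 * Standalone sketch of the checks added above: a space or newline in the
 * URI, or a newline in the target path, could inject extra tokens into a
 * whitespace-delimited helper command, so such requests are refused.
 */
static int validate_request(const char *uri, const char *file)
{
	if (strpbrk(uri, " \n"))
		return -1;	/* URI may not contain space or newline */
	if (strchr(file, '\n'))
		return -1;	/* target path may not contain newline */
	return 0;
}

int main(void)
{
	printf("%d\n", validate_request("https://example.com/b.bundle", "/tmp/b"));
	printf("%d\n", validate_request("https://example.com/a b", "/tmp/b"));
	return 0;
}
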
diff --git a/bundle.c b/bundle.c
index b0a3fee2ef..42327f9739 100644
--- a/bundle.c
+++ b/bundle.c
@@ -7,7 +7,7 @@
#include "environment.h"
#include "gettext.h"
#include "hex.h"
-#include "object-store.h"
+#include "odb.h"
#include "repository.h"
#include "object.h"
#include "commit.h"
@@ -95,7 +95,7 @@ int read_bundle_header_fd(int fd, struct bundle_header *header,
* by an "object-format=" capability, which is being handled in
* `parse_capability()`.
*/
- header->hash_algo = &hash_algos[GIT_HASH_SHA1];
+ header->hash_algo = &hash_algos[GIT_HASH_SHA1_LEGACY];
/* The bundle header ends with an empty line */
while (!strbuf_getwholeline_fd(&buf, fd, '\n') &&
@@ -233,7 +233,7 @@ int verify_bundle(struct repository *r,
.quiet = 1,
};
- if (!r || !r->objects || !r->objects->odb)
+ if (!r || !r->objects || !r->objects->sources)
return error(_("need a repository to verify a bundle"));
for (i = 0; i < p->nr; i++) {
@@ -305,7 +305,7 @@ static int is_tag_in_date_range(struct object *tag, struct rev_info *revs)
if (revs->max_age == -1 && revs->min_age == -1)
goto out;
- buf = repo_read_object_file(the_repository, &tag->oid, &type, &size);
+ buf = odb_read_object(the_repository->objects, &tag->oid, &type, &size);
if (!buf)
goto out;
line = memmem(buf, size, "\ntagger ", 8);
@@ -507,7 +507,7 @@ int create_bundle(struct repository *r, const char *path,
* SHA1.
* 2. @filter is required because we parsed an object filter.
*/
- if (the_hash_algo != &hash_algos[GIT_HASH_SHA1] || revs.filter.choice)
+ if (the_hash_algo != &hash_algos[GIT_HASH_SHA1_LEGACY] || revs.filter.choice)
min_version = 3;
if (argc > 1) {
diff --git a/cache-tree.c b/cache-tree.c
index fa3858e282..a4bc14ad15 100644
--- a/cache-tree.c
+++ b/cache-tree.c
@@ -10,7 +10,7 @@
#include "cache-tree.h"
#include "bulk-checkin.h"
#include "object-file.h"
-#include "object-store.h"
+#include "odb.h"
#include "read-cache-ll.h"
#include "replace-object.h"
#include "repository.h"
@@ -239,8 +239,8 @@ int cache_tree_fully_valid(struct cache_tree *it)
if (!it)
return 0;
if (it->entry_count < 0 ||
- has_object(the_repository, &it->oid,
- HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR))
+ odb_has_object(the_repository->objects, &it->oid,
+ HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR))
return 0;
for (i = 0; i < it->subtree_nr; i++) {
if (!cache_tree_fully_valid(it->down[i]->cache_tree))
@@ -292,8 +292,8 @@ static int update_one(struct cache_tree *it,
}
if (0 <= it->entry_count &&
- has_object(the_repository, &it->oid,
- HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR))
+ odb_has_object(the_repository->objects, &it->oid,
+ HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR))
return it->entry_count;
/*
@@ -399,8 +399,9 @@ static int update_one(struct cache_tree *it,
ce_missing_ok = mode == S_IFGITLINK || missing_ok ||
!must_check_existence(ce);
if (is_null_oid(oid) ||
- (!ce_missing_ok && !has_object(the_repository, oid,
- HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR))) {
+ (!ce_missing_ok &&
+ !odb_has_object(the_repository->objects, oid,
+ HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR))) {
strbuf_release(&buffer);
if (expected_missing)
return -1;
@@ -448,7 +449,7 @@ static int update_one(struct cache_tree *it,
struct object_id oid;
hash_object_file(the_hash_algo, buffer.buf, buffer.len,
OBJ_TREE, &oid);
- if (has_object(the_repository, &oid, HAS_OBJECT_RECHECK_PACKED))
+ if (odb_has_object(the_repository->objects, &oid, HAS_OBJECT_RECHECK_PACKED))
oidcpy(&it->oid, &oid);
else
to_invalidate = 1;
diff --git a/ci/print-test-failures.sh b/ci/print-test-failures.sh
index dc910e5160..5545e77c13 100755
--- a/ci/print-test-failures.sh
+++ b/ci/print-test-failures.sh
@@ -41,7 +41,7 @@ do
case "$CI_TYPE" in
github-actions)
mkdir -p failed-test-artifacts
- echo "FAILED_TEST_ARTIFACTS=${TEST_OUTPUT_DIRECTORY:t}/failed-test-artifacts" >>$GITHUB_ENV
+ echo "FAILED_TEST_ARTIFACTS=${TEST_OUTPUT_DIRECTORY:-t}/failed-test-artifacts" >>$GITHUB_ENV
cp "${TEST_EXIT%.exit}.out" failed-test-artifacts/
tar czf failed-test-artifacts/"$test_name".trash.tar.gz "$trash_dir"
continue
diff --git a/ci/run-style-check.sh b/ci/run-style-check.sh
index 6cd4b1d934..0832c19df0 100755
--- a/ci/run-style-check.sh
+++ b/ci/run-style-check.sh
@@ -5,21 +5,5 @@
baseCommit=$1
-# Remove optional braces of control statements (if, else, for, and while)
-# according to the LLVM coding style. This avoids braces on simple
-# single-statement bodies of statements but keeps braces if one side of
-# if/else if/.../else cascade has multi-statement body.
-#
-# As this rule comes with a warning [1], we want to experiment with it
-# before adding it in-tree. since the CI job for the style check is allowed
-# to fail, appending the rule here allows us to validate its efficacy.
-# While also ensuring that end-users are not affected directly.
-#
-# [1]: https://clang.llvm.org/docs/ClangFormatStyleOptions.html#removebracesllvm
-{
- cat .clang-format
- echo "RemoveBracesLLVM: true"
-} >/tmp/clang-format-rules
-
-git clang-format --style=file:/tmp/clang-format-rules \
+git clang-format --style=file:.clang-format \
--diff --extensions c,h "$baseCommit"
diff --git a/combine-diff.c b/combine-diff.c
index dfae9f7995..4ea2dc93c4 100644
--- a/combine-diff.c
+++ b/combine-diff.c
@@ -2,7 +2,7 @@
#define DISABLE_SIGN_COMPARE_WARNINGS
#include "git-compat-util.h"
-#include "object-store.h"
+#include "odb.h"
#include "commit.h"
#include "convert.h"
#include "diff.h"
@@ -325,7 +325,7 @@ static char *grab_blob(struct repository *r,
*size = fill_textconv(r, textconv, df, &blob);
free_filespec(df);
} else {
- blob = repo_read_object_file(r, oid, &type, size);
+ blob = odb_read_object(r->objects, oid, &type, size);
if (!blob)
die(_("unable to read %s"), oid_to_hex(oid));
if (type != OBJ_BLOB)
diff --git a/commit-graph.c b/commit-graph.c
index ad3943b690..bd7b6f5338 100644
--- a/commit-graph.c
+++ b/commit-graph.c
@@ -13,7 +13,7 @@
#include "refs.h"
#include "hash-lookup.h"
#include "commit-graph.h"
-#include "object-store.h"
+#include "odb.h"
#include "oid-array.h"
#include "path.h"
#include "alloc.h"
@@ -37,7 +37,7 @@ void git_test_write_commit_graph_or_die(void)
if (git_env_bool(GIT_TEST_COMMIT_GRAPH_CHANGED_PATHS, 0))
flags = COMMIT_GRAPH_WRITE_BLOOM_FILTERS;
- if (write_commit_graph_reachable(the_repository->objects->odb,
+ if (write_commit_graph_reachable(the_repository->objects->sources,
flags, NULL))
die("failed to write commit-graph under GIT_TEST_COMMIT_GRAPH");
}
@@ -191,21 +191,21 @@ static int commit_gen_cmp(const void *va, const void *vb)
return 0;
}
-char *get_commit_graph_filename(struct object_directory *obj_dir)
+char *get_commit_graph_filename(struct odb_source *source)
{
- return xstrfmt("%s/info/commit-graph", obj_dir->path);
+ return xstrfmt("%s/info/commit-graph", source->path);
}
-static char *get_split_graph_filename(struct object_directory *odb,
+static char *get_split_graph_filename(struct odb_source *source,
const char *oid_hex)
{
- return xstrfmt("%s/info/commit-graphs/graph-%s.graph", odb->path,
+ return xstrfmt("%s/info/commit-graphs/graph-%s.graph", source->path,
oid_hex);
}
-char *get_commit_graph_chain_filename(struct object_directory *odb)
+char *get_commit_graph_chain_filename(struct odb_source *source)
{
- return xstrfmt("%s/info/commit-graphs/commit-graph-chain", odb->path);
+ return xstrfmt("%s/info/commit-graphs/commit-graph-chain", source->path);
}
static struct commit_graph *alloc_commit_graph(void)
@@ -250,7 +250,7 @@ int open_commit_graph(const char *graph_file, int *fd, struct stat *st)
struct commit_graph *load_commit_graph_one_fd_st(struct repository *r,
int fd, struct stat *st,
- struct object_directory *odb)
+ struct odb_source *source)
{
void *graph_map;
size_t graph_size;
@@ -269,7 +269,7 @@ struct commit_graph *load_commit_graph_one_fd_st(struct repository *r,
ret = parse_commit_graph(&r->settings, graph_map, graph_size);
if (ret)
- ret->odb = odb;
+ ret->odb_source = source;
else
munmap(graph_map, graph_size);
@@ -487,7 +487,7 @@ free_and_return:
static struct commit_graph *load_commit_graph_one(struct repository *r,
const char *graph_file,
- struct object_directory *odb)
+ struct odb_source *source)
{
struct stat st;
@@ -498,7 +498,7 @@ static struct commit_graph *load_commit_graph_one(struct repository *r,
if (!open_ok)
return NULL;
- g = load_commit_graph_one_fd_st(r, fd, &st, odb);
+ g = load_commit_graph_one_fd_st(r, fd, &st, source);
if (g)
g->filename = xstrdup(graph_file);
@@ -507,10 +507,10 @@ static struct commit_graph *load_commit_graph_one(struct repository *r,
}
static struct commit_graph *load_commit_graph_v1(struct repository *r,
- struct object_directory *odb)
+ struct odb_source *source)
{
- char *graph_name = get_commit_graph_filename(odb);
- struct commit_graph *g = load_commit_graph_one(r, graph_name, odb);
+ char *graph_name = get_commit_graph_filename(source);
+ struct commit_graph *g = load_commit_graph_one(r, graph_name, source);
free(graph_name);
return g;
@@ -649,10 +649,10 @@ struct commit_graph *load_commit_graph_chain_fd_st(struct repository *r,
count = st->st_size / (the_hash_algo->hexsz + 1);
CALLOC_ARRAY(oids, count);
- prepare_alt_odb(r);
+ odb_prepare_alternates(r->objects);
for (i = 0; i < count; i++) {
- struct object_directory *odb;
+ struct odb_source *source;
if (strbuf_getline_lf(&line, fp) == EOF)
break;
@@ -665,9 +665,9 @@ struct commit_graph *load_commit_graph_chain_fd_st(struct repository *r,
}
valid = 0;
- for (odb = r->objects->odb; odb; odb = odb->next) {
- char *graph_name = get_split_graph_filename(odb, line.buf);
- struct commit_graph *g = load_commit_graph_one(r, graph_name, odb);
+ for (source = r->objects->sources; source; source = source->next) {
+ char *graph_name = get_split_graph_filename(source, line.buf);
+ struct commit_graph *g = load_commit_graph_one(r, graph_name, source);
free(graph_name);
@@ -701,9 +701,9 @@ struct commit_graph *load_commit_graph_chain_fd_st(struct repository *r,
}
static struct commit_graph *load_commit_graph_chain(struct repository *r,
- struct object_directory *odb)
+ struct odb_source *source)
{
- char *chain_file = get_commit_graph_chain_filename(odb);
+ char *chain_file = get_commit_graph_chain_filename(source);
struct stat st;
int fd;
struct commit_graph *g = NULL;
@@ -719,24 +719,24 @@ static struct commit_graph *load_commit_graph_chain(struct repository *r,
}
struct commit_graph *read_commit_graph_one(struct repository *r,
- struct object_directory *odb)
+ struct odb_source *source)
{
- struct commit_graph *g = load_commit_graph_v1(r, odb);
+ struct commit_graph *g = load_commit_graph_v1(r, source);
if (!g)
- g = load_commit_graph_chain(r, odb);
+ g = load_commit_graph_chain(r, source);
return g;
}
static void prepare_commit_graph_one(struct repository *r,
- struct object_directory *odb)
+ struct odb_source *source)
{
if (r->objects->commit_graph)
return;
- r->objects->commit_graph = read_commit_graph_one(r, odb);
+ r->objects->commit_graph = read_commit_graph_one(r, source);
}
/*
@@ -747,7 +747,7 @@ static void prepare_commit_graph_one(struct repository *r,
*/
static int prepare_commit_graph(struct repository *r)
{
- struct object_directory *odb;
+ struct odb_source *source;
/*
* Early return if there is no git dir or if the commit graph is
@@ -778,11 +778,11 @@ static int prepare_commit_graph(struct repository *r)
if (!commit_graph_compatible(r))
return 0;
- prepare_alt_odb(r);
- for (odb = r->objects->odb;
- !r->objects->commit_graph && odb;
- odb = odb->next)
- prepare_commit_graph_one(r, odb);
+ odb_prepare_alternates(r->objects);
+ for (source = r->objects->sources;
+ !r->objects->commit_graph && source;
+ source = source->next)
+ prepare_commit_graph_one(r, source);
return !!r->objects->commit_graph;
}
@@ -829,7 +829,7 @@ struct bloom_filter_settings *get_bloom_filter_settings(struct repository *r)
return NULL;
}
-void close_commit_graph(struct raw_object_store *o)
+void close_commit_graph(struct object_database *o)
{
if (!o->commit_graph)
return;
@@ -1040,7 +1040,7 @@ struct commit *lookup_commit_in_graph(struct repository *repo, const struct obje
return NULL;
if (!search_commit_pos_in_graph(id, repo->objects->commit_graph, &pos))
return NULL;
- if (commit_graph_paranoia && !has_object(repo, id, 0))
+ if (commit_graph_paranoia && !odb_has_object(repo->objects, id, 0))
return NULL;
commit = lookup_commit(repo, id);
@@ -1137,7 +1137,7 @@ struct packed_commit_list {
struct write_commit_graph_context {
struct repository *r;
- struct object_directory *odb;
+ struct odb_source *odb_source;
char *graph_name;
struct oid_array oids;
struct packed_commit_list commits;
@@ -1862,7 +1862,7 @@ static int add_ref_to_set(const char *refname UNUSED,
if (!peel_iterated_oid(the_repository, oid, &peeled))
oid = &peeled;
- if (oid_object_info(the_repository, oid, NULL) == OBJ_COMMIT)
+ if (odb_read_object_info(the_repository->objects, oid, NULL) == OBJ_COMMIT)
oidset_insert(data->commits, oid);
display_progress(data->progress, oidset_size(data->commits));
@@ -1870,7 +1870,7 @@ static int add_ref_to_set(const char *refname UNUSED,
return 0;
}
-int write_commit_graph_reachable(struct object_directory *odb,
+int write_commit_graph_reachable(struct odb_source *source,
enum commit_graph_write_flags flags,
const struct commit_graph_opts *opts)
{
@@ -1890,7 +1890,7 @@ int write_commit_graph_reachable(struct object_directory *odb,
stop_progress(&data.progress);
- result = write_commit_graph(odb, NULL, &commits,
+ result = write_commit_graph(source, NULL, &commits,
flags, opts);
oidset_clear(&commits);
@@ -1906,7 +1906,7 @@ static int fill_oids_from_packs(struct write_commit_graph_context *ctx,
int dirlen;
int ret = 0;
- strbuf_addf(&packname, "%s/pack/", ctx->odb->path);
+ strbuf_addf(&packname, "%s/pack/", ctx->odb_source->path);
dirlen = packname.len;
if (ctx->report_progress) {
strbuf_addf(&progress_title,
@@ -2060,10 +2060,10 @@ static int write_commit_graph_file(struct write_commit_graph_context *ctx)
strbuf_addf(&tmp_file,
"%s/info/commit-graphs/tmp_graph_XXXXXX",
- ctx->odb->path);
+ ctx->odb_source->path);
ctx->graph_name = strbuf_detach(&tmp_file, NULL);
} else {
- ctx->graph_name = get_commit_graph_filename(ctx->odb);
+ ctx->graph_name = get_commit_graph_filename(ctx->odb_source);
}
if (safe_create_leading_directories(the_repository, ctx->graph_name)) {
@@ -2073,7 +2073,7 @@ static int write_commit_graph_file(struct write_commit_graph_context *ctx)
}
if (ctx->split) {
- char *lock_name = get_commit_graph_chain_filename(ctx->odb);
+ char *lock_name = get_commit_graph_chain_filename(ctx->odb_source);
hold_lock_file_for_update_mode(&lk, lock_name,
LOCK_DIE_ON_ERROR, 0444);
@@ -2161,7 +2161,7 @@ static int write_commit_graph_file(struct write_commit_graph_context *ctx)
if (ctx->split && ctx->base_graph_name && ctx->num_commit_graphs_after > 1) {
char *new_base_hash = xstrdup(oid_to_hex(&ctx->new_base_graph->oid));
- char *new_base_name = get_split_graph_filename(ctx->new_base_graph->odb, new_base_hash);
+ char *new_base_name = get_split_graph_filename(ctx->new_base_graph->odb_source, new_base_hash);
free(ctx->commit_graph_filenames_after[ctx->num_commit_graphs_after - 2]);
free(ctx->commit_graph_hash_after[ctx->num_commit_graphs_after - 2]);
@@ -2201,14 +2201,14 @@ static int write_commit_graph_file(struct write_commit_graph_context *ctx)
}
}
} else {
- char *graph_name = get_commit_graph_filename(ctx->odb);
+ char *graph_name = get_commit_graph_filename(ctx->odb_source);
unlink(graph_name);
free(graph_name);
}
free(ctx->commit_graph_hash_after[ctx->num_commit_graphs_after - 1]);
ctx->commit_graph_hash_after[ctx->num_commit_graphs_after - 1] = xstrdup(hash_to_hex(file_hash));
- final_graph_name = get_split_graph_filename(ctx->odb,
+ final_graph_name = get_split_graph_filename(ctx->odb_source,
ctx->commit_graph_hash_after[ctx->num_commit_graphs_after - 1]);
free(ctx->commit_graph_filenames_after[ctx->num_commit_graphs_after - 1]);
ctx->commit_graph_filenames_after[ctx->num_commit_graphs_after - 1] = final_graph_name;
@@ -2259,7 +2259,7 @@ static void split_graph_merge_strategy(struct write_commit_graph_context *ctx)
flags != COMMIT_GRAPH_SPLIT_REPLACE) {
while (g && (g->num_commits <= st_mult(size_mult, num_commits) ||
(max_commits && num_commits > max_commits))) {
- if (g->odb != ctx->odb)
+ if (g->odb_source != ctx->odb_source)
break;
if (unsigned_add_overflows(num_commits, g->num_commits))
@@ -2281,10 +2281,10 @@ static void split_graph_merge_strategy(struct write_commit_graph_context *ctx)
"should be 1 with --split=replace");
if (ctx->num_commit_graphs_after == 2) {
- char *old_graph_name = get_commit_graph_filename(g->odb);
+ char *old_graph_name = get_commit_graph_filename(g->odb_source);
if (!strcmp(g->filename, old_graph_name) &&
- g->odb != ctx->odb) {
+ g->odb_source != ctx->odb_source) {
ctx->num_commit_graphs_after = 1;
ctx->new_base_graph = NULL;
}
@@ -2456,13 +2456,13 @@ static void expire_commit_graphs(struct write_commit_graph_context *ctx)
if (ctx->opts && ctx->opts->expire_time)
expire_time = ctx->opts->expire_time;
if (!ctx->split) {
- char *chain_file_name = get_commit_graph_chain_filename(ctx->odb);
+ char *chain_file_name = get_commit_graph_chain_filename(ctx->odb_source);
unlink(chain_file_name);
free(chain_file_name);
ctx->num_commit_graphs_after = 0;
}
- strbuf_addstr(&path, ctx->odb->path);
+ strbuf_addstr(&path, ctx->odb_source->path);
strbuf_addstr(&path, "/info/commit-graphs");
dir = opendir(path.buf);
@@ -2504,7 +2504,7 @@ out:
strbuf_release(&path);
}
-int write_commit_graph(struct object_directory *odb,
+int write_commit_graph(struct odb_source *source,
const struct string_list *const pack_indexes,
struct oidset *commits,
enum commit_graph_write_flags flags,
@@ -2513,7 +2513,7 @@ int write_commit_graph(struct object_directory *odb,
struct repository *r = the_repository;
struct write_commit_graph_context ctx = {
.r = r,
- .odb = odb,
+ .odb_source = source,
.append = flags & COMMIT_GRAPH_WRITE_APPEND ? 1 : 0,
.report_progress = flags & COMMIT_GRAPH_WRITE_PROGRESS ? 1 : 0,
.split = flags & COMMIT_GRAPH_WRITE_SPLIT ? 1 : 0,
diff --git a/commit-graph.h b/commit-graph.h
index 13f662827d..78ab7b875b 100644
--- a/commit-graph.h
+++ b/commit-graph.h
@@ -1,7 +1,7 @@
#ifndef COMMIT_GRAPH_H
#define COMMIT_GRAPH_H
-#include "object-store.h"
+#include "odb.h"
#include "oidset.h"
#define GIT_TEST_COMMIT_GRAPH "GIT_TEST_COMMIT_GRAPH"
@@ -26,11 +26,11 @@ void git_test_write_commit_graph_or_die(void);
struct commit;
struct bloom_filter_settings;
struct repository;
-struct raw_object_store;
+struct object_database;
struct string_list;
-char *get_commit_graph_filename(struct object_directory *odb);
-char *get_commit_graph_chain_filename(struct object_directory *odb);
+char *get_commit_graph_filename(struct odb_source *source);
+char *get_commit_graph_chain_filename(struct odb_source *source);
int open_commit_graph(const char *graph_file, int *fd, struct stat *st);
int open_commit_graph_chain(const char *chain_file, int *fd, struct stat *st);
@@ -89,7 +89,7 @@ struct commit_graph {
uint32_t num_commits;
struct object_id oid;
char *filename;
- struct object_directory *odb;
+ struct odb_source *odb_source;
uint32_t num_commits_in_base;
unsigned int read_generation_data;
@@ -115,12 +115,12 @@ struct commit_graph {
struct commit_graph *load_commit_graph_one_fd_st(struct repository *r,
int fd, struct stat *st,
- struct object_directory *odb);
+ struct odb_source *source);
struct commit_graph *load_commit_graph_chain_fd_st(struct repository *r,
int fd, struct stat *st,
int *incomplete_chain);
struct commit_graph *read_commit_graph_one(struct repository *r,
- struct object_directory *odb);
+ struct odb_source *source);
struct repo_settings;
@@ -173,10 +173,10 @@ struct commit_graph_opts {
* is not compatible with the commit-graph feature, then the
* methods will return 0 without writing a commit-graph.
*/
-int write_commit_graph_reachable(struct object_directory *odb,
+int write_commit_graph_reachable(struct odb_source *source,
enum commit_graph_write_flags flags,
const struct commit_graph_opts *opts);
-int write_commit_graph(struct object_directory *odb,
+int write_commit_graph(struct odb_source *source,
const struct string_list *pack_indexes,
struct oidset *commits,
enum commit_graph_write_flags flags,
@@ -186,7 +186,7 @@ int write_commit_graph(struct object_directory *odb,
int verify_commit_graph(struct repository *r, struct commit_graph *g, int flags);
-void close_commit_graph(struct raw_object_store *);
+void close_commit_graph(struct object_database *);
void free_commit_graph(struct commit_graph *);
/*
diff --git a/commit.c b/commit.c
index e915b2b9a1..ea84c8e7f6 100644
--- a/commit.c
+++ b/commit.c
@@ -9,7 +9,7 @@
#include "hex.h"
#include "repository.h"
#include "object-name.h"
-#include "object-store.h"
+#include "odb.h"
#include "utf8.h"
#include "diff.h"
#include "revision.h"
@@ -31,6 +31,7 @@
#include "parse.h"
#include "object-file.h"
#include "object-file-convert.h"
+#include "prio-queue.h"
static struct commit_extra_header *read_commit_extra_header_lines(const char *buf, size_t len, const char **);
@@ -374,7 +375,7 @@ const void *repo_get_commit_buffer(struct repository *r,
if (!ret) {
enum object_type type;
unsigned long size;
- ret = repo_read_object_file(r, &commit->object.oid, &type, &size);
+ ret = odb_read_object(r->objects, &commit->object.oid, &type, &size);
if (!ret)
die("cannot read commit object %s",
oid_to_hex(&commit->object.oid));
@@ -575,7 +576,7 @@ int repo_parse_commit_internal(struct repository *r,
if (commit_graph_paranoia == -1)
commit_graph_paranoia = git_env_bool(GIT_COMMIT_GRAPH_PARANOIA, 0);
- if (commit_graph_paranoia && !has_object(r, &item->object.oid, 0)) {
+ if (commit_graph_paranoia && !odb_has_object(r->objects, &item->object.oid, 0)) {
unparse_commit(r, &item->object.oid);
return quiet_on_missing ? -1 :
error(_("commit %s exists in commit-graph but not in the object database"),
@@ -585,7 +586,8 @@ int repo_parse_commit_internal(struct repository *r,
return 0;
}
- if (oid_object_info_extended(r, &item->object.oid, &oi, flags) < 0)
+ if (odb_read_object_info_extended(r->objects, &item->object.oid,
+ &oi, flags) < 0)
return quiet_on_missing ? -1 :
error("Could not read %s",
oid_to_hex(&item->object.oid));
@@ -738,20 +740,27 @@ void commit_list_sort_by_date(struct commit_list **list)
commit_list_sort(list, commit_list_compare_by_date);
}
-struct commit *pop_most_recent_commit(struct commit_list **list,
+struct commit *pop_most_recent_commit(struct prio_queue *queue,
unsigned int mark)
{
- struct commit *ret = pop_commit(list);
+ struct commit *ret = prio_queue_peek(queue);
+ int get_pending = 1;
struct commit_list *parents = ret->parents;
while (parents) {
struct commit *commit = parents->item;
if (!repo_parse_commit(the_repository, commit) && !(commit->object.flags & mark)) {
commit->object.flags |= mark;
- commit_list_insert_by_date(commit, list);
+ if (get_pending)
+ prio_queue_replace(queue, commit);
+ else
+ prio_queue_put(queue, commit);
+ get_pending = 0;
}
parents = parents->next;
}
+ if (get_pending)
+ prio_queue_get(queue);
return ret;
}
@@ -1274,8 +1283,8 @@ static void handle_signed_tag(const struct commit *parent, struct commit_extra_h
desc = merge_remote_util(parent);
if (!desc || !desc->obj)
return;
- buf = repo_read_object_file(the_repository, &desc->obj->oid, &type,
- &size);
+ buf = odb_read_object(the_repository->objects, &desc->obj->oid,
+ &type, &size);
if (!buf || type != OBJ_TAG)
goto free_return;
if (!parse_signature(buf, size, &payload, &signature))
@@ -1706,7 +1715,7 @@ int commit_tree_extended(const char *msg, size_t msg_len,
/* Not having i18n.commitencoding is the same as having utf-8 */
encoding_is_utf8 = is_encoding_utf8(git_commit_encoding);
- assert_oid_type(tree, OBJ_TREE);
+ odb_assert_oid_type(the_repository->objects, tree, OBJ_TREE);
if (memchr(msg, '\0', msg_len))
return error("a NUL byte in commit log message not allowed.");
diff --git a/commit.h b/commit.h
index 70c870dae4..9630c076d6 100644
--- a/commit.h
+++ b/commit.h
@@ -201,10 +201,10 @@ const char *repo_logmsg_reencode(struct repository *r,
const char *skip_blank_lines(const char *msg);
-/** Removes the first commit from a list sorted by date, and adds all
- * of its parents.
- **/
-struct commit *pop_most_recent_commit(struct commit_list **list,
+struct prio_queue;
+
+/* Removes the first commit from a prio_queue and adds its parents. */
+struct commit *pop_most_recent_commit(struct prio_queue *queue,
unsigned int mark);
struct commit *pop_commit(struct commit_list **stack);
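
With the new signature, callers drive the date-ordered walk through a prio_queue keyed by commit date instead of a sorted commit_list. A sketch of such a caller, assuming git's internal headers (commit.h, prio-queue.h) and therefore not compilable on its own; the function name and mark bit are made up for illustration:

/*
 * Sketch only: assumes git's internal headers and the reworked
 * pop_most_recent_commit(); not a standalone program.
 */
#include "commit.h"
#include "prio-queue.h"

static int count_newest_first(struct commit *start, unsigned int mark)
{
	struct prio_queue queue = { compare_commits_by_commit_date };
	int nr = 0;

	start->object.flags |= mark;
	prio_queue_put(&queue, start);
	while (queue.nr) {
		struct commit *c = pop_most_recent_commit(&queue, mark);
		if (c)
			nr++;	/* commits are handed out newest first */
	}
	clear_prio_queue(&queue);
	return nr;
}
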
diff --git a/compat/bswap.h b/compat/bswap.h
index b34054f2bd..28635ebc69 100644
--- a/compat/bswap.h
+++ b/compat/bswap.h
@@ -32,78 +32,35 @@ static inline uint64_t default_bswap64(uint64_t val)
((val & (uint64_t)0xff00000000000000ULL) >> 56));
}
+/*
+ * __has_builtin is available since Clang 10 and GCC 10.
+ * Below is a fallback for older compilers.
+ */
+#ifndef __has_builtin
+# define __has_builtin(x) 0
+#endif
+
#undef bswap32
#undef bswap64
-#if defined(__GNUC__) && (defined(__i386__) || defined(__x86_64__))
-
-#define bswap32 git_bswap32
-static inline uint32_t git_bswap32(uint32_t x)
-{
- uint32_t result;
- if (__builtin_constant_p(x))
- result = default_swab32(x);
- else
- __asm__("bswap %0" : "=r" (result) : "0" (x));
- return result;
-}
-
-#define bswap64 git_bswap64
-#if defined(__x86_64__)
-static inline uint64_t git_bswap64(uint64_t x)
-{
- uint64_t result;
- if (__builtin_constant_p(x))
- result = default_bswap64(x);
- else
- __asm__("bswap %q0" : "=r" (result) : "0" (x));
- return result;
-}
-#else
-static inline uint64_t git_bswap64(uint64_t x)
-{
- union { uint64_t i64; uint32_t i32[2]; } tmp, result;
- if (__builtin_constant_p(x))
- result.i64 = default_bswap64(x);
- else {
- tmp.i64 = x;
- result.i32[0] = git_bswap32(tmp.i32[1]);
- result.i32[1] = git_bswap32(tmp.i32[0]);
- }
- return result.i64;
-}
-#endif
-
-#elif defined(_MSC_VER) && (defined(_M_IX86) || defined(_M_X64) || defined(_M_ARM64))
+#if defined(_MSC_VER) && (defined(_M_IX86) || defined(_M_X64) || defined(_M_ARM64))
#include <stdlib.h>
#define bswap32(x) _byteswap_ulong(x)
#define bswap64(x) _byteswap_uint64(x)
-#endif
+#define GIT_LITTLE_ENDIAN 1234
+#define GIT_BIG_ENDIAN 4321
+#define GIT_BYTE_ORDER GIT_LITTLE_ENDIAN
-#if defined(bswap32)
+#elif __has_builtin(__builtin_bswap32) && __has_builtin(__builtin_bswap64)
-#undef ntohl
-#undef htonl
-#define ntohl(x) bswap32(x)
-#define htonl(x) bswap32(x)
+#define bswap32(x) __builtin_bswap32((x))
+#define bswap64(x) __builtin_bswap64((x))
#endif
-#if defined(bswap64)
-
-#undef ntohll
-#undef htonll
-#define ntohll(x) bswap64(x)
-#define htonll(x) bswap64(x)
-
-#else
-
-#undef ntohll
-#undef htonll
-
#if defined(__BYTE_ORDER) && defined(__LITTLE_ENDIAN) && defined(__BIG_ENDIAN)
# define GIT_BYTE_ORDER __BYTE_ORDER
@@ -116,7 +73,13 @@ static inline uint64_t git_bswap64(uint64_t x)
# define GIT_LITTLE_ENDIAN LITTLE_ENDIAN
# define GIT_BIG_ENDIAN BIG_ENDIAN
-#else
+#elif defined(__BYTE_ORDER__) && defined(__ORDER_LITTLE_ENDIAN__) && defined(__ORDER_BIG_ENDIAN__)
+
+# define GIT_BYTE_ORDER __BYTE_ORDER__
+# define GIT_LITTLE_ENDIAN __ORDER_LITTLE_ENDIAN__
+# define GIT_BIG_ENDIAN __ORDER_BIG_ENDIAN__
+
+#elif !defined(GIT_BYTE_ORDER)
# define GIT_BIG_ENDIAN 4321
# define GIT_LITTLE_ENDIAN 1234
@@ -135,14 +98,33 @@ static inline uint64_t git_bswap64(uint64_t x)
#endif
+#undef ntohl
+#undef htonl
+#undef ntohll
+#undef htonll
+
#if GIT_BYTE_ORDER == GIT_BIG_ENDIAN
-# define ntohll(n) (n)
-# define htonll(n) (n)
+# define ntohl(x) (x)
+# define htonl(x) (x)
+# define ntohll(x) (x)
+# define htonll(x) (x)
#else
-# define ntohll(n) default_bswap64(n)
-# define htonll(n) default_bswap64(n)
-#endif
+# if defined(bswap32)
+# define ntohl(x) bswap32(x)
+# define htonl(x) bswap32(x)
+# else
+# define ntohl(x) default_swab32(x)
+# define htonl(x) default_swab32(x)
+# endif
+
+# if defined(bswap64)
+# define ntohll(x) bswap64(x)
+# define htonll(x) bswap64(x)
+# else
+# define ntohll(x) default_bswap64(x)
+# define htonll(x) default_bswap64(x)
+# endif
#endif
static inline uint16_t get_be16(const void *ptr)
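
The header now relies on __builtin_bswap32/64 wherever __has_builtin reports them, with a portable shift-based swap as the last resort. A standalone C illustration of that probe-and-fallback pattern (not the in-tree header):

#include <stdint.h>
#include <stdio.h>

/*
 * Standalone illustration: use the compiler builtin when __has_builtin
 * reports it, otherwise fall back to a portable shift-based swap.
 */
#ifndef __has_builtin
# define __has_builtin(x) 0
#endif

static uint64_t swap64(uint64_t v)
{
#if __has_builtin(__builtin_bswap64)
	return __builtin_bswap64(v);
#else
	return ((v & 0x00000000000000ffULL) << 56) |
	       ((v & 0x000000000000ff00ULL) << 40) |
	       ((v & 0x0000000000ff0000ULL) << 24) |
	       ((v & 0x00000000ff000000ULL) <<  8) |
	       ((v & 0x000000ff00000000ULL) >>  8) |
	       ((v & 0x0000ff0000000000ULL) >> 24) |
	       ((v & 0x00ff000000000000ULL) >> 40) |
	       ((v & 0xff00000000000000ULL) >> 56);
#endif
}

int main(void)
{
	/* prints 0807060504030201 */
	printf("%016llx\n", (unsigned long long)swap64(0x0102030405060708ULL));
	return 0;
}
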
diff --git a/compat/mingw-posix.h b/compat/mingw-posix.h
index 88e0cf9292..631a208684 100644
--- a/compat/mingw-posix.h
+++ b/compat/mingw-posix.h
@@ -96,6 +96,7 @@ struct sigaction {
unsigned sa_flags;
};
#define SA_RESTART 0
+#define SA_NOCLDSTOP 1
struct itimerval {
struct timeval it_value, it_interval;
diff --git a/compat/mingw.c b/compat/mingw.c
index 8a9972a1ca..5d69ae32f4 100644
--- a/compat/mingw.c
+++ b/compat/mingw.c
@@ -2561,7 +2561,9 @@ int setitimer(int type UNUSED, struct itimerval *in, struct itimerval *out)
int sigaction(int sig, struct sigaction *in, struct sigaction *out)
{
- if (sig != SIGALRM)
+ if (sig == SIGCHLD)
+ return -1;
+ else if (sig != SIGALRM)
return errno = EINVAL,
error("sigaction only implemented for SIGALRM");
if (out)
diff --git a/config.c b/config.c
index b18b5617fc..8a2d0b7916 100644
--- a/config.c
+++ b/config.c
@@ -31,7 +31,7 @@
#include "hashmap.h"
#include "string-list.h"
#include "object-name.h"
-#include "object-store.h"
+#include "odb.h"
#include "pager.h"
#include "path.h"
#include "utf8.h"
@@ -56,7 +56,6 @@ struct config_source {
} u;
enum config_origin_type origin_type;
const char *name;
- const char *path;
enum config_error_action default_error_action;
int linenr;
int eof;
@@ -173,14 +172,14 @@ static int handle_path_include(const struct key_value_info *kvi,
if (!is_absolute_path(path)) {
char *slash;
- if (!kvi || !kvi->path) {
+ if (!kvi || kvi->origin_type != CONFIG_ORIGIN_FILE) {
ret = error(_("relative config includes must come from files"));
goto cleanup;
}
- slash = find_last_dir_sep(kvi->path);
+ slash = find_last_dir_sep(kvi->filename);
if (slash)
- strbuf_add(&buf, kvi->path, slash - kvi->path + 1);
+ strbuf_add(&buf, kvi->filename, slash - kvi->filename + 1);
strbuf_addstr(&buf, path);
path = buf.buf;
}
@@ -224,11 +223,11 @@ static int prepare_include_condition_pattern(const struct key_value_info *kvi,
if (pat->buf[0] == '.' && is_dir_sep(pat->buf[1])) {
const char *slash;
- if (!kvi || !kvi->path)
+ if (!kvi || kvi->origin_type != CONFIG_ORIGIN_FILE)
return error(_("relative config include "
"conditionals must come from files"));
- strbuf_realpath(&path, kvi->path, 1);
+ strbuf_realpath(&path, kvi->filename, 1);
slash = find_last_dir_sep(path.buf);
if (!slash)
BUG("how is this possible?");
@@ -633,7 +632,6 @@ void kvi_from_param(struct key_value_info *out)
out->linenr = -1;
out->origin_type = CONFIG_ORIGIN_CMDLINE;
out->scope = CONFIG_SCOPE_COMMAND;
- out->path = NULL;
}
int git_config_parse_parameter(const char *text,
@@ -1036,7 +1034,6 @@ static void kvi_from_source(struct config_source *cs,
out->origin_type = cs->origin_type;
out->linenr = cs->linenr;
out->scope = scope;
- out->path = cs->path;
}
static int git_parse_source(struct config_source *cs, config_fn_t fn,
@@ -1537,9 +1534,11 @@ static int git_default_core_config(const char *var, const char *value,
!strcmp(var, "core.commentstring")) {
if (!value)
return config_error_nonbool(var);
- else if (!strcasecmp(value, "auto"))
+ else if (!strcasecmp(value, "auto")) {
auto_comment_line_char = 1;
- else if (value[0]) {
+ FREE_AND_NULL(comment_line_str_to_free);
+ comment_line_str = "#";
+ } else if (value[0]) {
if (strchr(value, '\n'))
return error(_("%s cannot contain newline"), var);
comment_line_str = value;
@@ -1595,11 +1594,6 @@ static int git_default_core_config(const char *var, const char *value,
return 0;
}
- if (!strcmp(var, "core.preloadindex")) {
- core_preload_index = git_config_bool(var, value);
- return 0;
- }
-
if (!strcmp(var, "core.createobject")) {
if (!value)
return config_error_nonbool(var);
@@ -1855,17 +1849,19 @@ static int do_config_from(struct config_source *top, config_fn_t fn,
static int do_config_from_file(config_fn_t fn,
const enum config_origin_type origin_type,
- const char *name, const char *path, FILE *f,
- void *data, enum config_scope scope,
+ const char *name, FILE *f, void *data,
+ enum config_scope scope,
const struct config_options *opts)
{
struct config_source top = CONFIG_SOURCE_INIT;
int ret;
+ if (origin_type == CONFIG_ORIGIN_FILE && (!name || !*name))
+ BUG("missing filename for CONFIG_ORIGIN_FILE");
+
top.u.file = f;
top.origin_type = origin_type;
top.name = name;
- top.path = path;
top.default_error_action = CONFIG_ERROR_DIE;
top.do_fgetc = config_file_fgetc;
top.do_ungetc = config_file_ungetc;
@@ -1880,8 +1876,8 @@ static int do_config_from_file(config_fn_t fn,
static int git_config_from_stdin(config_fn_t fn, void *data,
enum config_scope scope)
{
- return do_config_from_file(fn, CONFIG_ORIGIN_STDIN, "", NULL, stdin,
- data, scope, NULL);
+ return do_config_from_file(fn, CONFIG_ORIGIN_STDIN, "", stdin, data,
+ scope, NULL);
}
int git_config_from_file_with_options(config_fn_t fn, const char *filename,
@@ -1896,7 +1892,7 @@ int git_config_from_file_with_options(config_fn_t fn, const char *filename,
f = fopen_or_warn(filename, "r");
if (f) {
ret = do_config_from_file(fn, CONFIG_ORIGIN_FILE, filename,
- filename, f, data, scope, opts);
+ f, data, scope, opts);
fclose(f);
}
return ret;
@@ -1921,7 +1917,6 @@ int git_config_from_mem(config_fn_t fn,
top.u.buf.pos = 0;
top.origin_type = origin_type;
top.name = name;
- top.path = NULL;
top.default_error_action = CONFIG_ERROR_ERROR;
top.do_fgetc = config_buf_fgetc;
top.do_ungetc = config_buf_ungetc;
@@ -1942,7 +1937,7 @@ int git_config_from_blob_oid(config_fn_t fn,
unsigned long size;
int ret;
- buf = repo_read_object_file(repo, oid, &type, &size);
+ buf = odb_read_object(repo->objects, oid, &type, &size);
if (!buf)
return error(_("unable to load config blob object '%s'"), name);
if (type != OBJ_BLOB) {
@@ -2940,7 +2935,7 @@ static ssize_t write_pair(int fd, const char *key, const char *value,
if (value[0] == ' ')
quote = "\"";
for (i = 0; value[i]; i++)
- if (value[i] == ';' || value[i] == '#')
+ if (value[i] == ';' || value[i] == '#' || value[i] == '\r')
quote = "\"";
if (i && value[i - 1] == ' ')
quote = "\"";
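
The added check forces quoting of values that contain a carriage return, alongside the existing rules for comment introducers and leading or trailing blanks. The decision, pulled out as a standalone predicate for illustration (not git's actual serializer):

    #include <stdio.h>
    #include <string.h>

    /*
     * Does a config value need to be written inside double quotes?
     * Leading or trailing blanks, comment introducers and carriage
     * returns all force quoting, as in the hunk above.
     */
    static int value_needs_quotes(const char *value)
    {
        size_t len = strlen(value);

        if (!len)
            return 0;
        if (value[0] == ' ' || value[len - 1] == ' ')
            return 1;
        if (strpbrk(value, ";#\r"))
            return 1;
        return 0;
    }

    int main(void)
    {
        printf("%d\n", value_needs_quotes("plain"));          /* 0 */
        printf("%d\n", value_needs_quotes("has # comment"));  /* 1 */
        printf("%d\n", value_needs_quotes("ends with cr\r")); /* 1 */
        return 0;
    }
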
diff --git a/config.h b/config.h
index 29a0277483..cbb0f4fddc 100644
--- a/config.h
+++ b/config.h
@@ -122,14 +122,12 @@ struct key_value_info {
int linenr;
enum config_origin_type origin_type;
enum config_scope scope;
- const char *path;
};
#define KVI_INIT { \
.filename = NULL, \
.linenr = -1, \
.origin_type = CONFIG_ORIGIN_UNKNOWN, \
.scope = CONFIG_SCOPE_UNKNOWN, \
- .path = NULL, \
}
/* Captures additional information that a config callback can use. */
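
With the path member gone, a callback that wants to report where a value came from keys off origin_type plus the existing filename member. A pared-down sketch with stand-in types (illustration only, not the real config.h declarations):

    #include <stdio.h>

    /* Pared-down stand-ins for the types in config.h (illustration only). */
    enum config_origin_type {
        CONFIG_ORIGIN_UNKNOWN,
        CONFIG_ORIGIN_FILE,
        CONFIG_ORIGIN_CMDLINE
    };

    struct key_value_info {
        const char *filename;
        int linenr;
        enum config_origin_type origin_type;
    };

    /* origin_type plus filename is enough to say where a value came from. */
    static void report_origin(const struct key_value_info *kvi, const char *key)
    {
        if (kvi->origin_type == CONFIG_ORIGIN_FILE)
            printf("%s comes from %s:%d\n", key, kvi->filename, kvi->linenr);
        else
            printf("%s does not come from a file\n", key);
    }

    int main(void)
    {
        struct key_value_info kvi = { "/tmp/config", 3, CONFIG_ORIGIN_FILE };
        report_origin(&kvi, "user.name");
        return 0;
    }
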
diff --git a/config.mak.uname b/config.mak.uname
index 3e26bb074a..1691c6ae6e 100644
--- a/config.mak.uname
+++ b/config.mak.uname
@@ -190,9 +190,6 @@ ifeq ($(uname_S),SunOS)
SHELL_PATH = /bin/bash
SANE_TOOL_PATH = /usr/xpg6/bin:/usr/xpg4/bin
HAVE_ALLOCA_H = YesPlease
- NO_STRCASESTR = YesPlease
- NO_MEMMEM = YesPlease
- NO_MKDTEMP = YesPlease
NO_REGEX = YesPlease
NO_MSGFMT_EXTENDED_OPTIONS = YesPlease
HAVE_DEV_TTY = YesPlease
@@ -202,7 +199,10 @@ ifeq ($(uname_S),SunOS)
NO_IPV6 = YesPlease
NO_SOCKADDR_STORAGE = YesPlease
NO_UNSETENV = YesPlease
+ NO_MKDTEMP = YesPlease
+ NO_MEMMEM = YesPlease
NO_SETENV = YesPlease
+ NO_STRCASESTR = YesPlease
NO_STRLCPY = YesPlease
NO_STRTOUMAX = YesPlease
GIT_TEST_CMP = cmp
@@ -212,23 +212,45 @@ ifeq ($(uname_S),SunOS)
NO_IPV6 = YesPlease
NO_SOCKADDR_STORAGE = YesPlease
NO_UNSETENV = YesPlease
+ NO_MKDTEMP = YesPlease
+ NO_MEMMEM = YesPlease
NO_SETENV = YesPlease
+ NO_STRCASESTR = YesPlease
NO_STRLCPY = YesPlease
NO_STRTOUMAX = YesPlease
GIT_TEST_CMP = cmp
endif
ifeq ($(uname_R),5.8)
NO_UNSETENV = YesPlease
+ NO_MKDTEMP = YesPlease
+ NO_MEMMEM = YesPlease
NO_SETENV = YesPlease
+ NO_STRCASESTR = YesPlease
NO_STRTOUMAX = YesPlease
GIT_TEST_CMP = cmp
endif
ifeq ($(uname_R),5.9)
NO_UNSETENV = YesPlease
+ NO_MKDTEMP = YesPlease
+ NO_MEMMEM = YesPlease
NO_SETENV = YesPlease
+ NO_STRCASESTR = YesPlease
NO_STRTOUMAX = YesPlease
GIT_TEST_CMP = cmp
endif
+ ifeq ($(uname_R),5.10)
+ NO_UNSETENV = YesPlease
+ NO_MKDTEMP = YesPlease
+ NO_MEMMEM = YesPlease
+ NO_SETENV = YesPlease
+ NO_STRCASESTR = YesPlease
+ GIT_TEST_CMP = cmp
+ endif
+ ifeq ($(uname_R),5.11)
+ NO_UNSETENV = YesPlease
+ NO_SETENV = YesPlease
+ GIT_TEST_CMP = cmp
+ endif
INSTALL = /usr/ucb/install
TAR = gtar
BASIC_CFLAGS += -D__EXTENSIONS__ -D__sun__
@@ -280,16 +302,13 @@ ifeq ($(uname_S),FreeBSD)
ifeq ($(firstword $(subst -, ,$(uname_R))),10.1)
OLD_ICONV = YesPlease
endif
- NO_MEMMEM = YesPlease
+ ifeq ($(shell v=$(uname_R) && test $${v%%.*} -lt 12 && echo 1),1)
+ NO_MEMMEM = UnfortunatelyYes
+ endif
BASIC_CFLAGS += -I/usr/local/include
BASIC_LDFLAGS += -L/usr/local/lib
DIR_HAS_BSD_GROUP_SEMANTICS = YesPlease
USE_ST_TIMESPEC = YesPlease
- ifeq ($(shell expr "$(uname_R)" : '4\.'),2)
- PTHREAD_LIBS = -pthread
- NO_UINTMAX_T = YesPlease
- NO_STRTOUMAX = YesPlease
- endif
PYTHON_PATH = /usr/local/bin/python
PERL_PATH = /usr/local/bin/perl
HAVE_PATHS_H = YesPlease
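
The FreeBSD fragment now sets NO_MEMMEM only for releases older than 12, where libc lacks memmem(). Where such a knob is set, a portable fallback along these lines can stand in for the libc function; this is an illustrative naive scan, not git's compat implementation:

    #include <stdio.h>
    #include <string.h>

    /* Naive replacement: find needle inside haystack, or return NULL. */
    static void *fallback_memmem(const void *haystack, size_t hlen,
                                 const void *needle, size_t nlen)
    {
        const char *h = haystack;

        if (!nlen)
            return (void *)h;
        while (hlen >= nlen) {
            if (!memcmp(h, needle, nlen))
                return (void *)h;
            h++;
            hlen--;
        }
        return NULL;
    }

    int main(void)
    {
        const char buf[] = "find the needle in here";
        char *hit = fallback_memmem(buf, sizeof(buf) - 1, "needle", 6);

        printf("%s\n", hit ? hit : "not found");
        return 0;
    }
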
diff --git a/configure.ac b/configure.ac
index f6caab919a..cfb50112bf 100644
--- a/configure.ac
+++ b/configure.ac
@@ -1068,32 +1068,6 @@ AC_CHECK_LIB([iconv], [locale_charset],
GIT_CONF_SUBST([CHARSET_LIB])
#
-# Define HAVE_SYSINFO=YesPlease if sysinfo is available.
-#
-AC_DEFUN([HAVE_SYSINFO_SRC], [
-AC_LANG_PROGRAM([[
-#include <stdint.h>
-#include <sys/sysinfo.h>
-]], [[
-struct sysinfo si;
-uint64_t t = 0;
-if (!sysinfo(&si)) {
- t = si.totalram;
- if (si.mem_unit > 1)
- t *= (uint64_t)si.mem_unit;
-}
-return t;
-]])])
-
-AC_MSG_CHECKING([for sysinfo])
-AC_COMPILE_IFELSE([HAVE_SYSINFO_SRC],
- [AC_MSG_RESULT([yes])
- HAVE_SYSINFO=YesPlease],
- [AC_MSG_RESULT([no])
- HAVE_SYSINFO=])
-GIT_CONF_SUBST([HAVE_SYSINFO])
-
-#
# Define HAVE_CLOCK_GETTIME=YesPlease if clock_gettime is available.
GIT_CHECK_FUNC(clock_gettime,
[HAVE_CLOCK_GETTIME=YesPlease],
@@ -1148,14 +1122,6 @@ GIT_CHECK_FUNC(strlcpy,
[NO_STRLCPY=YesPlease])
GIT_CONF_SUBST([NO_STRLCPY])
#
-# Define NO_UINTMAX_T if your platform does not have uintmax_t
-AC_CHECK_TYPE(uintmax_t,
-[NO_UINTMAX_T=],
-[NO_UINTMAX_T=YesPlease],[
-#include <inttypes.h>
-])
-GIT_CONF_SUBST([NO_UINTMAX_T])
-#
# Define NO_STRTOUMAX if you don't have strtoumax in the C library.
GIT_CHECK_FUNC(strtoumax,
[NO_STRTOUMAX=],
@@ -1221,6 +1187,41 @@ AC_COMPILE_IFELSE([BSD_SYSCTL_SRC],
HAVE_BSD_SYSCTL=])
GIT_CONF_SUBST([HAVE_BSD_SYSCTL])
+#
+# Define HAVE_SYSINFO=YesPlease if sysinfo is available.
+#
+
+HAVE_SYSINFO=
+# on a *BSD system, sysctl() takes precedence over the
+# sysinfo() compatibility library (if installed).
+
+if test -z "$HAVE_BSD_SYSCTL"; then
+
+ AC_DEFUN([HAVE_SYSINFO_SRC], [
+ AC_LANG_PROGRAM([[
+ #include <stdint.h>
+ #include <sys/sysinfo.h>
+ ]], [[
+ struct sysinfo si;
+ uint64_t t = 0;
+ if (!sysinfo(&si)) {
+ t = si.totalram;
+ if (si.mem_unit > 1)
+ t *= (uint64_t)si.mem_unit;
+ }
+ return t;
+ ]])])
+
+ AC_MSG_CHECKING([for sysinfo])
+ AC_COMPILE_IFELSE([HAVE_SYSINFO_SRC],
+ [AC_MSG_RESULT([yes])
+ HAVE_SYSINFO=YesPlease],
+ [AC_MSG_RESULT([no])
+ HAVE_SYSINFO=])
+ GIT_CONF_SUBST([HAVE_SYSINFO])
+
+fi
+
## Other checks.
# Define NO_SYMLINK_HEAD if you never want .git/HEAD to be a symbolic link.
# Enable it on Windows. By default, symrefs are still used.
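
The relocated configure probe compiles a program that multiplies totalram by mem_unit; the same computation at run time looks roughly like the following (Linux-specific, and only meaningful where the HAVE_SYSINFO knob ends up set):

    #include <stdio.h>
    #include <stdint.h>
    #include <sys/sysinfo.h>

    /* Total physical memory in bytes, or 0 if sysinfo() fails. */
    static uint64_t total_ram_bytes(void)
    {
        struct sysinfo si;
        uint64_t total = 0;

        if (!sysinfo(&si)) {
            total = si.totalram;
            if (si.mem_unit > 1)
                total *= (uint64_t)si.mem_unit;
        }
        return total;
    }

    int main(void)
    {
        printf("%llu bytes of RAM\n", (unsigned long long)total_ram_bytes());
        return 0;
    }
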
diff --git a/connect.c b/connect.c
index 3280435331..e77287f426 100644
--- a/connect.c
+++ b/connect.c
@@ -251,7 +251,7 @@ static void process_capabilities(struct packet_reader *reader, size_t *linelen)
reader->hash_algo = &hash_algos[hash_algo];
free(hash_name);
} else {
- reader->hash_algo = &hash_algos[GIT_HASH_SHA1];
+ reader->hash_algo = &hash_algos[GIT_HASH_SHA1_LEGACY];
}
}
@@ -500,7 +500,7 @@ static void send_capabilities(int fd_out, struct packet_reader *reader)
reader->hash_algo = &hash_algos[hash_algo];
packet_write_fmt(fd_out, "object-format=%s", reader->hash_algo->name);
} else {
- reader->hash_algo = &hash_algos[GIT_HASH_SHA1];
+ reader->hash_algo = &hash_algos[GIT_HASH_SHA1_LEGACY];
}
if (server_feature_v2("promisor-remote", &promisor_remote_info)) {
char *reply = promisor_remote_reply(promisor_remote_info);
@@ -665,7 +665,7 @@ int server_supports_hash(const char *desired, int *feature_supported)
if (feature_supported)
*feature_supported = !!hash;
if (!hash) {
- hash = hash_algos[GIT_HASH_SHA1].name;
+ hash = hash_algos[GIT_HASH_SHA1_LEGACY].name;
len = strlen(hash);
}
while (hash) {
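
All three hunks encode the same rule: when the peer does not advertise a usable object-format capability, fall back to SHA-1 (now spelled GIT_HASH_SHA1_LEGACY). A tiny sketch of that negotiation default, using a hypothetical table rather than git's hash_algos array:

    #include <stdio.h>
    #include <string.h>

    /*
     * If the server names a hash we understand, use it; otherwise fall
     * back to SHA-1, as the hunks above do (illustrative table only).
     */
    static const char *negotiate_object_format(const char *advertised)
    {
        static const char *known[] = { "sha1", "sha256" };
        size_t i;

        if (advertised)
            for (i = 0; i < sizeof(known) / sizeof(known[0]); i++)
                if (!strcmp(advertised, known[i]))
                    return known[i];
        return "sha1";    /* legacy default when nothing usable is offered */
    }

    int main(void)
    {
        printf("%s\n", negotiate_object_format(NULL));      /* sha1 */
        printf("%s\n", negotiate_object_format("sha256"));  /* sha256 */
        return 0;
    }
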
diff --git a/connected.c b/connected.c
index 4415388beb..18c13245d8 100644
--- a/connected.c
+++ b/connected.c
@@ -3,7 +3,7 @@
#include "git-compat-util.h"
#include "gettext.h"
#include "hex.h"
-#include "object-store.h"
+#include "odb.h"
#include "run-command.h"
#include "sigchain.h"
#include "connected.h"
diff --git a/contrib/coccinelle/commit.cocci b/contrib/coccinelle/commit.cocci
index af6dd4c20c..c5284604c5 100644
--- a/contrib/coccinelle/commit.cocci
+++ b/contrib/coccinelle/commit.cocci
@@ -25,7 +25,8 @@ expression s;
// functions, then the recommended transformation will be bogus with
// repo_get_commit_tree() on the LHS.
@@
-identifier f !~ "^(repo_get_commit_tree|get_commit_tree_in_graph_one|load_tree_for_commit|set_commit_tree)$";
+identifier f != { repo_get_commit_tree, get_commit_tree_in_graph_one,
+ load_tree_for_commit, set_commit_tree };
expression c;
@@
f(...) {<...
diff --git a/contrib/coccinelle/the_repository.cocci b/contrib/coccinelle/the_repository.cocci
index 765ad68967..ea7fe1c8db 100644
--- a/contrib/coccinelle/the_repository.cocci
+++ b/contrib/coccinelle/the_repository.cocci
@@ -77,7 +77,7 @@
|
- diff_setup
+ repo_diff_setup
-// object-store.h
+// odb.h
|
- read_object_file
+ repo_read_object_file
diff --git a/contrib/credential/netrc/git-credential-netrc.perl b/contrib/credential/netrc/git-credential-netrc.perl
index 9fb998ae09..3c0a532d0e 100755
--- a/contrib/credential/netrc/git-credential-netrc.perl
+++ b/contrib/credential/netrc/git-credential-netrc.perl
@@ -1,4 +1,4 @@
-#!/usr/bin/perl
+#!/usr/bin/env perl
use strict;
use warnings;
@@ -267,8 +267,16 @@ sub load_netrc {
if (!defined $nentry->{machine}) {
next;
}
- if (defined $nentry->{port} && $nentry->{port} =~ m/^\d+$/) {
- $num_port = $nentry->{port};
+ if (defined $nentry->{port}) {
+ $num_port = Git::port_num($nentry->{port});
+ unless ($num_port) {
+ printf(STDERR "ignoring invalid port `%s' " .
+ "from netrc file\n", $nentry->{port});
+ }
+ # Since we've already validated and converted
+ # the port to its numerical value, do not
+ # capture it as the `protocol' value, as used
+ # to be the case for symbolic port names.
delete $nentry->{port};
}
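
The helper now normalizes the netrc port through Git::port_num and warns about entries whose port is neither numeric nor a known service name. A comparable normalization step in C, for illustration only (getservbyname() consults the system services database; this is not the Git.pm implementation):

    #include <stdio.h>
    #include <stdlib.h>
    #include <stdint.h>
    #include <netdb.h>
    #include <arpa/inet.h>

    /*
     * Numeric port for a decimal string or a symbolic service name
     * ("imaps" -> 993 on most systems), or 0 if it cannot be resolved.
     */
    static int port_number(const char *port)
    {
        char *end;
        long n = strtol(port, &end, 10);
        struct servent *se;

        if (*port && !*end)
            return (n > 0 && n <= 65535) ? (int)n : 0;
        se = getservbyname(port, NULL);
        return se ? ntohs((uint16_t)se->s_port) : 0;
    }

    int main(void)
    {
        printf("%d\n", port_number("993"));     /* 993 */
        printf("%d\n", port_number("imaps"));   /* 993, if listed in services */
        printf("%d\n", port_number("bogus"));   /* 0 */
        return 0;
    }
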
diff --git a/contrib/credential/netrc/meson.build b/contrib/credential/netrc/meson.build
index 3d74547c8a..16fa69e317 100644
--- a/contrib/credential/netrc/meson.build
+++ b/contrib/credential/netrc/meson.build
@@ -17,6 +17,6 @@ if get_option('tests')
workdir: meson.current_source_dir(),
env: credential_netrc_testenv,
depends: test_dependencies + bin_wrappers + [credential_netrc],
- timeout: 0,
+ kwargs: test_kwargs,
)
endif
diff --git a/contrib/credential/netrc/test.pl b/contrib/credential/netrc/test.pl
index 67a0ede564..8a7fc2588a 100755
--- a/contrib/credential/netrc/test.pl
+++ b/contrib/credential/netrc/test.pl
@@ -45,7 +45,7 @@ chmod 0600, $netrc;
diag "Testing with invalid data\n";
$cred = run_credential(['-f', $netrc, 'get'],
"bad data");
-ok(scalar keys %$cred == 4, "Got first found keys with bad data");
+ok(scalar keys %$cred == 3, "Got first found keys with bad data");
diag "Testing netrc file for a missing corovamilkbar entry\n";
$cred = run_credential(['-f', $netrc, 'get'],
@@ -64,12 +64,12 @@ is($cred->{username}, 'carol', "Got correct Github username");
diag "Testing netrc file for a username-specific entry\n";
$cred = run_credential(['-f', $netrc, 'get'],
- { host => 'imap', username => 'bob' });
+ { host => 'imap:993', username => 'bob' });
-ok(scalar keys %$cred == 2, "Got 2 username-specific keys");
+# Only the password field gets returned.
+ok(scalar keys %$cred == 1, "Got 1 username-specific keys");
is($cred->{password}, 'bobwillknow', "Got correct user-specific password");
-is($cred->{protocol}, 'imaps', "Got correct user-specific protocol");
diag "Testing netrc file for a host:port-specific entry\n";
$cred = run_credential(['-f', $netrc, 'get'],
diff --git a/contrib/credential/wincred/git-credential-wincred.c b/contrib/credential/wincred/git-credential-wincred.c
index 04145b5118..5683846b4b 100644
--- a/contrib/credential/wincred/git-credential-wincred.c
+++ b/contrib/credential/wincred/git-credential-wincred.c
@@ -39,6 +39,14 @@ static void *xmalloc(size_t size)
static WCHAR *wusername, *password, *protocol, *host, *path, target[1024],
*password_expiry_utc, *oauth_refresh_token;
+static void target_append(const WCHAR *src)
+{
+ size_t avail = ARRAY_SIZE(target) - wcslen(target) - 1; /* -1 for NUL */
+ if (avail < wcslen(src))
+ die("target buffer overflow");
+ wcsncat(target, src, avail);
+}
+
static void write_item(const char *what, LPCWSTR wbuf, int wlen)
{
char *buf;
@@ -330,17 +338,17 @@ int main(int argc, char *argv[])
/* prepare 'target', the unique key for the credential */
wcscpy(target, L"git:");
- wcsncat(target, protocol, ARRAY_SIZE(target));
- wcsncat(target, L"://", ARRAY_SIZE(target));
+ target_append(protocol);
+ target_append(L"://");
if (wusername) {
- wcsncat(target, wusername, ARRAY_SIZE(target));
- wcsncat(target, L"@", ARRAY_SIZE(target));
+ target_append(wusername);
+ target_append(L"@");
}
if (host)
- wcsncat(target, host, ARRAY_SIZE(target));
+ target_append(host);
if (path) {
- wcsncat(target, L"/", ARRAY_SIZE(target));
- wcsncat(target, path, ARRAY_SIZE(target));
+ target_append(L"/");
+ target_append(path);
}
if (!strcmp(argv[1], "get"))
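
The bug being fixed here is that wcsncat()'s third argument bounds how many characters may be appended, not the destination's total size, so passing ARRAY_SIZE(target) could overflow the buffer. The replacement helper checks the remaining capacity first and dies on overflow. The same pattern in narrow characters, as a standalone sketch:

    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>

    #define ARRAY_SIZE(x) (sizeof(x) / sizeof((x)[0]))

    static char target[32];

    /*
     * Append src, refusing to overflow the fixed-size buffer.  The third
     * argument of strncat()/wcsncat() bounds what may be appended, not
     * the destination size, so the capacity check must be explicit.
     */
    static void target_append(const char *src)
    {
        size_t avail = ARRAY_SIZE(target) - strlen(target) - 1; /* -1 for NUL */

        if (avail < strlen(src)) {
            fprintf(stderr, "fatal: target buffer overflow\n");
            exit(1);
        }
        strncat(target, src, avail);
    }

    int main(void)
    {
        strcpy(target, "git:");
        target_append("https");
        target_append("://");
        target_append("example.com");
        puts(target);   /* git:https://example.com */
        return 0;
    }
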
diff --git a/contrib/emacs/README b/contrib/emacs/README
deleted file mode 100644
index 977a16f1e3..0000000000
--- a/contrib/emacs/README
+++ /dev/null
@@ -1,33 +0,0 @@
-This directory used to contain various modules for Emacs support.
-
-These were added shortly after Git was first released. Since then
-Emacs's own support for Git got better than what was offered by these
-modes. There are also popular 3rd-party Git modes such as Magit which
-offer replacements for these.
-
-The following modules were available, and can be dug up from the Git
-history:
-
-* git.el:
-
- Wrapper for "git status" that provided access to other git commands.
-
- Modern alternatives to this include Magit, and VC mode that ships
- with Emacs.
-
-* git-blame.el:
-
- A wrapper for "git blame" written before Emacs's own vc-annotate
- mode learned to invoke git-blame, which can be done via C-x v g.
-
-* vc-git.el:
-
- This file used to contain the VC-mode backend for git, but it is no
- longer distributed with git. It is now maintained as part of Emacs
- and included in standard Emacs distributions starting from version
- 22.2.
-
- If you have an earlier Emacs version, upgrading to Emacs 22 is
- recommended, since the VC mode in older Emacs is not generic enough
- to be able to support git in a reasonable manner, and no attempt has
- been made to backport vc-git.el.
diff --git a/contrib/emacs/git-blame.el b/contrib/emacs/git-blame.el
deleted file mode 100644
index 6a8a2b8ff1..0000000000
--- a/contrib/emacs/git-blame.el
+++ /dev/null
@@ -1,6 +0,0 @@
-(error "git-blame.el no longer ships with git. It's recommended
-to replace its use with Emacs's own vc-annotate. See
-contrib/emacs/README in git's
-sources (https://github.com/git/git/blob/master/contrib/emacs/README)
-for more info on suggested alternatives and for why this
-happened.")
diff --git a/contrib/emacs/git.el b/contrib/emacs/git.el
deleted file mode 100644
index 03f926281f..0000000000
--- a/contrib/emacs/git.el
+++ /dev/null
@@ -1,6 +0,0 @@
-(error "git.el no longer ships with git. It's recommended to
-replace its use with Magit, or simply delete references to git.el
-in your initialization file(s). See contrib/emacs/README in git's
-sources (https://github.com/git/git/blob/master/contrib/emacs/README)
-for suggested alternatives and for why this happened. Emacs's own
-VC mode and Magit are viable alternatives.")
diff --git a/contrib/examples/README b/contrib/examples/README
deleted file mode 100644
index 18bc60b021..0000000000
--- a/contrib/examples/README
+++ /dev/null
@@ -1,20 +0,0 @@
-This directory used to contain scripted implementations of builtins
-that have since been rewritten in C.
-
-They have now been removed, but can be retrieved from an older commit
-that removed them from this directory.
-
-They're interesting for their reference value to any aspiring plumbing
-users who want to learn how pieces can be fit together, but in many
-cases have drifted far enough from the actual implementations Git uses
-that they may no longer be instructive.
-
-Other things that can be useful:
-
- * Some commands such as git-gc wrap other commands, and what they're
- doing behind the scenes can be seen by running them under
- GIT_TRACE=1
-
- * Doing `git log` on paths matching '*--helper.c' will show
- incremental effort in the direction of moving existing shell
- scripts to C.
diff --git a/contrib/git-resurrect.sh b/contrib/git-resurrect.sh
deleted file mode 100755
index d843df3afd..0000000000
--- a/contrib/git-resurrect.sh
+++ /dev/null
@@ -1,181 +0,0 @@
-#!/bin/sh
-
-USAGE="[-a] [-r] [-m] [-t] [-n] [-b <newname>] <name>"
-LONG_USAGE="git-resurrect attempts to find traces of a branch tip
-called <name>, and tries to resurrect it. Currently, the reflog is
-searched for checkout messages, and with -r also merge messages. With
--m and -t, the history of all refs is scanned for Merge <name> into
-other/Merge <other> into <name> (respectively) commit subjects, which
-is rather slow but allows you to resurrect other people's topic
-branches."
-
-OPTIONS_KEEPDASHDASH=
-OPTIONS_STUCKLONG=
-OPTIONS_SPEC="\
-git resurrect $USAGE
---
-b,branch= save branch as <newname> instead of <name>
-a,all same as -l -r -m -t
-k,keep-going full rev-list scan (instead of first match)
-l,reflog scan reflog for checkouts (enabled by default)
-r,reflog-merges scan for merges recorded in reflog
-m,merges scan for merges into other branches (slow)
-t,merge-targets scan for merges of other branches into <name>
-n,dry-run don't recreate the branch"
-
-. git-sh-setup
-
-search_reflog () {
- sed -ne 's~^\([^ ]*\) .* checkout: moving from '"$1"' .*~\1~p' \
- < "$GIT_DIR"/logs/HEAD
-}
-
-search_reflog_merges () {
- git rev-parse $(
- sed -ne 's~^[^ ]* \([^ ]*\) .* merge '"$1"':.*~\1^2~p' \
- < "$GIT_DIR"/logs/HEAD
- )
-}
-
-oid_pattern=$(git hash-object --stdin </dev/null | sed -e 's/./[0-9a-f]/g')
-
-search_merges () {
- git rev-list --all --grep="Merge branch '$1'" \
- --pretty=tformat:"%P %s" |
- sed -ne "/^$oid_pattern \($oid_pattern\) Merge .*/ {s//\1/p;$early_exit}"
-}
-
-search_merge_targets () {
- git rev-list --all --grep="Merge branch '[^']*' into $branch\$" \
- --pretty=tformat:"%H %s" --all |
- sed -ne "/^\($oid_pattern\) Merge .*/ {s//\1/p;$early_exit} "
-}
-
-dry_run=
-early_exit=q
-scan_reflog=t
-scan_reflog_merges=
-scan_merges=
-scan_merge_targets=
-new_name=
-
-while test "$#" != 0; do
- case "$1" in
- -b|--branch)
- shift
- new_name="$1"
- ;;
- -n|--dry-run)
- dry_run=t
- ;;
- --no-dry-run)
- dry_run=
- ;;
- -k|--keep-going)
- early_exit=
- ;;
- --no-keep-going)
- early_exit=q
- ;;
- -m|--merges)
- scan_merges=t
- ;;
- --no-merges)
- scan_merges=
- ;;
- -l|--reflog)
- scan_reflog=t
- ;;
- --no-reflog)
- scan_reflog=
- ;;
- -r|--reflog_merges)
- scan_reflog_merges=t
- ;;
- --no-reflog_merges)
- scan_reflog_merges=
- ;;
- -t|--merge-targets)
- scan_merge_targets=t
- ;;
- --no-merge-targets)
- scan_merge_targets=
- ;;
- -a|--all)
- scan_reflog=t
- scan_reflog_merges=t
- scan_merges=t
- scan_merge_targets=t
- ;;
- --)
- shift
- break
- ;;
- *)
- usage
- ;;
- esac
- shift
-done
-
-test "$#" = 1 || usage
-
-all_strategies="$scan_reflog$scan_reflog_merges$scan_merges$scan_merge_targets"
-if test -z "$all_strategies"; then
- die "must enable at least one of -lrmt"
-fi
-
-branch="$1"
-test -z "$new_name" && new_name="$branch"
-
-if test ! -z "$scan_reflog"; then
- if test -r "$GIT_DIR"/logs/HEAD; then
- candidates="$(search_reflog $branch)"
- else
- die 'reflog scanning requested, but' \
- '$GIT_DIR/logs/HEAD not readable'
- fi
-fi
-if test ! -z "$scan_reflog_merges"; then
- if test -r "$GIT_DIR"/logs/HEAD; then
- candidates="$candidates $(search_reflog_merges $branch)"
- else
- die 'reflog scanning requested, but' \
- '$GIT_DIR/logs/HEAD not readable'
- fi
-fi
-if test ! -z "$scan_merges"; then
- candidates="$candidates $(search_merges $branch)"
-fi
-if test ! -z "$scan_merge_targets"; then
- candidates="$candidates $(search_merge_targets $branch)"
-fi
-
-candidates="$(git rev-parse $candidates | sort -u)"
-
-if test -z "$candidates"; then
- hint=
- test "z$all_strategies" != "ztttt" \
- && hint=" (maybe try again with -a)"
- die "no candidates for $branch found$hint"
-fi
-
-echo "** Candidates for $branch **"
-for cmt in $candidates; do
- git --no-pager log --pretty=tformat:"%ct:%h [%cr] %s" --abbrev-commit -1 $cmt
-done \
-| sort -n | cut -d: -f2-
-
-newest="$(git rev-list -1 $candidates)"
-if test ! -z "$dry_run"; then
- printf "** Most recent: "
- git --no-pager log -1 --pretty=tformat:"%h %s" $newest
-elif ! git rev-parse --verify --quiet $new_name >/dev/null; then
- printf "** Restoring $new_name to "
- git --no-pager log -1 --pretty=tformat:"%h %s" $newest
- git branch $new_name $newest
-else
- printf "Most recent: "
- git --no-pager log -1 --pretty=tformat:"%h %s" $newest
- echo "** $new_name already exists, doing nothing"
-fi
diff --git a/contrib/hooks/multimail/README.Git b/contrib/hooks/multimail/README.Git
deleted file mode 100644
index c427efc7bd..0000000000
--- a/contrib/hooks/multimail/README.Git
+++ /dev/null
@@ -1,7 +0,0 @@
-git-multimail is developed as an independent project at the following
-website:
-
- https://github.com/git-multimail/git-multimail
-
-Please refer to that project page for information about how to report
-bugs or contribute to git-multimail.
diff --git a/contrib/hooks/post-receive-email b/contrib/hooks/post-receive-email
deleted file mode 100755
index ff565eb3d8..0000000000
--- a/contrib/hooks/post-receive-email
+++ /dev/null
@@ -1,759 +0,0 @@
-#!/bin/sh
-#
-# Copyright (c) 2007 Andy Parkins
-#
-# An example hook script to mail out commit update information.
-#
-# NOTE: This script is no longer under active development. There
-# is another script, git-multimail, which is more capable and
-# configurable and is largely backwards-compatible with this script;
-# please see "contrib/hooks/multimail/". For instructions on how to
-# migrate from post-receive-email to git-multimail, please see
-# "README.migrate-from-post-receive-email" in that directory.
-#
-# This hook sends emails listing new revisions to the repository
-# introduced by the change being reported. The rule is that (for
-# branch updates) each commit will appear on one email and one email
-# only.
-#
-# This hook is stored in the contrib/hooks directory. Your distribution
-# will have put this somewhere standard. You should make this script
-# executable then link to it in the repository you would like to use it in.
-# For example, on debian the hook is stored in
-# /usr/share/git-core/contrib/hooks/post-receive-email:
-#
-# cd /path/to/your/repository.git
-# ln -sf /usr/share/git-core/contrib/hooks/post-receive-email hooks/post-receive
-#
-# This hook script assumes it is enabled on the central repository of a
-# project, with all users pushing only to it and not between each other. It
-# will still work if you don't operate in that style, but it would become
-# possible for the email to be from someone other than the person doing the
-# push.
-#
-# To help with debugging and use on pre-v1.5.1 git servers, this script will
-# also obey the interface of hooks/update, taking its arguments on the
-# command line. Unfortunately, hooks/update is called once for each ref.
-# To avoid firing one email per ref, this script just prints its output to
-# the screen when used in this mode. The output can then be redirected if
-# wanted.
-#
-# Config
-# ------
-# hooks.mailinglist
-# This is the list that all pushes will go to; leave it blank to not send
-# emails for every ref update.
-# hooks.announcelist
-# This is the list that all pushes of annotated tags will go to. Leave it
-# blank to default to the mailinglist field. The announce emails lists
-# the short log summary of the changes since the last annotated tag.
-# hooks.envelopesender
-# If set then the -f option is passed to sendmail to allow the envelope
-# sender address to be set
-# hooks.emailprefix
-# All emails have their subjects prefixed with this prefix, or "[SCM]"
-# if emailprefix is unset, to aid filtering
-# hooks.showrev
-# The shell command used to format each revision in the email, with
-# "%s" replaced with the commit id. Defaults to "git rev-list -1
-# --pretty %s", displaying the commit id, author, date and log
-# message. To list full patches separated by a blank line, you
-# could set this to "git show -C %s; echo".
-# To list a gitweb/cgit URL *and* a full patch for each change set, use this:
-# "t=%s; printf 'http://.../?id=%%s' \$t; echo;echo; git show -C \$t; echo"
-# Be careful if "..." contains things that will be expanded by shell "eval"
-# or printf.
-# hooks.emailmaxlines
-# The maximum number of lines that should be included in the generated
-# email body. If not specified, there is no limit.
-# Lines beyond the limit are suppressed and counted, and a final
-# line is added indicating the number of suppressed lines.
-# hooks.diffopts
-# Alternate options for the git diff-tree invocation that shows changes.
-# Default is "--stat --summary --find-copies-harder". Add -p to those
-# options to include a unified diff of changes in addition to the usual
-# summary output.
-#
-# Notes
-# -----
-# All emails include the headers "X-Git-Refname", "X-Git-Oldrev",
-# "X-Git-Newrev", and "X-Git-Reftype" to enable fine tuned filtering and
-# give information for debugging.
-#
-
-# ---------------------------- Functions
-
-#
-# Function to prepare for email generation. This decides what type
-# of update this is and whether an email should even be generated.
-#
-prep_for_email()
-{
- # --- Arguments
- oldrev=$(git rev-parse $1)
- newrev=$(git rev-parse $2)
- refname="$3"
-
- # --- Interpret
- # 0000->1234 (create)
- # 1234->2345 (update)
- # 2345->0000 (delete)
- if expr "$oldrev" : '0*$' >/dev/null
- then
- change_type="create"
- else
- if expr "$newrev" : '0*$' >/dev/null
- then
- change_type="delete"
- else
- change_type="update"
- fi
- fi
-
- # --- Get the revision types
- newrev_type=$(git cat-file -t $newrev 2> /dev/null)
- oldrev_type=$(git cat-file -t "$oldrev" 2> /dev/null)
- case "$change_type" in
- create|update)
- rev="$newrev"
- rev_type="$newrev_type"
- ;;
- delete)
- rev="$oldrev"
- rev_type="$oldrev_type"
- ;;
- esac
-
-	# The revision type tells us what type the commit is; combined with
- # the location of the ref we can decide between
- # - working branch
- # - tracking branch
-	#  - unannotated tag
- # - annotated tag
- case "$refname","$rev_type" in
- refs/tags/*,commit)
- # un-annotated tag
- refname_type="tag"
- short_refname=${refname##refs/tags/}
- ;;
- refs/tags/*,tag)
- # annotated tag
- refname_type="annotated tag"
- short_refname=${refname##refs/tags/}
- # change recipients
- if [ -n "$announcerecipients" ]; then
- recipients="$announcerecipients"
- fi
- ;;
- refs/heads/*,commit)
- # branch
- refname_type="branch"
- short_refname=${refname##refs/heads/}
- ;;
- refs/remotes/*,commit)
- # tracking branch
- refname_type="tracking branch"
- short_refname=${refname##refs/remotes/}
- echo >&2 "*** Push-update of tracking branch, $refname"
- echo >&2 "*** - no email generated."
- return 1
- ;;
- *)
- # Anything else (is there anything else?)
- echo >&2 "*** Unknown type of update to $refname ($rev_type)"
- echo >&2 "*** - no email generated"
- return 1
- ;;
- esac
-
- # Check if we've got anyone to send to
- if [ -z "$recipients" ]; then
- case "$refname_type" in
- "annotated tag")
- config_name="hooks.announcelist"
- ;;
- *)
- config_name="hooks.mailinglist"
- ;;
- esac
- echo >&2 "*** $config_name is not set so no email will be sent"
- echo >&2 "*** for $refname update $oldrev->$newrev"
- return 1
- fi
-
- return 0
-}
-
-#
-# Top level email generation function. This calls the appropriate
-# body-generation routine after outputting the common header.
-#
-# Note this function doesn't actually generate any email output, that is
-# taken care of by the functions it calls:
-# - generate_email_header
-# - generate_create_XXXX_email
-# - generate_update_XXXX_email
-# - generate_delete_XXXX_email
-# - generate_email_footer
-#
-# Note also that this function cannot 'exit' from the script; when this
-# function is running (in hook script mode), the send_mail() function
-# is already executing in another process, connected via a pipe, and
-# if this function exits early, whatever has been generated to that
-# point will be sent as an email... even if nothing has been generated.
-#
-generate_email()
-{
- # Email parameters
- # The email subject will contain the best description of the ref
- # that we can build from the parameters
- describe=$(git describe $rev 2>/dev/null)
- if [ -z "$describe" ]; then
- describe=$rev
- fi
-
- generate_email_header
-
- # Call the correct body generation function
- fn_name=general
- case "$refname_type" in
- "tracking branch"|branch)
- fn_name=branch
- ;;
- "annotated tag")
- fn_name=atag
- ;;
- esac
-
- if [ -z "$maxlines" ]; then
- generate_${change_type}_${fn_name}_email
- else
- generate_${change_type}_${fn_name}_email | limit_lines $maxlines
- fi
-
- generate_email_footer
-}
-
-generate_email_header()
-{
- # --- Email (all stdout will be the email)
- # Generate header
- cat <<-EOF
- To: $recipients
- Subject: ${emailprefix}$projectdesc $refname_type $short_refname ${change_type}d. $describe
- MIME-Version: 1.0
- Content-Type: text/plain; charset=utf-8
- Content-Transfer-Encoding: 8bit
- X-Git-Refname: $refname
- X-Git-Reftype: $refname_type
- X-Git-Oldrev: $oldrev
- X-Git-Newrev: $newrev
- Auto-Submitted: auto-generated
-
- This is an automated email from the git hooks/post-receive script. It was
- generated because a ref change was pushed to the repository containing
- the project "$projectdesc".
-
- The $refname_type, $short_refname has been ${change_type}d
- EOF
-}
-
-generate_email_footer()
-{
- SPACE=" "
- cat <<-EOF
-
-
- hooks/post-receive
- --${SPACE}
- $projectdesc
- EOF
-}
-
-# --------------- Branches
-
-#
-# Called for the creation of a branch
-#
-generate_create_branch_email()
-{
- # This is a new branch and so oldrev is not valid
- echo " at $newrev ($newrev_type)"
- echo ""
-
- echo $LOGBEGIN
- show_new_revisions
- echo $LOGEND
-}
-
-#
-# Called for the change of a pre-existing branch
-#
-generate_update_branch_email()
-{
- # Consider this:
- # 1 --- 2 --- O --- X --- 3 --- 4 --- N
- #
- # O is $oldrev for $refname
- # N is $newrev for $refname
- # X is a revision pointed to by some other ref, for which we may
- # assume that an email has already been generated.
- # In this case we want to issue an email containing only revisions
- # 3, 4, and N. Given (almost) by
- #
- # git rev-list N ^O --not --all
- #
- # The reason for the "almost", is that the "--not --all" will take
- # precedence over the "N", and effectively will translate to
- #
- # git rev-list N ^O ^X ^N
- #
- # So, we need to build up the list more carefully. git rev-parse
- # will generate a list of revs that may be fed into git rev-list.
- # We can get it to make the "--not --all" part and then filter out
- # the "^N" with:
- #
- # git rev-parse --not --all | grep -v N
- #
- # Then, using the --stdin switch to git rev-list we have effectively
- # manufactured
- #
- # git rev-list N ^O ^X
- #
- # This leaves a problem when someone else updates the repository
- # while this script is running. Their new value of the ref we're
- # working on would be included in the "--not --all" output; and as
- # our $newrev would be an ancestor of that commit, it would exclude
- # all of our commits. What we really want is to exclude the current
- # value of $refname from the --not list, rather than N itself. So:
- #
- # git rev-parse --not --all | grep -v $(git rev-parse $refname)
- #
- # Gets us to something pretty safe (apart from the small time
- # between refname being read, and git rev-parse running - for that,
- # I give up)
- #
- #
- # Next problem, consider this:
- # * --- B --- * --- O ($oldrev)
- # \
- # * --- X --- * --- N ($newrev)
- #
- # That is to say, there is no guarantee that oldrev is a strict
- # subset of newrev (it would have required a --force, but that's
- # allowed). So, we can't simply say rev-list $oldrev..$newrev.
- # Instead we find the common base of the two revs and list from
- # there.
- #
- # As above, we need to take into account the presence of X; if
- # another branch is already in the repository and points at some of
- # the revisions that we are about to output - we don't want them.
- # The solution is as before: git rev-parse output filtered.
- #
- # Finally, tags: 1 --- 2 --- O --- T --- 3 --- 4 --- N
- #
- # Tags pushed into the repository generate nice shortlog emails that
- # summarise the commits between them and the previous tag. However,
- # those emails don't include the full commit messages that we output
- # for a branch update. Therefore we still want to output revisions
- # that have been output on a tag email.
- #
- # Luckily, git rev-parse includes just the tool. Instead of using
- # "--all" we use "--branches"; this has the added benefit that
- # "remotes/" will be ignored as well.
-
-	# List all of the revisions that were removed by this update. In a
- # fast-forward update, this list will be empty, because rev-list O
- # ^N is empty. For a non-fast-forward, O ^N is the list of removed
- # revisions
- fast_forward=""
- rev=""
- for rev in $(git rev-list $newrev..$oldrev)
- do
- revtype=$(git cat-file -t "$rev")
- echo " discards $rev ($revtype)"
- done
- if [ -z "$rev" ]; then
- fast_forward=1
- fi
-
- # List all the revisions from baserev to newrev in a kind of
- # "table-of-contents"; note this list can include revisions that
- # have already had notification emails and is present to show the
- # full detail of the change from rolling back the old revision to
- # the base revision and then forward to the new revision
- for rev in $(git rev-list $oldrev..$newrev)
- do
- revtype=$(git cat-file -t "$rev")
- echo " via $rev ($revtype)"
- done
-
- if [ "$fast_forward" ]; then
- echo " from $oldrev ($oldrev_type)"
- else
- # 1. Existing revisions were removed. In this case newrev
- # is a subset of oldrev - this is the reverse of a
- # fast-forward, a rewind
- # 2. New revisions were added on top of an old revision,
- # this is a rewind and addition.
-
- # (1) certainly happened, (2) possibly. When (2) hasn't
- # happened, we set a flag to indicate that no log printout
- # is required.
-
- echo ""
-
- # Find the common ancestor of the old and new revisions and
- # compare it with newrev
- baserev=$(git merge-base $oldrev $newrev)
- rewind_only=""
- if [ "$baserev" = "$newrev" ]; then
- echo "This update discarded existing revisions and left the branch pointing at"
- echo "a previous point in the repository history."
- echo ""
- echo " * -- * -- N ($newrev)"
- echo " \\"
- echo " O -- O -- O ($oldrev)"
- echo ""
- echo "The removed revisions are not necessarily gone - if another reference"
- echo "still refers to them they will stay in the repository."
- rewind_only=1
- else
- echo "This update added new revisions after undoing existing revisions. That is"
- echo "to say, the old revision is not a strict subset of the new revision. This"
- echo "situation occurs when you --force push a change and generate a repository"
- echo "containing something like this:"
- echo ""
- echo " * -- * -- B -- O -- O -- O ($oldrev)"
- echo " \\"
- echo " N -- N -- N ($newrev)"
- echo ""
- echo "When this happens we assume that you've already had alert emails for all"
- echo "of the O revisions, and so we here report only the revisions in the N"
- echo "branch from the common base, B."
- fi
- fi
-
- echo ""
- if [ -z "$rewind_only" ]; then
- echo "Those revisions listed above that are new to this repository have"
- echo "not appeared on any other notification email; so we list those"
- echo "revisions in full, below."
-
- echo ""
- echo $LOGBEGIN
- show_new_revisions
-
- # XXX: Need a way of detecting whether git rev-list actually
- # outputted anything, so that we can issue a "no new
- # revisions added by this update" message
-
- echo $LOGEND
- else
- echo "No new revisions were added by this update."
- fi
-
- # The diffstat is shown from the old revision to the new revision.
- # This is to show the truth of what happened in this change.
- # There's no point showing the stat from the base to the new
- # revision because the base is effectively a random revision at this
- # point - the user will be interested in what this revision changed
- # - including the undoing of previous revisions in the case of
- # non-fast-forward updates.
- echo ""
- echo "Summary of changes:"
- git diff-tree $diffopts $oldrev..$newrev
-}
-
-#
-# Called for the deletion of a branch
-#
-generate_delete_branch_email()
-{
- echo " was $oldrev"
- echo ""
- echo $LOGBEGIN
- git diff-tree -s --always --encoding=UTF-8 --pretty=oneline $oldrev
- echo $LOGEND
-}
-
-# --------------- Annotated tags
-
-#
-# Called for the creation of an annotated tag
-#
-generate_create_atag_email()
-{
- echo " at $newrev ($newrev_type)"
-
- generate_atag_email
-}
-
-#
-# Called for the update of an annotated tag (this is probably a rare event
-# and may not even be allowed)
-#
-generate_update_atag_email()
-{
- echo " to $newrev ($newrev_type)"
- echo " from $oldrev (which is now obsolete)"
-
- generate_atag_email
-}
-
-#
-# Called when an annotated tag is created or changed
-#
-generate_atag_email()
-{
- # Use git for-each-ref to pull out the individual fields from the
- # tag
- eval $(git for-each-ref --shell --format='
- tagobject=%(*objectname)
- tagtype=%(*objecttype)
- tagger=%(taggername)
- tagged=%(taggerdate)' $refname
- )
-
- echo " tagging $tagobject ($tagtype)"
- case "$tagtype" in
- commit)
-
- # If the tagged object is a commit, then we assume this is a
- # release, and so we calculate which tag this tag is
- # replacing
- prevtag=$(git describe --abbrev=0 $newrev^ 2>/dev/null)
-
- if [ -n "$prevtag" ]; then
- echo " replaces $prevtag"
- fi
- ;;
- *)
- echo " length $(git cat-file -s $tagobject) bytes"
- ;;
- esac
- echo " tagged by $tagger"
- echo " on $tagged"
-
- echo ""
- echo $LOGBEGIN
-
- # Show the content of the tag message; this might contain a change
- # log or release notes so is worth displaying.
- git cat-file tag $newrev | sed -e '1,/^$/d'
-
- echo ""
- case "$tagtype" in
- commit)
- # Only commit tags make sense to have rev-list operations
- # performed on them
- if [ -n "$prevtag" ]; then
- # Show changes since the previous release
- git shortlog "$prevtag..$newrev"
- else
- # No previous tag, show all the changes since time
- # began
- git shortlog $newrev
- fi
- ;;
- *)
- # XXX: Is there anything useful we can do for non-commit
- # objects?
- ;;
- esac
-
- echo $LOGEND
-}
-
-#
-# Called for the deletion of an annotated tag
-#
-generate_delete_atag_email()
-{
- echo " was $oldrev"
- echo ""
- echo $LOGBEGIN
- git diff-tree -s --always --encoding=UTF-8 --pretty=oneline $oldrev
- echo $LOGEND
-}
-
-# --------------- General references
-
-#
-# Called when any other type of reference is created (most likely a
-# non-annotated tag)
-#
-generate_create_general_email()
-{
- echo " at $newrev ($newrev_type)"
-
- generate_general_email
-}
-
-#
-# Called when any other type of reference is updated (most likely a
-# non-annotated tag)
-#
-generate_update_general_email()
-{
- echo " to $newrev ($newrev_type)"
- echo " from $oldrev"
-
- generate_general_email
-}
-
-#
-# Called for creation or update of any other type of reference
-#
-generate_general_email()
-{
- # Unannotated tags are more about marking a point than releasing a
- # version; therefore we don't do the shortlog summary that we do for
- # annotated tags above - we simply show that the point has been
- # marked, and print the log message for the marked point for
- # reference purposes
- #
- # Note this section also catches any other reference type (although
- # there aren't any) and deals with them in the same way.
-
- echo ""
- if [ "$newrev_type" = "commit" ]; then
- echo $LOGBEGIN
- git diff-tree -s --always --encoding=UTF-8 --pretty=medium $newrev
- echo $LOGEND
- else
- # What can we do here? The tag marks an object that is not
- # a commit, so there is no log for us to display. It's
- # probably not wise to output git cat-file as it could be a
- # binary blob. We'll just say how big it is
- echo "$newrev is a $newrev_type, and is $(git cat-file -s $newrev) bytes long."
- fi
-}
-
-#
-# Called for the deletion of any other type of reference
-#
-generate_delete_general_email()
-{
- echo " was $oldrev"
- echo ""
- echo $LOGBEGIN
- git diff-tree -s --always --encoding=UTF-8 --pretty=oneline $oldrev
- echo $LOGEND
-}
-
-
-# --------------- Miscellaneous utilities
-
-#
-# Show new revisions as the user would like to see them in the email.
-#
-show_new_revisions()
-{
- # This shows all log entries that are not already covered by
- # another ref - i.e. commits that are now accessible from this
- # ref that were previously not accessible
- # (see generate_update_branch_email for the explanation of this
- # command)
-
- # Revision range passed to rev-list differs for new vs. updated
- # branches.
- if [ "$change_type" = create ]
- then
- # Show all revisions exclusive to this (new) branch.
- revspec=$newrev
- else
- # Branch update; show revisions not part of $oldrev.
- revspec=$oldrev..$newrev
- fi
-
- other_branches=$(git for-each-ref --format='%(refname)' refs/heads/ |
- grep -F -v $refname)
- git rev-parse --not $other_branches |
- if [ -z "$custom_showrev" ]
- then
- git rev-list --pretty --stdin $revspec
- else
- git rev-list --stdin $revspec |
- while read onerev
- do
- eval $(printf "$custom_showrev" $onerev)
- done
- fi
-}
-
-
-limit_lines()
-{
- lines=0
- skipped=0
- while IFS="" read -r line; do
- lines=$((lines + 1))
- if [ $lines -gt $1 ]; then
- skipped=$((skipped + 1))
- else
- printf "%s\n" "$line"
- fi
- done
- if [ $skipped -ne 0 ]; then
- echo "... $skipped lines suppressed ..."
- fi
-}
-
-
-send_mail()
-{
- if [ -n "$envelopesender" ]; then
- /usr/sbin/sendmail -t -f "$envelopesender"
- else
- /usr/sbin/sendmail -t
- fi
-}
-
-# ---------------------------- main()
-
-# --- Constants
-LOGBEGIN="- Log -----------------------------------------------------------------"
-LOGEND="-----------------------------------------------------------------------"
-
-# --- Config
-# Set GIT_DIR either from the working directory, or from the environment
-# variable.
-GIT_DIR=$(git rev-parse --git-dir 2>/dev/null)
-if [ -z "$GIT_DIR" ]; then
- echo >&2 "fatal: post-receive: GIT_DIR not set"
- exit 1
-fi
-
-projectdesc=$(sed -ne '1p' "$GIT_DIR/description" 2>/dev/null)
-# Check if the description is unchanged from its default, and shorten it to
-# a more manageable length if it is
-if expr "$projectdesc" : "Unnamed repository.*$" >/dev/null
-then
- projectdesc="UNNAMED PROJECT"
-fi
-
-recipients=$(git config hooks.mailinglist)
-announcerecipients=$(git config hooks.announcelist)
-envelopesender=$(git config hooks.envelopesender)
-emailprefix=$(git config hooks.emailprefix || echo '[SCM] ')
-custom_showrev=$(git config hooks.showrev)
-maxlines=$(git config hooks.emailmaxlines)
-diffopts=$(git config hooks.diffopts)
-: ${diffopts:="--stat --summary --find-copies-harder"}
-
-# --- Main loop
-# Allow dual mode: run from the command line just like the update hook, or
-# if no arguments are given then run as a hook script
-if [ -n "$1" -a -n "$2" -a -n "$3" ]; then
- # Output to the terminal in command line mode - if someone wanted to
- # resend an email; they could redirect the output to sendmail
- # themselves
- prep_for_email $2 $3 $1 && PAGER= generate_email
-else
- while read oldrev newrev refname
- do
- prep_for_email $oldrev $newrev $refname || continue
- generate_email $maxlines | send_mail
- done
-fi
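
The deleted hook's limit_lines helper, near the end of the script above, truncated the generated mail body at hooks.emailmaxlines and reported how many lines were dropped. The same filter as a standalone C program, for illustration (it reads stdin in fixed-size fgets() chunks, so a line longer than the buffer counts once per chunk):

    #include <stdio.h>
    #include <stdlib.h>

    /*
     * Pass at most `limit` lines of stdin through to stdout, then report
     * how many were suppressed.
     */
    int main(int argc, char **argv)
    {
        long limit = argc > 1 ? strtol(argv[1], NULL, 10) : 100;
        long lines = 0, skipped = 0;
        char buf[4096];

        while (fgets(buf, sizeof(buf), stdin)) {
            if (++lines > limit)
                skipped++;
            else
                fputs(buf, stdout);
        }
        if (skipped)
            printf("... %ld lines suppressed ...\n", skipped);
        return 0;
    }
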
diff --git a/contrib/hooks/pre-auto-gc-battery b/contrib/hooks/pre-auto-gc-battery
deleted file mode 100755
index 7ba78c4dff..0000000000
--- a/contrib/hooks/pre-auto-gc-battery
+++ /dev/null
@@ -1,42 +0,0 @@
-#!/bin/sh
-#
-# An example hook script to verify if you are on battery, in case you
-# are running Linux or OS X. Called by git-gc --auto with no arguments.
-# The hook should exit with non-zero status after issuing an appropriate
-# message if it wants to stop the auto repacking.
-#
-# This hook is stored in the contrib/hooks directory. Your distribution
-# may have put this somewhere else. If you want to use this hook, you
-# should make this script executable then link to it in the repository
-# you would like to use it in.
-#
-# For example, if the hook is stored in
-# /usr/share/git-core/contrib/hooks/pre-auto-gc-battery:
-#
-# cd /path/to/your/repository.git
-# ln -sf /usr/share/git-core/contrib/hooks/pre-auto-gc-battery \
-# hooks/pre-auto-gc
-
-if test -x /sbin/on_ac_power && (/sbin/on_ac_power;test $? -ne 1)
-then
- exit 0
-elif test "$(cat /sys/class/power_supply/AC/online 2>/dev/null)" = 1
-then
- exit 0
-elif grep -q 'on-line' /proc/acpi/ac_adapter/AC/state 2>/dev/null
-then
- exit 0
-elif grep -q '0x01$' /proc/apm 2>/dev/null
-then
- exit 0
-elif grep -q "AC Power \+: 1" /proc/pmu/info 2>/dev/null
-then
- exit 0
-elif test -x /usr/bin/pmset && /usr/bin/pmset -g batt |
- grep -q "drawing from 'AC Power'"
-then
- exit 0
-fi
-
-echo "Auto packing deferred; not on AC"
-exit 1
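
The deleted pre-auto-gc hook deferred automatic repacking while on battery, probing several kernel interfaces in turn. The simplest of those probes, the sysfs AC adapter node, as an illustrative standalone check (Linux-only; the adapter path can differ per machine):

    #include <stdio.h>

    /*
     * 1 if the sysfs AC adapter reports on-line power, 0 if on battery,
     * -1 if the node cannot be read (non-Linux, different adapter name).
     */
    static int on_ac_power(void)
    {
        FILE *f = fopen("/sys/class/power_supply/AC/online", "r");
        int c;

        if (!f)
            return -1;
        c = fgetc(f);
        fclose(f);
        return c == '1';
    }

    int main(void)
    {
        int ac = on_ac_power();

        if (ac < 0)
            puts("cannot tell; allowing auto-gc");
        else if (ac)
            puts("on AC; allowing auto-gc");
        else
            puts("on battery; deferring auto-gc");
        return ac == 0;   /* non-zero exit defers the repack, as the hook did */
    }
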
diff --git a/contrib/hooks/setgitperms.perl b/contrib/hooks/setgitperms.perl
deleted file mode 100755
index 2770a1b1d2..0000000000
--- a/contrib/hooks/setgitperms.perl
+++ /dev/null
@@ -1,214 +0,0 @@
-#!/usr/bin/perl
-#
-# Copyright (c) 2006 Josh England
-#
-# This script can be used to save/restore full permissions and ownership data
-# within a git working tree.
-#
-# To save permissions/ownership data, place this script in your .git/hooks
-# directory and enable a `pre-commit` hook with the following lines:
-# #!/bin/sh
-# SUBDIRECTORY_OK=1 . git-sh-setup
-# $GIT_DIR/hooks/setgitperms.perl -r
-#
-# To restore permissions/ownership data, place this script in your .git/hooks
-# directory and enable a `post-merge` and `post-checkout` hook with the
-# following lines:
-# #!/bin/sh
-# SUBDIRECTORY_OK=1 . git-sh-setup
-# $GIT_DIR/hooks/setgitperms.perl -w
-#
-use strict;
-use Getopt::Long;
-use File::Find;
-use File::Basename;
-
-my $usage =
-"usage: setgitperms.perl [OPTION]... <--read|--write>
-This program uses a file `.gitmeta` to store/restore permissions and uid/gid
-info for all files/dirs tracked by git in the repository.
-
----------------------------------Read Mode-------------------------------------
--r, --read Reads perms/etc from working dir into a .gitmeta file
--s, --stdout Output to stdout instead of .gitmeta
--d, --diff Show unified diff of perms file (XOR with --stdout)
-
----------------------------------Write Mode------------------------------------
--w, --write Modify perms/etc in working dir to match the .gitmeta file
--v, --verbose Be verbose
-
-\n";
-
-my ($stdout, $showdiff, $verbose, $read_mode, $write_mode);
-
-if ((@ARGV < 0) || !GetOptions(
- "stdout", \$stdout,
- "diff", \$showdiff,
- "read", \$read_mode,
- "write", \$write_mode,
- "verbose", \$verbose,
- )) { die $usage; }
-die $usage unless ($read_mode xor $write_mode);
-
-my $topdir = `git rev-parse --show-cdup` or die "\n"; chomp $topdir;
-my $gitdir = $topdir . '.git';
-my $gitmeta = $topdir . '.gitmeta';
-
-if ($write_mode) {
- # Update the working dir permissions/ownership based on data from .gitmeta
- open (IN, "<$gitmeta") or die "Could not open $gitmeta for reading: $!\n";
- while (defined ($_ = <IN>)) {
- chomp;
- if (/^(.*) mode=(\S+)\s+uid=(\d+)\s+gid=(\d+)/) {
- # Compare recorded perms to actual perms in the working dir
- my ($path, $mode, $uid, $gid) = ($1, $2, $3, $4);
- my $fullpath = $topdir . $path;
- my (undef,undef,$wmode,undef,$wuid,$wgid) = lstat($fullpath);
- $wmode = sprintf "%04o", $wmode & 07777;
- if ($mode ne $wmode) {
- $verbose && print "Updating permissions on $path: old=$wmode, new=$mode\n";
- chmod oct($mode), $fullpath;
- }
- if ($uid != $wuid || $gid != $wgid) {
- if ($verbose) {
- # Print out user/group names instead of uid/gid
- my $pwname = getpwuid($uid);
- my $grpname = getgrgid($gid);
- my $wpwname = getpwuid($wuid);
- my $wgrpname = getgrgid($wgid);
- $pwname = $uid if !defined $pwname;
- $grpname = $gid if !defined $grpname;
- $wpwname = $wuid if !defined $wpwname;
- $wgrpname = $wgid if !defined $wgrpname;
-
- print "Updating uid/gid on $path: old=$wpwname/$wgrpname, new=$pwname/$grpname\n";
- }
- chown $uid, $gid, $fullpath;
- }
- }
- else {
- warn "Invalid input format in $gitmeta:\n\t$_\n";
- }
- }
- close IN;
-}
-elsif ($read_mode) {
- # Handle merge conflicts in the .gitperms file
- if (-e "$gitdir/MERGE_MSG") {
- if (`grep ====== $gitmeta`) {
- # Conflict not resolved -- abort the commit
- print "PERMISSIONS/OWNERSHIP CONFLICT\n";
- print " Resolve the conflict in the $gitmeta file and then run\n";
- print " `.git/hooks/setgitperms.perl --write` to reconcile.\n";
- exit 1;
- }
- elsif (`grep $gitmeta $gitdir/MERGE_MSG`) {
- # A conflict in .gitmeta has been manually resolved. Verify that
- # the working dir perms matches the current .gitmeta perms for
- # each file/dir that conflicted.
- # This is here because a `setgitperms.perl --write` was not
- # performed due to a merge conflict, so permissions/ownership
- # may not be consistent with the manually merged .gitmeta file.
- my @conflict_diff = `git show \$(cat $gitdir/MERGE_HEAD)`;
- my @conflict_files;
- my $metadiff = 0;
-
- # Build a list of files that conflicted from the .gitmeta diff
- foreach my $line (@conflict_diff) {
- if ($line =~ m|^diff --git a/$gitmeta b/$gitmeta|) {
- $metadiff = 1;
- }
- elsif ($line =~ /^diff --git/) {
- $metadiff = 0;
- }
- elsif ($metadiff && $line =~ /^\+(.*) mode=/) {
- push @conflict_files, $1;
- }
- }
-
- # Verify that each conflict file now has permissions consistent
- # with the .gitmeta file
- foreach my $file (@conflict_files) {
- my $absfile = $topdir . $file;
- my $gm_entry = `grep "^$file mode=" $gitmeta`;
- if ($gm_entry =~ /mode=(\d+) uid=(\d+) gid=(\d+)/) {
- my ($gm_mode, $gm_uid, $gm_gid) = ($1, $2, $3);
- my (undef,undef,$mode,undef,$uid,$gid) = lstat("$absfile");
- $mode = sprintf("%04o", $mode & 07777);
- if (($gm_mode ne $mode) || ($gm_uid != $uid)
- || ($gm_gid != $gid)) {
- print "PERMISSIONS/OWNERSHIP CONFLICT\n";
- print " Mismatch found for file: $file\n";
- print " Run `.git/hooks/setgitperms.perl --write` to reconcile.\n";
- exit 1;
- }
- }
- else {
- print "Warning! Permissions/ownership no longer being tracked for file: $file\n";
- }
- }
- }
- }
-
- # No merge conflicts -- write out perms/ownership data to .gitmeta file
- unless ($stdout) {
- open (OUT, ">$gitmeta.tmp") or die "Could not open $gitmeta.tmp for writing: $!\n";
- }
-
- my @files = `git ls-files`;
- my %dirs;
-
- foreach my $path (@files) {
- chomp $path;
- # We have to manually add stats for parent directories
- my $parent = dirname($path);
- while (!exists $dirs{$parent}) {
- $dirs{$parent} = 1;
- next if $parent eq '.';
- printstats($parent);
- $parent = dirname($parent);
- }
- # Now the git-tracked file
- printstats($path);
- }
-
- # diff the temporary metadata file to see if anything has changed
- # If no metadata has changed, don't overwrite the real file
- # This is just so `git commit -a` doesn't try to commit a bogus update
- unless ($stdout) {
- if (! -e $gitmeta) {
- rename "$gitmeta.tmp", $gitmeta;
- }
- else {
- my $diff = `diff -U 0 $gitmeta $gitmeta.tmp`;
- if ($diff ne '') {
- rename "$gitmeta.tmp", $gitmeta;
- }
- else {
- unlink "$gitmeta.tmp";
- }
- if ($showdiff) {
- print $diff;
- }
- }
- close OUT;
- }
- # Make sure the .gitmeta file is tracked
- system("git add $gitmeta");
-}
-
-
-sub printstats {
- my $path = $_[0];
- $path =~ s/@/\@/g;
- my (undef,undef,$mode,undef,$uid,$gid) = lstat($path);
- $path =~ s/%/\%/g;
- if ($stdout) {
- print $path;
- printf " mode=%04o uid=$uid gid=$gid\n", $mode & 07777;
- }
- else {
- print OUT $path;
- printf OUT " mode=%04o uid=$uid gid=$gid\n", $mode & 07777;
- }
-}
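
The deleted setgitperms.perl recorded each tracked path's mode, uid and gid in a .gitmeta file via lstat(). The core of that record step in C, for illustration only:

    #include <stdio.h>
    #include <sys/stat.h>

    /* Print one .gitmeta-style record for `path`, as the hook did. */
    static int print_meta(const char *path)
    {
        struct stat st;

        if (lstat(path, &st))
            return -1;
        printf("%s mode=%04o uid=%u gid=%u\n", path,
               (unsigned)(st.st_mode & 07777),
               (unsigned)st.st_uid, (unsigned)st.st_gid);
        return 0;
    }

    int main(int argc, char **argv)
    {
        for (int i = 1; i < argc; i++)
            print_meta(argv[i]);
        return 0;
    }
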
diff --git a/contrib/hooks/update-paranoid b/contrib/hooks/update-paranoid
deleted file mode 100755
index 0092d67b8a..0000000000
--- a/contrib/hooks/update-paranoid
+++ /dev/null
@@ -1,421 +0,0 @@
-#!/usr/bin/perl
-
-use strict;
-use File::Spec;
-
-$ENV{PATH} = '/opt/git/bin';
-my $acl_git = '/vcs/acls.git';
-my $acl_branch = 'refs/heads/master';
-my $debug = 0;
-
-=doc
-Invoked as: update refname old-sha1 new-sha1
-
-This script is run by git-receive-pack once for each ref that the
-client is trying to modify. If we exit with a non-zero exit value
-then the update for that particular ref is denied, but updates for
-other refs in the same run of receive-pack may still be allowed.
-
-We are run after the objects have been uploaded, but before the
-ref is actually modified. We take advantage of that fact when we
-look for "new" commits and tags (the new objects won't show up in
-`rev-list --all`).
-
-This script loads and parses the content of the config file
-"users/$this_user.acl" from the $acl_branch commit of $acl_git ODB.
-The acl file is a git-config style file, but uses a slightly more
-restricted syntax as the Perl parser contained within this script
-is not nearly as permissive as git-config.
-
-Example:
-
- [user]
- committer = John Doe <john.doe@example.com>
- committer = John R. Doe <john.doe@example.com>
-
- [repository "acls"]
- allow = heads/master
- allow = CDUR for heads/jd/
- allow = C for ^tags/v\\d+$
-
-For all new commit or tag objects the committer (or tagger) line
-within the object must exactly match one of the user.committer
-values listed in the acl file ("HEAD:users/$this_user.acl").
-
-For a branch to be modified an allow line within the matching
-repository section must be matched for both the refname and the
-opcode.
-
-Repository sections are matched on the basename of the repository
-(after removing the .git suffix).
-
-The opcode abbreviations are:
-
- C: create new ref
- D: delete existing ref
- U: fast-forward existing ref (no commit loss)
- R: rewind/rebase existing ref (commit loss)
-
-if no opcodes are listed before the "for" keyword then "U" (for
-fast-forward update only) is assumed as this is the most common
-usage.
-
-Refnames are matched by always assuming a prefix of "refs/".
-This hook forbids pushing or deleting anything not under "refs/".
-
-Refnames that start with ^ are Perl regular expressions, and the ^
-is kept as part of the regexp. \\ is needed to get just one \, so
-\\d expands to \d in Perl. The 3rd allow line above is an example.
-
-Refnames that don't start with ^ but that end with / are prefix
-matches (2nd allow line above); all other refnames are strict
-equality matches (1st allow line).
-
-Anything pushed to "heads/" (ok, really "refs/heads/") must be
-a commit. Tags are not permitted here.
-
-Anything pushed to "tags/" (err, really "refs/tags/") must be an
-annotated tag. Commits, blobs, trees, etc. are not permitted here.
-Annotated tag signatures aren't checked, nor are they required.
-
-The special subrepository of 'info/new-commit-check' can
-be created and used to allow users to push new commits and
-tags from another local repository to this one, even if they
-aren't the committer/tagger of those objects. In a nutshell
-the info/new-commit-check directory is a Git repository whose
-objects/info/alternates file lists this repository and all other
-possible sources, and whose refs subdirectory contains symlinks
-to this repository's refs subdirectory, and to all other possible
-sources refs subdirectories. Yes, this means that you cannot
-use packed-refs in those repositories as they won't be resolved
-correctly.
-
-=cut
-
-my $git_dir = $ENV{GIT_DIR};
-my $new_commit_check = "$git_dir/info/new-commit-check";
-my $ref = $ARGV[0];
-my $old = $ARGV[1];
-my $new = $ARGV[2];
-my $new_type;
-my ($this_user) = getpwuid $<; # REAL_USER_ID
-my $repository_name;
-my %user_committer;
-my @allow_rules;
-my @path_rules;
-my %diff_cache;
-
-sub deny ($) {
- print STDERR "-Deny- $_[0]\n" if $debug;
- print STDERR "\ndenied: $_[0]\n\n";
- exit 1;
-}
-
-sub grant ($) {
- print STDERR "-Grant- $_[0]\n" if $debug;
- exit 0;
-}
-
-sub info ($) {
- print STDERR "-Info- $_[0]\n" if $debug;
-}
-
-sub git_value (@) {
- open(T,'-|','git',@_); local $_ = <T>; chop; close T; $_;
-}
-
-sub match_string ($$) {
- my ($acl_n, $ref) = @_;
- ($acl_n eq $ref)
- || ($acl_n =~ m,/$, && substr($ref,0,length $acl_n) eq $acl_n)
- || ($acl_n =~ m,^\^, && $ref =~ m:$acl_n:);
-}
-
-sub parse_config ($$$$) {
- my $data = shift;
- local $ENV{GIT_DIR} = shift;
- my $br = shift;
- my $fn = shift;
- return unless git_value('rev-list','--max-count=1',$br,'--',$fn);
- info "Loading $br:$fn";
- open(I,'-|','git','cat-file','blob',"$br:$fn");
- my $section = '';
- while (<I>) {
- chomp;
- if (/^\s*$/ || /^\s*#/) {
- } elsif (/^\[([a-z]+)\]$/i) {
- $section = lc $1;
- } elsif (/^\[([a-z]+)\s+"(.*)"\]$/i) {
- $section = join('.',lc $1,$2);
- } elsif (/^\s*([a-z][a-z0-9]+)\s*=\s*(.*?)\s*$/i) {
- push @{$data->{join('.',$section,lc $1)}}, $2;
- } else {
- deny "bad config file line $. in $br:$fn";
- }
- }
- close I;
-}
-
-sub all_new_committers () {
- local $ENV{GIT_DIR} = $git_dir;
- $ENV{GIT_DIR} = $new_commit_check if -d $new_commit_check;
-
- info "Getting committers of new commits.";
- my %used;
- open(T,'-|','git','rev-list','--pretty=raw',$new,'--not','--all');
- while (<T>) {
- next unless s/^committer //;
- chop;
- s/>.*$/>/;
- info "Found $_." unless $used{$_}++;
- }
- close T;
- info "No new commits." unless %used;
- keys %used;
-}
-
-sub all_new_taggers () {
- my %exists;
- open(T,'-|','git','for-each-ref','--format=%(objectname)','refs/tags');
- while (<T>) {
- chop;
- $exists{$_} = 1;
- }
- close T;
-
- info "Getting taggers of new tags.";
- my %used;
- my $obj = $new;
- my $obj_type = $new_type;
- while ($obj_type eq 'tag') {
- last if $exists{$obj};
- $obj_type = '';
- open(T,'-|','git','cat-file','tag',$obj);
- while (<T>) {
- chop;
- if (/^object ([a-z0-9]{40})$/) {
- $obj = $1;
- } elsif (/^type (.+)$/) {
- $obj_type = $1;
- } elsif (s/^tagger //) {
- s/>.*$/>/;
- info "Found $_." unless $used{$_}++;
- last;
- }
- }
- close T;
- }
- info "No new tags." unless %used;
- keys %used;
-}
-
-sub check_committers (@) {
- my @bad;
- foreach (@_) { push @bad, $_ unless $user_committer{$_}; }
- if (@bad) {
- print STDERR "\n";
- print STDERR "You are not $_.\n" foreach (sort @bad);
- deny "You cannot push changes not committed by you.";
- }
-}
-
-sub load_diff ($) {
- my $base = shift;
- my $d = $diff_cache{$base};
- unless ($d) {
- local $/ = "\0";
- my %this_diff;
- if ($base =~ /^0{40}$/) {
- # Don't load the diff at all; we are making the
- # branch and have no base to compare to in this
- # case. A file level ACL makes no sense in this
- # context. Having an empty diff will allow the
- # branch creation.
- #
- } else {
- open(T,'-|','git','diff-tree',
- '-r','--name-status','-z',
- $base,$new) or return undef;
- while (<T>) {
- my $op = $_;
- chop $op;
-
- my $path = <T>;
- chop $path;
-
- $this_diff{$path} = $op;
- }
- close T or return undef;
- }
- $d = \%this_diff;
- $diff_cache{$base} = $d;
- }
- return $d;
-}
-
-deny "No GIT_DIR inherited from caller" unless $git_dir;
-deny "Need a ref name" unless $ref;
-deny "Refusing funny ref $ref" unless $ref =~ s,^refs/,,;
-deny "Bad old value $old" unless $old =~ /^[a-z0-9]{40}$/;
-deny "Bad new value $new" unless $new =~ /^[a-z0-9]{40}$/;
-deny "Cannot determine who you are." unless $this_user;
-grant "No change requested." if $old eq $new;
-
-$repository_name = File::Spec->rel2abs($git_dir);
-$repository_name =~ m,/([^/]+)(?:\.git|/\.git)$,;
-$repository_name = $1;
-info "Updating in '$repository_name'.";
-
-my $op;
-if ($old =~ /^0{40}$/) { $op = 'C'; }
-elsif ($new =~ /^0{40}$/) { $op = 'D'; }
-else { $op = 'R'; }
-
-# This is really an update (fast-forward) if the
-# merge base of $old and $new is $old.
-#
-$op = 'U' if ($op eq 'R'
- && $ref =~ m,^heads/,
- && $old eq git_value('merge-base',$old,$new));
-
-# Load the user's ACL file. Expand groups (user.memberof) one level.
-{
- my %data = ('user.committer' => []);
- parse_config(\%data,$acl_git,$acl_branch,"external/$repository_name.acl");
-
- %data = (
- 'user.committer' => $data{'user.committer'},
- 'user.memberof' => [],
- );
- parse_config(\%data,$acl_git,$acl_branch,"users/$this_user.acl");
-
- %user_committer = map {$_ => $_} @{$data{'user.committer'}};
- my $rule_key = "repository.$repository_name.allow";
- my $rules = $data{$rule_key} || [];
-
- foreach my $group (@{$data{'user.memberof'}}) {
- my %g;
- parse_config(\%g,$acl_git,$acl_branch,"groups/$group.acl");
- my $group_rules = $g{$rule_key};
- push @$rules, @$group_rules if $group_rules;
- }
-
-RULE:
- foreach (@$rules) {
- while (/\${user\.([a-z][a-zA-Z0-9]+)}/) {
- my $k = lc $1;
- my $v = $data{"user.$k"};
- next RULE unless defined $v;
- next RULE if @$v != 1;
- next RULE unless defined $v->[0];
- s/\${user\.$k}/$v->[0]/g;
- }
-
- if (/^([AMD ]+)\s+of\s+([^\s]+)\s+for\s+([^\s]+)\s+diff\s+([^\s]+)$/) {
- my ($ops, $pth, $ref, $bst) = ($1, $2, $3, $4);
- $ops =~ s/ //g;
- $pth =~ s/\\\\/\\/g;
- $ref =~ s/\\\\/\\/g;
- push @path_rules, [$ops, $pth, $ref, $bst];
- } elsif (/^([AMD ]+)\s+of\s+([^\s]+)\s+for\s+([^\s]+)$/) {
- my ($ops, $pth, $ref) = ($1, $2, $3);
- $ops =~ s/ //g;
- $pth =~ s/\\\\/\\/g;
- $ref =~ s/\\\\/\\/g;
- push @path_rules, [$ops, $pth, $ref, $old];
- } elsif (/^([CDRU ]+)\s+for\s+([^\s]+)$/) {
- my $ops = $1;
- my $ref = $2;
- $ops =~ s/ //g;
- $ref =~ s/\\\\/\\/g;
- push @allow_rules, [$ops, $ref];
- } elsif (/^for\s+([^\s]+)$/) {
- # Mentioned, but nothing granted?
- } elsif (/^[^\s]+$/) {
- s/\\\\/\\/g;
- push @allow_rules, ['U', $_];
- }
- }
-}
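
Spelled out, the rule forms accepted by the loop above are (angle-bracketed
names are placeholders):

	A M D  of <path> for <refname> diff <base>
	A M D  of <path> for <refname>
	C D R U  for <refname>
	for <refname>
	<refname>

Any subset of the opcode letters may be given. Path rules use the A/M/D file
statuses reported by git diff-tree --name-status, and when no "diff <base>"
is present the old value of the ref being pushed serves as the base. A bare
refname grants "U" only, and both <path> and <refname> are matched with the
same exact/prefix/regexp logic as match_string above.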
-
-if ($op ne 'D') {
- $new_type = git_value('cat-file','-t',$new);
-
- if ($ref =~ m,^heads/,) {
- deny "$ref must be a commit." unless $new_type eq 'commit';
- } elsif ($ref =~ m,^tags/,) {
- deny "$ref must be an annotated tag." unless $new_type eq 'tag';
- }
-
- check_committers (all_new_committers);
- check_committers (all_new_taggers) if $new_type eq 'tag';
-}
-
-info "$this_user wants $op for $ref";
-foreach my $acl_entry (@allow_rules) {
- my ($acl_ops, $acl_n) = @$acl_entry;
- next unless $acl_ops =~ /^[CDRU]+$/; # Uhh.... shouldn't happen.
- next unless $acl_n;
- next unless $op =~ /^[$acl_ops]$/;
- next unless match_string $acl_n, $ref;
-
- # Don't test path rules on branch deletes.
- #
- grant "Allowed by: $acl_ops for $acl_n" if $op eq 'D';
-
- # Aggregate matching path rules; allow if there aren't
- # any matching this ref.
- #
- my %pr;
- foreach my $p_entry (@path_rules) {
- my ($p_ops, $p_n, $p_ref, $p_bst) = @$p_entry;
- next unless $p_ref;
- push @{$pr{$p_bst}}, $p_entry if match_string $p_ref, $ref;
- }
- grant "Allowed by: $acl_ops for $acl_n" unless %pr;
-
- # Allow only if all changes against a single base are
- # allowed by file path rules.
- #
- my @bad;
- foreach my $p_bst (keys %pr) {
- my $diff_ref = load_diff $p_bst;
- deny "Cannot difference trees." unless ref $diff_ref;
-
- my %fd = %$diff_ref;
- foreach my $p_entry (@{$pr{$p_bst}}) {
- my ($p_ops, $p_n, $p_ref, $p_bst) = @$p_entry;
- next unless $p_ops =~ /^[AMD]+$/;
- next unless $p_n;
-
- foreach my $f_n (keys %fd) {
- my $f_op = $fd{$f_n};
- next unless $f_op;
- next unless $f_op =~ /^[$p_ops]$/;
- delete $fd{$f_n} if match_string $p_n, $f_n;
- }
- last unless %fd;
- }
-
- if (%fd) {
- push @bad, [$p_bst, \%fd];
- } else {
- # All changes relative to $p_bst were allowed.
- #
- grant "Allowed by: $acl_ops for $acl_n diff $p_bst";
- }
- }
-
- foreach my $bad_ref (@bad) {
- my ($p_bst, $fd) = @$bad_ref;
- print STDERR "\n";
- print STDERR "Not allowed to make the following changes:\n";
- print STDERR "(base: $p_bst)\n";
- foreach my $f_n (sort keys %$fd) {
- print STDERR " $fd->{$f_n} $f_n\n";
- }
- }
- deny "You are not permitted to $op $ref";
-}
-close A;
-deny "You are not permitted to $op $ref";
diff --git a/contrib/mw-to-git/.gitignore b/contrib/mw-to-git/.gitignore
deleted file mode 100644
index ae545b013d..0000000000
--- a/contrib/mw-to-git/.gitignore
+++ /dev/null
@@ -1,2 +0,0 @@
-git-remote-mediawiki
-git-mw
diff --git a/contrib/mw-to-git/.perlcriticrc b/contrib/mw-to-git/.perlcriticrc
deleted file mode 100644
index b7333267ad..0000000000
--- a/contrib/mw-to-git/.perlcriticrc
+++ /dev/null
@@ -1,28 +0,0 @@
-# These 3 rules demand to add the s, m and x flag to *every* regexp. This is
-# overkill and would be harmful for readability.
-[-RegularExpressions::RequireExtendedFormatting]
-[-RegularExpressions::RequireDotMatchAnything]
-[-RegularExpressions::RequireLineBoundaryMatching]
-
-# This rule says that builtin functions should not be called with parentheses
-# e.g.: (taken from CPAN's documentation)
-# open($handle, '>', $filename); #not ok
-# open $handle, '>', $filename; #ok
-# Applying such a rule would mean modifying a huge number of lines for a
-# question of style.
-[-CodeLayout::ProhibitParensWithBuiltins]
-
-# This rule states that each system call should have its return value checked
-# The problem is that it includes the print call. Checking every print call's
-# return value would be harmful to the code readability.
-# This configuration keeps all default function but print.
-[InputOutput::RequireCheckedSyscalls]
-functions = open say close
-
-# This rule demands to add a dependency for the Readonly module. This is not
-# wished.
-[-ValuesAndExpressions::ProhibitConstantPragma]
-
-# This rule is not really useful (rather a question of style) and produces many
-# warnings among the code.
-[-ValuesAndExpressions::ProhibitNoisyQuotes]
diff --git a/contrib/mw-to-git/Git/Mediawiki.pm b/contrib/mw-to-git/Git/Mediawiki.pm
deleted file mode 100644
index 629c0cea44..0000000000
--- a/contrib/mw-to-git/Git/Mediawiki.pm
+++ /dev/null
@@ -1,101 +0,0 @@
-package Git::Mediawiki;
-
-require v5.26;
-use strict;
-use POSIX;
-use Git;
-
-BEGIN {
-
-our ($VERSION, @ISA, @EXPORT, @EXPORT_OK);
-
-# Totally unstable API.
-$VERSION = '0.01';
-
-require Exporter;
-
-@ISA = qw(Exporter);
-
-@EXPORT = ();
-
-# Methods which can be called as standalone functions as well:
-@EXPORT_OK = qw(clean_filename smudge_filename connect_maybe
- EMPTY HTTP_CODE_OK HTTP_CODE_PAGE_NOT_FOUND);
-}
-
-# Mediawiki filenames can contain forward slashes. This constant decides which pattern they are replaced with.
-use constant SLASH_REPLACEMENT => '%2F';
-
-# Used to test for empty strings
-use constant EMPTY => q{};
-
-# HTTP codes
-use constant HTTP_CODE_OK => 200;
-use constant HTTP_CODE_PAGE_NOT_FOUND => 404;
-
-sub clean_filename {
- my $filename = shift;
- $filename =~ s{@{[SLASH_REPLACEMENT]}}{/}g;
- # [, ], |, {, and } are forbidden by MediaWiki, even URL-encoded.
- # Do a variant of URL-encoding, i.e. looks like URL-encoding,
- # but with _ added to prevent MediaWiki from thinking this is
- # an actual special character.
- $filename =~ s/[\[\]\{\}\|]/sprintf("_%%_%x", ord($&))/ge;
-	# If we had used URI escaping earlier, we would need to
-	# unescape here, before anything else.
-
- return $filename;
-}
-
-sub smudge_filename {
- my $filename = shift;
- $filename =~ s{/}{@{[SLASH_REPLACEMENT]}}g;
- $filename =~ s/ /_/g;
- # Decode forbidden characters encoded in clean_filename
- $filename =~ s/_%_([0-9a-fA-F][0-9a-fA-F])/sprintf('%c', hex($1))/ge;
- return substr($filename, 0, NAME_MAX-length('.mw'));
-}
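
A small sketch of how the two helpers above relate (the page and file names
are made up):

	use Git::Mediawiki qw(clean_filename smudge_filename);

	# Git file name -> wiki title: turn %2F back into '/', hide
	# MediaWiki's forbidden characters behind the _%_<hex> escape.
	my $wiki_title = clean_filename('Foo%2FBar');      # 'Foo/Bar'

	# Wiki title -> Git file name: the reverse direction, plus ' ' -> '_'
	# and truncation so that '.mw' still fits under NAME_MAX.
	my $git_path = smudge_filename('Foo/Bar Baz');     # 'Foo%2FBar_Baz'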
-
-sub connect_maybe {
- my $wiki = shift;
- if ($wiki) {
- return $wiki;
- }
-
- my $remote_name = shift;
- my $remote_url = shift;
- my ($wiki_login, $wiki_password, $wiki_domain);
-
- $wiki_login = Git::config("remote.${remote_name}.mwLogin");
- $wiki_password = Git::config("remote.${remote_name}.mwPassword");
- $wiki_domain = Git::config("remote.${remote_name}.mwDomain");
-
- $wiki = MediaWiki::API->new;
- $wiki->{config}->{api_url} = "${remote_url}/api.php";
- if ($wiki_login) {
- my %credential = (
- 'url' => $remote_url,
- 'username' => $wiki_login,
- 'password' => $wiki_password
- );
- Git::credential(\%credential);
- my $request = {lgname => $credential{username},
- lgpassword => $credential{password},
- lgdomain => $wiki_domain};
- if ($wiki->login($request)) {
- Git::credential(\%credential, 'approve');
- print {*STDERR} qq(Logged in mediawiki user "$credential{username}".\n);
- } else {
- print {*STDERR} qq(Failed to log in mediawiki user "$credential{username}" on ${remote_url}\n);
- print {*STDERR} ' (error ' .
- $wiki->{error}->{code} . ': ' .
- $wiki->{error}->{details} . ")\n";
- Git::credential(\%credential, 'reject');
- exit 1;
- }
- }
-
- return $wiki;
-}
-
-1; # Famous last words
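
A minimal usage sketch for connect_maybe, assuming a remote called "wiki"
pointing at a hypothetical MediaWiki instance:

	use Git::Mediawiki qw(connect_maybe);

	my $mw;    # stays undef until the API is actually needed
	$mw = connect_maybe($mw, 'wiki', 'http://example.com/w');
	# Later calls hand back the same object without logging in again.
	$mw = connect_maybe($mw, 'wiki', 'http://example.com/w');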
diff --git a/contrib/mw-to-git/Makefile b/contrib/mw-to-git/Makefile
deleted file mode 100644
index 497ac434d6..0000000000
--- a/contrib/mw-to-git/Makefile
+++ /dev/null
@@ -1,61 +0,0 @@
-#
-# Copyright (C) 2013
-# Matthieu Moy <Matthieu.Moy@imag.fr>
-#
-# To build and test:
-#
-# make
-# bin-wrapper/git mw preview Some_page.mw
-# bin-wrapper/git clone mediawiki::http://example.com/wiki/
-#
-# To install, run Git's toplevel 'make install' then run:
-#
-# make install
-
-# The default target of this Makefile is...
-all::
-
-GIT_MEDIAWIKI_PM=Git/Mediawiki.pm
-SCRIPT_PERL=git-remote-mediawiki.perl
-SCRIPT_PERL+=git-mw.perl
-GIT_ROOT_DIR=../..
-HERE=contrib/mw-to-git/
-
-INSTALL = install
-
-SCRIPT_PERL_FULL=$(patsubst %,$(HERE)/%,$(SCRIPT_PERL))
-INSTLIBDIR=$(shell $(MAKE) -C $(GIT_ROOT_DIR)/ \
- -s --no-print-directory prefix=$(prefix) \
- perllibdir=$(perllibdir) perllibdir)
-DESTDIR_SQ = $(subst ','\'',$(DESTDIR))
-INSTLIBDIR_SQ = $(subst ','\'',$(INSTLIBDIR))
-
-all:: build
-
-test: all
- $(MAKE) -C t
-
-check: perlcritic test
-
-install_pm:
- $(INSTALL) -d -m 755 '$(DESTDIR_SQ)$(INSTLIBDIR_SQ)/Git'
- $(INSTALL) -m 644 $(GIT_MEDIAWIKI_PM) \
- '$(DESTDIR_SQ)$(INSTLIBDIR_SQ)/$(GIT_MEDIAWIKI_PM)'
-
-build:
- $(MAKE) -C $(GIT_ROOT_DIR) SCRIPT_PERL="$(SCRIPT_PERL_FULL)" \
- build-perl-script
-
-install: install_pm
- $(MAKE) -C $(GIT_ROOT_DIR) SCRIPT_PERL="$(SCRIPT_PERL_FULL)" \
- install-perl-script
-
-clean:
- $(MAKE) -C $(GIT_ROOT_DIR) SCRIPT_PERL="$(SCRIPT_PERL_FULL)" \
- clean-perl-script
-
-perlcritic:
- perlcritic -5 $(SCRIPT_PERL)
- -perlcritic -2 $(SCRIPT_PERL)
-
-.PHONY: all test check install_pm install clean perlcritic
diff --git a/contrib/mw-to-git/bin-wrapper/git b/contrib/mw-to-git/bin-wrapper/git
deleted file mode 100755
index 6663ae57e8..0000000000
--- a/contrib/mw-to-git/bin-wrapper/git
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/bin/sh
-
-# git executable wrapper script for Git-Mediawiki to run tests without
-# installing all the scripts and perl packages.
-
-GIT_ROOT_DIR=../../..
-GIT_EXEC_PATH=$(cd "$(dirname "$0")" && cd ${GIT_ROOT_DIR} && pwd)
-
-GITPERLLIB="$GIT_EXEC_PATH"'/contrib/mw-to-git'"${GITPERLLIB:+:$GITPERLLIB}"
-PATH="$GIT_EXEC_PATH"'/contrib/mw-to-git:'"$PATH"
-
-export GITPERLLIB PATH
-
-exec "${GIT_EXEC_PATH}/bin-wrappers/git" "$@"
diff --git a/contrib/mw-to-git/git-mw.perl b/contrib/mw-to-git/git-mw.perl
deleted file mode 100755
index eb52a53d32..0000000000
--- a/contrib/mw-to-git/git-mw.perl
+++ /dev/null
@@ -1,368 +0,0 @@
-#!/usr/bin/perl
-
-# Copyright (C) 2013
-# Benoit Person <benoit.person@ensimag.imag.fr>
-# Celestin Matte <celestin.matte@ensimag.imag.fr>
-# License: GPL v2 or later
-
-# Set of tools for git repo with a mediawiki remote.
-# Documentation & bugtracker: https://github.com/Git-Mediawiki/Git-Mediawiki
-
-use strict;
-use warnings;
-
-use Getopt::Long;
-use URI::URL qw(url);
-use LWP::UserAgent;
-use HTML::TreeBuilder;
-
-use Git;
-use MediaWiki::API;
-use Git::Mediawiki qw(clean_filename connect_maybe
- EMPTY HTTP_CODE_PAGE_NOT_FOUND);
-
-# By default, use UTF-8 to communicate with Git and the user
-binmode STDERR, ':encoding(UTF-8)';
-binmode STDOUT, ':encoding(UTF-8)';
-
-# Global parameters
-my $verbose = 0;
-sub v_print {
- if ($verbose) {
- return print {*STDERR} @_;
- }
- return;
-}
-
-# Preview parameters
-my $file_name = EMPTY;
-my $remote_name = EMPTY;
-my $preview_file_name = EMPTY;
-my $autoload = 0;
-sub file {
- $file_name = shift;
- return $file_name;
-}
-
-my %commands = (
- 'help' =>
- [\&help, {}, \&help],
- 'preview' =>
- [\&preview, {
- '<>' => \&file,
- 'output|o=s' => \$preview_file_name,
- 'remote|r=s' => \$remote_name,
- 'autoload|a' => \$autoload
- }, \&preview_help]
-);
-
-# Search for sub-command
-my $cmd = $commands{'help'};
-for (0..@ARGV-1) {
- if (defined $commands{$ARGV[$_]}) {
- $cmd = $commands{$ARGV[$_]};
- splice @ARGV, $_, 1;
- last;
- }
-};
-GetOptions( %{$cmd->[1]},
- 'help|h' => \&{$cmd->[2]},
- 'verbose|v' => \$verbose);
-
-# Launch command
-&{$cmd->[0]};
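
Given the dispatch table above, typical invocations would look like the
following (remote and page names are made up):

	git mw help
	git mw preview --remote wiki --autoload Main_Page.mw
	git mw preview -o out.html HEAD:Main_Page.mw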
-
-############################# Preview Functions ################################
-
-sub preview_help {
- print {*STDOUT} <<'END';
-USAGE: git mw preview [--remote|-r <remote name>] [--autoload|-a]
- [--output|-o <output filename>] [--verbose|-v]
- <blob> | <filename>
-
-DESCRIPTION:
-Preview is a utility to preview local content of a mediawiki repo as if it were
-pushed to the remote.
-
-For that, preview searches for the remote name of the current branch's
-upstream if --remote is not set. If that remote is not found or if it
-is not a mediawiki, it lists all mediawiki remotes configured and asks
-you to replay your command with the --remote option set properly.
-
-Then, it searches for a file named 'filename'. If it's not found in
-the current dir, it will assume it's a blob.
-
-The content retrieved in the file (or in the blob) will then be parsed
-by the remote mediawiki and combined with a template retrieved from
-the mediawiki.
-
-Finally, preview will save the HTML result in a file, and autoload it
-in your default web browser if the option --autoload is present.
-
-OPTIONS:
- -r <remote name>, --remote <remote name>
- If the remote is a mediawiki, the template and the parse engine
- used for the preview will be those of that remote.
- If not, a list of valid remotes will be shown.
-
- -a, --autoload
- Try to load the HTML output in a new tab (or new window) of your
- default web browser.
-
- -o <output filename>, --output <output filename>
- Change the HTML output filename. Default filename is based on the
- input filename with its extension replaced by '.html'.
-
- -v, --verbose
- Show more information on what's going on under the hood.
-END
- exit;
-}
-
-sub preview {
- my $wiki;
- my ($remote_url, $wiki_page_name);
- my ($new_content, $template);
- my $file_content;
-
- if ($file_name eq EMPTY) {
- die "Missing file argument, see `git mw help`\n";
- }
-
- v_print("### Selecting remote\n");
- if ($remote_name eq EMPTY) {
- $remote_name = find_upstream_remote_name();
- if ($remote_name) {
- $remote_url = mediawiki_remote_url_maybe($remote_name);
- }
-
- if (! $remote_url) {
- my @valid_remotes = find_mediawiki_remotes();
-
- if ($#valid_remotes == 0) {
- print {*STDERR} "No mediawiki remote in this repo. \n";
- exit 1;
- } else {
- my $remotes_list = join("\n\t", @valid_remotes);
- print {*STDERR} <<"MESSAGE";
-There are multiple mediawiki remotes, which of:
- ${remotes_list}
-do you want? Use the -r option to specify the remote.
-MESSAGE
- }
-
- exit 1;
- }
- } else {
- if (!is_valid_remote($remote_name)) {
- die "${remote_name} is not a remote\n";
- }
-
- $remote_url = mediawiki_remote_url_maybe($remote_name);
- if (! $remote_url) {
- die "${remote_name} is not a mediawiki remote\n";
- }
- }
- v_print("selected remote:\n\tname: ${remote_name}\n\turl: ${remote_url}\n");
-
- $wiki = connect_maybe($wiki, $remote_name, $remote_url);
-
- # Read file content
- if (! -e $file_name) {
- $file_content = git_cmd_try {
- Git::command('cat-file', 'blob', $file_name); }
- "%s failed w/ code %d";
-
- if ($file_name =~ /(.+):(.+)/) {
- $file_name = $2;
- }
- } else {
- open my $read_fh, "<", $file_name
- or die "could not open ${file_name}: $!\n";
- $file_content = do { local $/ = undef; <$read_fh> };
- close $read_fh
- or die "unable to close: $!\n";
- }
-
- v_print("### Retrieving template\n");
- ($wiki_page_name = clean_filename($file_name)) =~ s/\.[^.]+$//;
- $template = get_template($remote_url, $wiki_page_name);
-
- v_print("### Parsing local content\n");
- $new_content = $wiki->api({
- action => 'parse',
- text => $file_content,
- title => $wiki_page_name
- }, {
- skip_encoding => 1
- }) or die "No response from remote mediawiki\n";
- $new_content = $new_content->{'parse'}->{'text'}->{'*'};
-
- v_print("### Merging contents\n");
- if ($preview_file_name eq EMPTY) {
- ($preview_file_name = $file_name) =~ s/\.[^.]+$/.html/;
- }
- open(my $save_fh, '>:encoding(UTF-8)', $preview_file_name)
- or die "Could not open: $!\n";
- print {$save_fh} merge_contents($template, $new_content, $remote_url);
- close($save_fh)
- or die "Could not close: $!\n";
-
- v_print("### Results\n");
- if ($autoload) {
- v_print("Launching browser w/ file: ${preview_file_name}");
- system('git', 'web--browse', $preview_file_name);
- } else {
- print {*STDERR} "Preview file saved as: ${preview_file_name}\n";
- }
-
- exit;
-}
-
-# uses global scope variable: $remote_name
-sub merge_contents {
- my $template = shift;
- my $content = shift;
- my $remote_url = shift;
- my ($content_tree, $html_tree, $mw_content_text);
- my $template_content_id = 'bodyContent';
-
- $html_tree = HTML::TreeBuilder->new;
- $html_tree->parse($template);
-
- $content_tree = HTML::TreeBuilder->new;
- $content_tree->parse($content);
-
- $template_content_id = Git::config("remote.${remote_name}.mwIDcontent")
- || $template_content_id;
- v_print("Using '${template_content_id}' as the content ID\n");
-
- $mw_content_text = $html_tree->look_down('id', $template_content_id);
- if (!defined $mw_content_text) {
- print {*STDERR} <<"CONFIG";
-Could not combine the new content with the template. You might want to
-configure `mediawiki.IDContent` in your config:
- git config --add remote.${remote_name}.mwIDcontent <id>
-and re-run the command afterward.
-CONFIG
- exit 1;
- }
- $mw_content_text->delete_content();
- $mw_content_text->push_content($content_tree);
-
- make_links_absolute($html_tree, $remote_url);
-
- return $html_tree->as_HTML;
-}
-
-sub make_links_absolute {
- my $html_tree = shift;
- my $remote_url = shift;
- for (@{ $html_tree->extract_links() }) {
- my ($link, $element, $attr) = @{ $_ };
- my $url = url($link)->canonical;
- if ($url !~ /#/) {
- $element->attr($attr, URI->new_abs($url, $remote_url));
- }
- }
- return $html_tree;
-}
-
-sub is_valid_remote {
-	my $remote = shift;
-	my @remotes = git_cmd_try {
-		Git::command('remote') }
-		"%s failed w/ code %d";
-	my $found_remote = 0;
-	foreach my $candidate (@remotes) {
-		if ($candidate eq $remote) {
-			$found_remote = 1;
-			last;
-		}
-	}
-	return $found_remote;
-}
-
-sub find_mediawiki_remotes {
- my @remotes = git_cmd_try {
- Git::command('remote'); }
- "%s failed w/ code %d";
- my $remote_url;
- my @valid_remotes = ();
- foreach my $remote (@remotes) {
- $remote_url = mediawiki_remote_url_maybe($remote);
- if ($remote_url) {
- push(@valid_remotes, $remote);
- }
- }
- return @valid_remotes;
-}
-
-sub find_upstream_remote_name {
- my $current_branch = git_cmd_try {
- Git::command_oneline('symbolic-ref', '--short', 'HEAD') }
- "%s failed w/ code %d";
- return Git::config("branch.${current_branch}.remote");
-}
-
-sub mediawiki_remote_url_maybe {
- my $remote = shift;
-
- # Find remote url
- my $remote_url = Git::config("remote.${remote}.url");
- if ($remote_url =~ s/mediawiki::(.*)/$1/) {
- return url($remote_url)->canonical;
- }
-
- return;
-}
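
For the sub above to return anything, the remote has to carry the
mediawiki:: prefix in its URL, e.g. (remote name and host are placeholders):

	[remote "wiki"]
		url = mediawiki::http://example.com/w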
-
-sub get_template {
- my $url = shift;
- my $page_name = shift;
- my ($req, $res, $code, $url_after);
-
- $req = LWP::UserAgent->new;
- if ($verbose) {
- $req->show_progress(1);
- }
-
- $res = $req->get("${url}/index.php?title=${page_name}");
- if (!$res->is_success) {
- $code = $res->code;
- $url_after = $res->request()->uri(); # resolve all redirections
- if ($code == HTTP_CODE_PAGE_NOT_FOUND) {
- if ($verbose) {
- print {*STDERR} <<"WARNING";
-Warning: Failed to retrieve '$page_name'. Create it on the mediawiki if you want
-all the links to work properly.
-Trying to use the mediawiki homepage as a fallback template ...
-WARNING
- }
-
- # LWP automatically redirects GET request
- $res = $req->get("${url}/index.php");
- if (!$res->is_success) {
- $url_after = $res->request()->uri(); # resolve all redirections
- die "Failed to get homepage @ ${url_after} w/ code ${code}\n";
- }
- } else {
- die "Failed to get '${page_name}' @ ${url_after} w/ code ${code}\n";
- }
- }
-
- return $res->decoded_content;
-}
-
-############################## Help Functions ##################################
-
-sub help {
- print {*STDOUT} <<'END';
-usage: git mw <command> <args>
-
-git mw commands are:
- help Display help information about git mw
- preview Parse and render local file into HTML
-END
- exit;
-}
diff --git a/contrib/mw-to-git/git-remote-mediawiki.perl b/contrib/mw-to-git/git-remote-mediawiki.perl
deleted file mode 100755
index a5624413dc..0000000000
--- a/contrib/mw-to-git/git-remote-mediawiki.perl
+++ /dev/null
@@ -1,1390 +0,0 @@
-#! /usr/bin/perl
-
-# Copyright (C) 2011
-# Jérémie Nikaes <jeremie.nikaes@ensimag.imag.fr>
-# Arnaud Lacurie <arnaud.lacurie@ensimag.imag.fr>
-# Claire Fousse <claire.fousse@ensimag.imag.fr>
-# David Amouyal <david.amouyal@ensimag.imag.fr>
-# Matthieu Moy <matthieu.moy@grenoble-inp.fr>
-# License: GPL v2 or later
-
-# Gateway between Git and MediaWiki.
-# Documentation & bugtracker: https://github.com/Git-Mediawiki/Git-Mediawiki
-
-use strict;
-use MediaWiki::API;
-use Git;
-use Git::Mediawiki qw(clean_filename smudge_filename connect_maybe
- EMPTY HTTP_CODE_OK);
-use DateTime::Format::ISO8601;
-use warnings;
-
-# By default, use UTF-8 to communicate with Git and the user
-binmode STDERR, ':encoding(UTF-8)';
-binmode STDOUT, ':encoding(UTF-8)';
-
-use URI::Escape;
-
-# It's not always possible to delete pages (may require some
-# privileges). Deleted pages are replaced with this content.
-use constant DELETED_CONTENT => "[[Category:Deleted]]\n";
-
-# It's not possible to create empty pages. New empty files in Git are
-# sent with this content instead.
-use constant EMPTY_CONTENT => "<!-- empty page -->\n";
-
-# used to reflect file creation or deletion in diff.
-use constant NULL_SHA1 => '0000000000000000000000000000000000000000';
-
-# Used on Git's side to reflect empty edit messages on the wiki
-use constant EMPTY_MESSAGE => '*Empty MediaWiki Message*';
-
-# Number of pages taken into account at once in subroutine get_mw_page_list
-use constant SLICE_SIZE => 50;
-
-# Number of linked mediafiles to get at once in get_linked_mediafiles
-# The query is split in small batches because of the MW API limit of
-# the number of links to be returned (500 links max).
-use constant BATCH_SIZE => 10;
-
-if (@ARGV != 2) {
- exit_error_usage();
-}
-
-my $remotename = $ARGV[0];
-my $url = $ARGV[1];
-
-# Accept both space-separated values and multiple config keys.
-# Spaces in page names should be written as _ anyway because the
-# list is split on spaces.
-my @tracked_pages = split(/[ \n]/, run_git_quoted(["config", "--get-all", "remote.${remotename}.pages"]));
-chomp(@tracked_pages);
-
-# Just like @tracked_pages, but for MediaWiki categories.
-my @tracked_categories = split(/[ \n]/, run_git_quoted(["config", "--get-all", "remote.${remotename}.categories"]));
-chomp(@tracked_categories);
-
-# Just like @tracked_categories, but for MediaWiki namespaces.
-my @tracked_namespaces = split(/[ \n]/, run_git_quoted(["config", "--get-all", "remote.${remotename}.namespaces"]));
-for (@tracked_namespaces) { s/_/ /g; }
-chomp(@tracked_namespaces);
-
-# Import media files on pull
-my $import_media = run_git_quoted(["config", "--get", "--bool", "remote.${remotename}.mediaimport"]);
-chomp($import_media);
-$import_media = ($import_media eq 'true');
-
-# Export media files on push
-my $export_media = run_git_quoted(["config", "--get", "--bool", "remote.${remotename}.mediaexport"]);
-chomp($export_media);
-$export_media = !($export_media eq 'false');
-
-my $wiki_login = run_git_quoted(["config", "--get", "remote.${remotename}.mwLogin"]);
-# Note: mwPassword is discouraged. Use the credential system instead.
-my $wiki_passwd = run_git_quoted(["config", "--get", "remote.${remotename}.mwPassword"]);
-my $wiki_domain = run_git_quoted(["config", "--get", "remote.${remotename}.mwDomain"]);
-chomp($wiki_login);
-chomp($wiki_passwd);
-chomp($wiki_domain);
-
-# Import only last revisions (both for clone and fetch)
-my $shallow_import = run_git_quoted(["config", "--get", "--bool", "remote.${remotename}.shallow"]);
-chomp($shallow_import);
-$shallow_import = ($shallow_import eq 'true');
-
-# Fetch (clone and pull) by revisions instead of by pages. This behavior
-# is more efficient when we have a wiki with lots of pages and we fetch
-# the revisions quite often so that they concern only a few pages.
-# Possible values:
-# - by_rev: perform one query per new revision on the remote wiki
-# - by_page: query each tracked page for new revision
-my $fetch_strategy = run_git_quoted(["config", "--get", "remote.${remotename}.fetchStrategy"]);
-if (!$fetch_strategy) {
- $fetch_strategy = run_git_quoted(["config", "--get", "mediawiki.fetchStrategy"]);
-}
-chomp($fetch_strategy);
-if (!$fetch_strategy) {
- $fetch_strategy = 'by_page';
-}
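
As a concrete illustration (the remote name is a placeholder), the strategy
can be pinned per remote or globally, with the per-remote value winning as
the lookup above shows:

	[remote "wiki"]
		fetchStrategy = by_rev
	[mediawiki]
		fetchStrategy = by_page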
-
-# Remember the timestamp corresponding to a revision id.
-my %basetimestamps;
-
-# Dumb push: don't update notes and mediawiki ref to reflect the last push.
-#
-# Configurable with mediawiki.dumbPush, or per-remote with
-# remote.<remotename>.dumbPush.
-#
-# This means the user will have to re-import the just-pushed
-# revisions. On the other hand, this means that the Git revisions
-# corresponding to MediaWiki revisions are all imported from the wiki,
-# regardless of whether they were initially created in Git or from the
-# web interface, hence all users will get the same history (i.e. if
-# the push from Git to MediaWiki loses some information, everybody
-# will get the history with information lost). If the import is
-# deterministic, this means everybody gets the same sha1 for each
-# MediaWiki revision.
-my $dumb_push = run_git_quoted(["config", "--get", "--bool", "remote.${remotename}.dumbPush"]);
-if (!$dumb_push) {
- $dumb_push = run_git_quoted(["config", "--get", "--bool", "mediawiki.dumbPush"]);
-}
-chomp($dumb_push);
-$dumb_push = ($dumb_push eq 'true');
-
-my $wiki_name = $url;
-$wiki_name =~ s{[^/]*://}{};
-# If URL is like http://user:password@example.com/, we clearly don't
-# want the password in $wiki_name. While we're there, also remove user
-# and '@' sign, to avoid author like MWUser@HTTPUser@host.com
-$wiki_name =~ s/^.*@//;
-
-# Commands parser
-while (<STDIN>) {
- chomp;
-
- if (!parse_command($_)) {
- last;
- }
-
- BEGIN { $| = 1 } # flush STDOUT, to make sure the previous
- # command is fully processed.
-}
-
-########################## Functions ##############################
-
-## error handling
-sub exit_error_usage {
- die "ERROR: git-remote-mediawiki module was not called with a correct number of\n" .
- "parameters\n" .
- "You may obtain this error because you attempted to run the git-remote-mediawiki\n" .
- "module directly.\n" .
- "This module can be used the following way:\n" .
- "\tgit clone mediawiki://<address of a mediawiki>\n" .
- "Then, use git commit, push and pull as with every normal git repository.\n";
-}
-
-sub parse_command {
- my ($line) = @_;
- my @cmd = split(/ /, $line);
- if (!defined $cmd[0]) {
- return 0;
- }
- if ($cmd[0] eq 'capabilities') {
- die("Too many arguments for capabilities\n")
- if (defined($cmd[1]));
- mw_capabilities();
- } elsif ($cmd[0] eq 'list') {
- die("Too many arguments for list\n") if (defined($cmd[2]));
- mw_list($cmd[1]);
- } elsif ($cmd[0] eq 'import') {
- die("Invalid argument for import\n")
- if ($cmd[1] eq EMPTY);
- die("Too many arguments for import\n")
- if (defined($cmd[2]));
- mw_import($cmd[1]);
- } elsif ($cmd[0] eq 'option') {
- die("Invalid arguments for option\n")
- if ($cmd[1] eq EMPTY || $cmd[2] eq EMPTY);
- die("Too many arguments for option\n")
- if (defined($cmd[3]));
- mw_option($cmd[1],$cmd[2]);
- } elsif ($cmd[0] eq 'push') {
- mw_push($cmd[1]);
- } else {
- print {*STDERR} "Unknown command. Aborting...\n";
- return 0;
- }
- return 1;
-}
-
-# MediaWiki API instance, created lazily.
-my $mediawiki;
-
-sub fatal_mw_error {
- my $action = shift;
- print STDERR "fatal: could not $action.\n";
- print STDERR "fatal: '$url' does not appear to be a mediawiki\n";
- if ($url =~ /^https/) {
- print STDERR "fatal: make sure '$url/api.php' is a valid page\n";
- print STDERR "fatal: and the SSL certificate is correct.\n";
- } else {
- print STDERR "fatal: make sure '$url/api.php' is a valid page.\n";
- }
- print STDERR "fatal: (error " .
- $mediawiki->{error}->{code} . ': ' .
- $mediawiki->{error}->{details} . ")\n";
- exit 1;
-}
-
-## Functions for listing pages on the remote wiki
-sub get_mw_tracked_pages {
- my $pages = shift;
- get_mw_page_list(\@tracked_pages, $pages);
- return;
-}
-
-sub get_mw_page_list {
- my $page_list = shift;
- my $pages = shift;
- my @some_pages = @{$page_list};
- while (@some_pages) {
- my $last_page = SLICE_SIZE;
- if ($#some_pages < $last_page) {
- $last_page = $#some_pages;
- }
- my @slice = @some_pages[0..$last_page];
- get_mw_first_pages(\@slice, $pages);
- @some_pages = @some_pages[(SLICE_SIZE + 1)..$#some_pages];
- }
- return;
-}
-
-sub get_mw_tracked_categories {
- my $pages = shift;
- foreach my $category (@tracked_categories) {
- if (index($category, ':') < 0) {
- # Mediawiki requires the Category
- # prefix, but let's not force the user
- # to specify it.
- $category = "Category:${category}";
- }
- my $mw_pages = $mediawiki->list( {
- action => 'query',
- list => 'categorymembers',
- cmtitle => $category,
- cmlimit => 'max' } )
- || die $mediawiki->{error}->{code} . ': '
- . $mediawiki->{error}->{details} . "\n";
- foreach my $page (@{$mw_pages}) {
- $pages->{$page->{title}} = $page;
- }
- }
- return;
-}
-
-sub get_mw_tracked_namespaces {
- my $pages = shift;
- foreach my $local_namespace (sort @tracked_namespaces) {
- my $namespace_id;
- if ($local_namespace eq "(Main)") {
- $namespace_id = 0;
- } else {
- $namespace_id = get_mw_namespace_id($local_namespace);
- }
- # virtual namespaces don't support allpages
- next if !defined($namespace_id) || $namespace_id < 0;
- my $mw_pages = $mediawiki->list( {
- action => 'query',
- list => 'allpages',
- apnamespace => $namespace_id,
- aplimit => 'max' } )
- || die $mediawiki->{error}->{code} . ': '
- . $mediawiki->{error}->{details} . "\n";
- print {*STDERR} "$#{$mw_pages} found in namespace $local_namespace ($namespace_id)\n";
- foreach my $page (@{$mw_pages}) {
- $pages->{$page->{title}} = $page;
- }
- }
- return;
-}
-
-sub get_mw_all_pages {
- my $pages = shift;
- # No user-provided list, get the list of pages from the API.
- my $mw_pages = $mediawiki->list({
- action => 'query',
- list => 'allpages',
- aplimit => 'max'
- });
- if (!defined($mw_pages)) {
- fatal_mw_error("get the list of wiki pages");
- }
- foreach my $page (@{$mw_pages}) {
- $pages->{$page->{title}} = $page;
- }
- return;
-}
-
-# Queries the wiki for a set of pages. Meant to be used within a loop
-# querying the wiki for slices of the page list.
-sub get_mw_first_pages {
- my $some_pages = shift;
- my @some_pages = @{$some_pages};
-
- my $pages = shift;
-
- # pattern 'page1|page2|...' required by the API
- my $titles = join('|', @some_pages);
-
- my $mw_pages = $mediawiki->api({
- action => 'query',
- titles => $titles,
- });
- if (!defined($mw_pages)) {
- fatal_mw_error("query the list of wiki pages");
- }
- while (my ($id, $page) = each(%{$mw_pages->{query}->{pages}})) {
- if ($id < 0) {
- print {*STDERR} "Warning: page $page->{title} not found on wiki\n";
- } else {
- $pages->{$page->{title}} = $page;
- }
- }
- return;
-}
-
-# Get the list of pages to be fetched according to configuration.
-sub get_mw_pages {
- $mediawiki = connect_maybe($mediawiki, $remotename, $url);
-
- print {*STDERR} "Listing pages on remote wiki...\n";
-
- my %pages; # hash on page titles to avoid duplicates
- my $user_defined;
- if (@tracked_pages) {
- $user_defined = 1;
- # The user provided a list of pages titles, but we
- # still need to query the API to get the page IDs.
- get_mw_tracked_pages(\%pages);
- }
- if (@tracked_categories) {
- $user_defined = 1;
- get_mw_tracked_categories(\%pages);
- }
- if (@tracked_namespaces) {
- $user_defined = 1;
- get_mw_tracked_namespaces(\%pages);
- }
- if (!$user_defined) {
- get_mw_all_pages(\%pages);
- }
- if ($import_media) {
- print {*STDERR} "Getting media files for selected pages...\n";
- if ($user_defined) {
- get_linked_mediafiles(\%pages);
- } else {
- get_all_mediafiles(\%pages);
- }
- }
- print {*STDERR} (scalar keys %pages) . " pages found.\n";
- return %pages;
-}
-
-# usage: $out = run_git_quoted(["command", "args", ...]);
-# $out = run_git_quoted(["command", "args", ...], "raw"); # don't interpret output as UTF-8.
-# $out = run_git_quoted_nostderr(["command", "args", ...]); # discard stderr
-# $out = run_git_quoted_nostderr(["command", "args", ...], "raw"); # ditto but raw instead of UTF-8 as above
-sub _run_git {
- my $args = shift;
- my $encoding = (shift || 'encoding(UTF-8)');
- open(my $git, "-|:${encoding}", @$args)
- or die "Unable to fork: $!\n";
- my $res = do {
- local $/ = undef;
- <$git>
- };
- close($git);
-
- return $res;
-}
-
-sub run_git_quoted {
- _run_git(["git", @{$_[0]}], $_[1]);
-}
-
-sub run_git_quoted_nostderr {
- _run_git(['sh', '-c', 'git "$@" 2>/dev/null', '--', @{$_[0]}], $_[1]);
-}
-
-sub get_all_mediafiles {
- my $pages = shift;
-	# Attach the list of all pages for media files from the API;
-	# they are in a different namespace, and only one namespace
-	# can be queried at a time.
- my $mw_pages = $mediawiki->list({
- action => 'query',
- list => 'allpages',
- apnamespace => get_mw_namespace_id('File'),
- aplimit => 'max'
- });
- if (!defined($mw_pages)) {
- print {*STDERR} "fatal: could not get the list of pages for media files.\n";
- print {*STDERR} "fatal: '$url' does not appear to be a mediawiki\n";
- print {*STDERR} "fatal: make sure '$url/api.php' is a valid page.\n";
- exit 1;
- }
- foreach my $page (@{$mw_pages}) {
- $pages->{$page->{title}} = $page;
- }
- return;
-}
-
-sub get_linked_mediafiles {
- my $pages = shift;
- my @titles = map { $_->{title} } values(%{$pages});
-
- my $batch = BATCH_SIZE;
- while (@titles) {
- if ($#titles < $batch) {
- $batch = $#titles;
- }
- my @slice = @titles[0..$batch];
-
- # pattern 'page1|page2|...' required by the API
- my $mw_titles = join('|', @slice);
-
-		# Media files can be included in or linked from
-		# a page; get all related ones.
- my $query = {
- action => 'query',
- prop => 'links|images',
- titles => $mw_titles,
- plnamespace => get_mw_namespace_id('File'),
- pllimit => 'max'
- };
- my $result = $mediawiki->api($query);
-
- while (my ($id, $page) = each(%{$result->{query}->{pages}})) {
- my @media_titles;
- if (defined($page->{links})) {
- my @link_titles
- = map { $_->{title} } @{$page->{links}};
- push(@media_titles, @link_titles);
- }
- if (defined($page->{images})) {
- my @image_titles
- = map { $_->{title} } @{$page->{images}};
- push(@media_titles, @image_titles);
- }
- if (@media_titles) {
- get_mw_page_list(\@media_titles, $pages);
- }
- }
-
- @titles = @titles[($batch+1)..$#titles];
- }
- return;
-}
-
-sub get_mw_mediafile_for_page_revision {
- # Name of the file on Wiki, with the prefix.
- my $filename = shift;
- my $timestamp = shift;
- my %mediafile;
-
-	# Check whether a media file with the given timestamp exists on
-	# MediaWiki. In that case, download the file.
- my $query = {
- action => 'query',
- prop => 'imageinfo',
- titles => "File:${filename}",
- iistart => $timestamp,
- iiend => $timestamp,
- iiprop => 'timestamp|archivename|url',
- iilimit => 1
- };
- my $result = $mediawiki->api($query);
-
- my ($fileid, $file) = each( %{$result->{query}->{pages}} );
- # If not defined it means there is no revision of the file for
- # given timestamp.
- if (defined($file->{imageinfo})) {
- $mediafile{title} = $filename;
-
- my $fileinfo = pop(@{$file->{imageinfo}});
- $mediafile{timestamp} = $fileinfo->{timestamp};
- # Mediawiki::API's download function doesn't support https URLs
- # and can't download old versions of files.
- print {*STDERR} "\tDownloading file $mediafile{title}, version $mediafile{timestamp}\n";
- $mediafile{content} = download_mw_mediafile($fileinfo->{url});
- }
- return %mediafile;
-}
-
-sub download_mw_mediafile {
- my $download_url = shift;
-
- my $response = $mediawiki->{ua}->get($download_url);
- if ($response->code == HTTP_CODE_OK) {
- # It is tempting to return
- # $response->decoded_content({charset => "none"}), but
- # when doing so, utf8::downgrade($content) fails with
- # "Wide character in subroutine entry".
- $response->decode();
- return $response->content();
- } else {
- print {*STDERR} "Error downloading mediafile from :\n";
- print {*STDERR} "URL: ${download_url}\n";
- print {*STDERR} 'Server response: ' . $response->code . q{ } . $response->message . "\n";
- exit 1;
- }
-}
-
-sub get_last_local_revision {
- # Get note regarding last mediawiki revision.
- my $note = run_git_quoted_nostderr(["notes", "--ref=${remotename}/mediawiki",
- "show", "refs/mediawiki/${remotename}/master"]);
- my @note_info = split(/ /, $note);
-
- my $lastrevision_number;
- if (!(defined($note_info[0]) && $note_info[0] eq 'mediawiki_revision:')) {
- print {*STDERR} 'No previous mediawiki revision found';
- $lastrevision_number = 0;
- } else {
- # Notes are formatted : mediawiki_revision: #number
- $lastrevision_number = $note_info[1];
- chomp($lastrevision_number);
- print {*STDERR} "Last local mediawiki revision found is ${lastrevision_number}";
- }
- return $lastrevision_number;
-}
-
-# Get the last remote revision without taking into account which pages are
-# tracked or not. This function makes a single request to the wiki, thus
-# avoiding a loop over all tracked pages. This is useful for the fetch-by-rev
-# option.
-sub get_last_global_remote_rev {
- $mediawiki = connect_maybe($mediawiki, $remotename, $url);
-
- my $query = {
- action => 'query',
- list => 'recentchanges',
- prop => 'revisions',
- rclimit => '1',
- rcdir => 'older',
- };
- my $result = $mediawiki->api($query);
- return $result->{query}->{recentchanges}[0]->{revid};
-}
-
-# Get the last remote revision concerning the tracked pages and the tracked
-# categories.
-sub get_last_remote_revision {
- $mediawiki = connect_maybe($mediawiki, $remotename, $url);
-
- my %pages_hash = get_mw_pages();
- my @pages = values(%pages_hash);
-
- my $max_rev_num = 0;
-
- print {*STDERR} "Getting last revision id on tracked pages...\n";
-
- foreach my $page (@pages) {
- my $id = $page->{pageid};
-
- my $query = {
- action => 'query',
- prop => 'revisions',
- rvprop => 'ids|timestamp',
- pageids => $id,
- };
-
- my $result = $mediawiki->api($query);
-
- my $lastrev = pop(@{$result->{query}->{pages}->{$id}->{revisions}});
-
- $basetimestamps{$lastrev->{revid}} = $lastrev->{timestamp};
-
- $max_rev_num = ($lastrev->{revid} > $max_rev_num ? $lastrev->{revid} : $max_rev_num);
- }
-
- print {*STDERR} "Last remote revision found is $max_rev_num.\n";
- return $max_rev_num;
-}
-
-# Clean content before sending it to MediaWiki
-sub mediawiki_clean {
- my $string = shift;
- my $page_created = shift;
-	# MediaWiki does not allow trailing whitespace at the end of a page, and pages end with a single \n.
-	# This function right-trims the string and adds a \n at the end to follow this rule.
- $string =~ s/\s+$//;
- if ($string eq EMPTY && $page_created) {
- # Creating empty pages is forbidden.
- $string = EMPTY_CONTENT;
- }
- return $string."\n";
-}
-
-# Filter applied on MediaWiki data before adding them to Git
-sub mediawiki_smudge {
- my $string = shift;
- if ($string eq EMPTY_CONTENT) {
- $string = EMPTY;
- }
- # This \n is important. This is due to mediawiki's way to handle end of files.
- return "${string}\n";
-}
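
A small sketch of the two filters above working together (the strings are
made up, and the page is assumed to already exist, so $page_created is 0):

	my $outgoing = mediawiki_clean("Some text  \n\n", 0);      # "Some text\n"
	my $incoming = mediawiki_smudge("<!-- empty page -->\n");  # "\n" on the Git side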
-
-sub literal_data {
- my ($content) = @_;
- print {*STDOUT} 'data ', bytes::length($content), "\n", $content;
- return;
-}
-
-sub literal_data_raw {
- # Output possibly binary content.
- my ($content) = @_;
- # Avoid confusion between size in bytes and in characters
- utf8::downgrade($content);
- binmode STDOUT, ':raw';
- print {*STDOUT} 'data ', bytes::length($content), "\n", $content;
- binmode STDOUT, ':encoding(UTF-8)';
- return;
-}
-
-sub mw_capabilities {
- # Revisions are imported to the private namespace
- # refs/mediawiki/$remotename/ by the helper and fetched into
- # refs/remotes/$remotename later by fetch.
- print {*STDOUT} "refspec refs/heads/*:refs/mediawiki/${remotename}/*\n";
- print {*STDOUT} "import\n";
- print {*STDOUT} "list\n";
- print {*STDOUT} "push\n";
- if ($dumb_push) {
- print {*STDOUT} "no-private-update\n";
- }
- print {*STDOUT} "\n";
- return;
-}
-
-sub mw_list {
-	# MediaWiki does not have branches; we consider one branch arbitrarily
-	# called master, with HEAD pointing to it.
- print {*STDOUT} "? refs/heads/master\n";
- print {*STDOUT} "\@refs/heads/master HEAD\n";
- print {*STDOUT} "\n";
- return;
-}
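
Put together with parse_command above, the opening exchange between Git and
this helper looks roughly like this ("wiki" is a placeholder remote name,
lines starting with > are what Git sends, and no-private-update only shows
up when dumb push is enabled):

	> capabilities
	refspec refs/heads/*:refs/mediawiki/wiki/*
	import
	list
	push

	> list
	? refs/heads/master
	@refs/heads/master HEAD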
-
-sub mw_option {
- print {*STDERR} "remote-helper command 'option $_[0]' not yet implemented\n";
- print {*STDOUT} "unsupported\n";
- return;
-}
-
-sub fetch_mw_revisions_for_page {
- my $page = shift;
- my $id = shift;
- my $fetch_from = shift;
- my @page_revs = ();
- my $query = {
- action => 'query',
- prop => 'revisions',
- rvprop => 'ids',
- rvdir => 'newer',
- rvstartid => $fetch_from,
- rvlimit => 500,
- pageids => $id,
-
- # Let MediaWiki know that we support the latest API.
- continue => '',
- };
-
- my $revnum = 0;
- # Get 500 revisions at a time due to the mediawiki api limit
- while (1) {
- my $result = $mediawiki->api($query);
-
- # Parse each of those 500 revisions
- foreach my $revision (@{$result->{query}->{pages}->{$id}->{revisions}}) {
- my $page_rev_ids;
- $page_rev_ids->{pageid} = $page->{pageid};
- $page_rev_ids->{revid} = $revision->{revid};
- push(@page_revs, $page_rev_ids);
- $revnum++;
- }
-
- if ($result->{'query-continue'}) { # For legacy APIs
- $query->{rvstartid} = $result->{'query-continue'}->{revisions}->{rvstartid};
- } elsif ($result->{continue}) { # For newer APIs
- $query->{rvstartid} = $result->{continue}->{rvcontinue};
- $query->{continue} = $result->{continue}->{continue};
- } else {
- last;
- }
- }
- if ($shallow_import && @page_revs) {
- print {*STDERR} " Found 1 revision (shallow import).\n";
- @page_revs = sort {$b->{revid} <=> $a->{revid}} (@page_revs);
- return $page_revs[0];
- }
- print {*STDERR} " Found ${revnum} revision(s).\n";
- return @page_revs;
-}
-
-sub fetch_mw_revisions {
- my $pages = shift; my @pages = @{$pages};
- my $fetch_from = shift;
-
- my @revisions = ();
- my $n = 1;
- foreach my $page (@pages) {
- my $id = $page->{pageid};
- print {*STDERR} "page ${n}/", scalar(@pages), ': ', $page->{title}, "\n";
- $n++;
- my @page_revs = fetch_mw_revisions_for_page($page, $id, $fetch_from);
- @revisions = (@page_revs, @revisions);
- }
-
- return ($n, @revisions);
-}
-
-sub fe_escape_path {
- my $path = shift;
- $path =~ s/\\/\\\\/g;
- $path =~ s/"/\\"/g;
- $path =~ s/\n/\\n/g;
- return qq("${path}");
-}
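
For example (the title is made up), a page name containing a quote would be
written to the fast-import stream as:

	fe_escape_path('A "quoted" page.mw');   # returns: "A \"quoted\" page.mw"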
-
-sub import_file_revision {
- my $commit = shift;
- my %commit = %{$commit};
- my $full_import = shift;
- my $n = shift;
- my $mediafile = shift;
- my %mediafile;
- if ($mediafile) {
- %mediafile = %{$mediafile};
- }
-
- my $title = $commit{title};
- my $comment = $commit{comment};
- my $content = $commit{content};
- my $author = $commit{author};
- my $date = $commit{date};
-
- print {*STDOUT} "commit refs/mediawiki/${remotename}/master\n";
- print {*STDOUT} "mark :${n}\n";
- print {*STDOUT} "committer ${author} <${author}\@${wiki_name}> " . $date->epoch . " +0000\n";
- literal_data($comment);
-
- # If it's not a clone, we need to know where to start from
- if (!$full_import && $n == 1) {
- print {*STDOUT} "from refs/mediawiki/${remotename}/master^0\n";
- }
- if ($content ne DELETED_CONTENT) {
- print {*STDOUT} 'M 644 inline ' .
- fe_escape_path("${title}.mw") . "\n";
- literal_data($content);
- if (%mediafile) {
- print {*STDOUT} 'M 644 inline '
- . fe_escape_path($mediafile{title}) . "\n";
- literal_data_raw($mediafile{content});
- }
- print {*STDOUT} "\n\n";
- } else {
- print {*STDOUT} 'D ' . fe_escape_path("${title}.mw") . "\n";
- }
-
- # mediawiki revision number in the git note
- if ($full_import && $n == 1) {
- print {*STDOUT} "reset refs/notes/${remotename}/mediawiki\n";
- }
- print {*STDOUT} "commit refs/notes/${remotename}/mediawiki\n";
- print {*STDOUT} "committer ${author} <${author}\@${wiki_name}> " . $date->epoch . " +0000\n";
- literal_data('Note added by git-mediawiki during import');
- if (!$full_import && $n == 1) {
- print {*STDOUT} "from refs/notes/${remotename}/mediawiki^0\n";
- }
- print {*STDOUT} "N inline :${n}\n";
- literal_data("mediawiki_revision: $commit{mw_revision}");
- print {*STDOUT} "\n\n";
- return;
-}
-
-# parse a sequence of
-# <cmd> <arg1>
-# <cmd> <arg2>
-# \n
-# (like batch sequence of import and sequence of push statements)
-sub get_more_refs {
- my $cmd = shift;
- my @refs;
- while (1) {
- my $line = <STDIN>;
- if ($line =~ /^$cmd (.*)$/) {
- push(@refs, $1);
- } elsif ($line eq "\n") {
- return @refs;
- } else {
- die("Invalid command in a '$cmd' batch: $_\n");
- }
- }
- return;
-}
-
-sub mw_import {
- # multiple import commands can follow each other.
- my @refs = (shift, get_more_refs('import'));
- my $processedRefs;
- foreach my $ref (@refs) {
- next if $processedRefs->{$ref}; # skip duplicates: "import refs/heads/master" being issued twice; TODO: why?
- $processedRefs->{$ref} = 1;
- mw_import_ref($ref);
- }
- print {*STDOUT} "done\n";
- return;
-}
-
-sub mw_import_ref {
- my $ref = shift;
- # The remote helper will call "import HEAD" and
- # "import refs/heads/master".
- # Since HEAD is a symbolic ref to master (by convention,
- # followed by the output of the command "list" that we gave),
- # we don't need to do anything in this case.
- if ($ref eq 'HEAD') {
- return;
- }
-
- $mediawiki = connect_maybe($mediawiki, $remotename, $url);
-
- print {*STDERR} "Searching revisions...\n";
- my $last_local = get_last_local_revision();
- my $fetch_from = $last_local + 1;
- if ($fetch_from == 1) {
- print {*STDERR} ", fetching from beginning.\n";
- } else {
- print {*STDERR} ", fetching from here.\n";
- }
-
- my $n = 0;
- if ($fetch_strategy eq 'by_rev') {
- print {*STDERR} "Fetching & writing export data by revs...\n";
- $n = mw_import_ref_by_revs($fetch_from);
- } elsif ($fetch_strategy eq 'by_page') {
- print {*STDERR} "Fetching & writing export data by pages...\n";
- $n = mw_import_ref_by_pages($fetch_from);
- } else {
- print {*STDERR} qq(fatal: invalid fetch strategy "${fetch_strategy}".\n);
- print {*STDERR} "Check your configuration variables remote.${remotename}.fetchStrategy and mediawiki.fetchStrategy\n";
- exit 1;
- }
-
- if ($fetch_from == 1 && $n == 0) {
- print {*STDERR} "You appear to have cloned an empty MediaWiki.\n";
- # Something has to be done remote-helper side. If nothing is done, an error is
- # thrown saying that HEAD is referring to unknown object 0000000000000000000
- # and the clone fails.
- }
- return;
-}
-
-sub mw_import_ref_by_pages {
-
- my $fetch_from = shift;
- my %pages_hash = get_mw_pages();
- my @pages = values(%pages_hash);
-
- my ($n, @revisions) = fetch_mw_revisions(\@pages, $fetch_from);
-
- @revisions = sort {$a->{revid} <=> $b->{revid}} @revisions;
- my @revision_ids = map { $_->{revid} } @revisions;
-
- return mw_import_revids($fetch_from, \@revision_ids, \%pages_hash);
-}
-
-sub mw_import_ref_by_revs {
-
- my $fetch_from = shift;
- my %pages_hash = get_mw_pages();
-
- my $last_remote = get_last_global_remote_rev();
- my @revision_ids = $fetch_from..$last_remote;
- return mw_import_revids($fetch_from, \@revision_ids, \%pages_hash);
-}
-
-# Import revisions given in second argument (array of integers).
-# Only pages appearing in the third argument (hash indexed by page titles)
-# will be imported.
-sub mw_import_revids {
- my $fetch_from = shift;
- my $revision_ids = shift;
- my $pages = shift;
-
- my $n = 0;
- my $n_actual = 0;
- my $last_timestamp = 0; # Placeholder in case $rev->timestamp is undefined
-
- foreach my $pagerevid (@{$revision_ids}) {
- # Count page even if we skip it, since we display
- # $n/$total and $total includes skipped pages.
- $n++;
-
- # fetch the content of the pages
- my $query = {
- action => 'query',
- prop => 'revisions',
- rvprop => 'content|timestamp|comment|user|ids',
- revids => $pagerevid,
- };
-
- my $result = $mediawiki->api($query);
-
- if (!$result) {
- die "Failed to retrieve modified page for revision $pagerevid\n";
- }
-
- if (defined($result->{query}->{badrevids}->{$pagerevid})) {
- # The revision id does not exist on the remote wiki.
- next;
- }
-
- if (!defined($result->{query}->{pages})) {
- die "Invalid revision ${pagerevid}.\n";
- }
-
- my @result_pages = values(%{$result->{query}->{pages}});
- my $result_page = $result_pages[0];
- my $rev = $result_pages[0]->{revisions}->[0];
-
- my $page_title = $result_page->{title};
-
- if (!exists($pages->{$page_title})) {
- print {*STDERR} "${n}/", scalar(@{$revision_ids}),
- ": Skipping revision #$rev->{revid} of ${page_title}\n";
- next;
- }
-
- $n_actual++;
-
- my %commit;
- $commit{author} = $rev->{user} || 'Anonymous';
- $commit{comment} = $rev->{comment} || EMPTY_MESSAGE;
- $commit{title} = smudge_filename($page_title);
- $commit{mw_revision} = $rev->{revid};
- $commit{content} = mediawiki_smudge($rev->{'*'});
-
- if (!defined($rev->{timestamp})) {
- $last_timestamp++;
- } else {
- $last_timestamp = $rev->{timestamp};
- }
- $commit{date} = DateTime::Format::ISO8601->parse_datetime($last_timestamp);
-
- # Differentiates classic pages and media files.
- my ($namespace, $filename) = $page_title =~ /^([^:]*):(.*)$/;
- my %mediafile;
- if ($namespace) {
- my $id = get_mw_namespace_id($namespace);
- if ($id && $id == get_mw_namespace_id('File')) {
- %mediafile = get_mw_mediafile_for_page_revision($filename, $rev->{timestamp});
- }
- }
-	# If this is a revision of the media page for a new version
-	# of a file, do one common commit for both the file and the media page.
-	# Otherwise, commit only that page.
- print {*STDERR} "${n}/", scalar(@{$revision_ids}), ": Revision #$rev->{revid} of $commit{title}\n";
- import_file_revision(\%commit, ($fetch_from == 1), $n_actual, \%mediafile);
- }
-
- return $n_actual;
-}
-
-sub error_non_fast_forward {
- my $advice = run_git_quoted(["config", "--bool", "advice.pushNonFastForward"]);
- chomp($advice);
- if ($advice ne 'false') {
- # Native git-push would show this after the summary.
- # We can't ask it to display it cleanly, so print it
- # ourselves before.
- print {*STDERR} "To prevent you from losing history, non-fast-forward updates were rejected\n";
- print {*STDERR} "Merge the remote changes (e.g. 'git pull') before pushing again. See the\n";
- print {*STDERR} "'Note about fast-forwards' section of 'git push --help' for details.\n";
- }
- print {*STDOUT} qq(error $_[0] "non-fast-forward"\n);
- return 0;
-}
-
-sub mw_upload_file {
- my $complete_file_name = shift;
- my $new_sha1 = shift;
- my $extension = shift;
- my $file_deleted = shift;
- my $summary = shift;
- my $newrevid;
- my $path = "File:${complete_file_name}";
- my %hashFiles = get_allowed_file_extensions();
- if (!exists($hashFiles{$extension})) {
- print {*STDERR} "${complete_file_name} is not a permitted file on this wiki.\n";
- print {*STDERR} "Check the configuration of file uploads in your mediawiki.\n";
- return $newrevid;
- }
- # Deleting and uploading a file requires a privileged user
- if ($file_deleted) {
- $mediawiki = connect_maybe($mediawiki, $remotename, $url);
- my $query = {
- action => 'delete',
- title => $path,
- reason => $summary
- };
- if (!$mediawiki->edit($query)) {
- print {*STDERR} "Failed to delete file on remote wiki\n";
- print {*STDERR} "Check your permissions on the remote site. Error code:\n";
- print {*STDERR} $mediawiki->{error}->{code} . ':' . $mediawiki->{error}->{details};
- exit 1;
- }
- } else {
- # Don't let perl try to interpret file content as UTF-8 => use "raw"
- my $content = run_git_quoted(["cat-file", "blob", $new_sha1], 'raw');
- if ($content ne EMPTY) {
- $mediawiki = connect_maybe($mediawiki, $remotename, $url);
- $mediawiki->{config}->{upload_url} =
- "${url}/index.php/Special:Upload";
- $mediawiki->edit({
- action => 'upload',
- filename => $complete_file_name,
- comment => $summary,
- file => [undef,
- $complete_file_name,
- Content => $content],
- ignorewarnings => 1,
- }, {
- skip_encoding => 1
- } ) || die $mediawiki->{error}->{code} . ':'
- . $mediawiki->{error}->{details} . "\n";
- my $last_file_page = $mediawiki->get_page({title => $path});
- $newrevid = $last_file_page->{revid};
- print {*STDERR} "Pushed file: ${new_sha1} - ${complete_file_name}.\n";
- } else {
- print {*STDERR} "Empty file ${complete_file_name} not pushed.\n";
- }
- }
- return $newrevid;
-}
-
-sub mw_push_file {
- my $diff_info = shift;
- # $diff_info contains a string in this format:
- # 100644 100644 <sha1_of_blob_before_commit> <sha1_of_blob_now> <status>
- my @diff_info_split = split(/[ \t]/, $diff_info);
-
- # Filename, including .mw extension
- my $complete_file_name = shift;
- # Commit message
- my $summary = shift;
- # MediaWiki revision number. Keep the previous one by default,
- # in case there's no edit to perform.
- my $oldrevid = shift;
- my $newrevid;
-
- if ($summary eq EMPTY_MESSAGE) {
- $summary = EMPTY;
- }
-
- my $new_sha1 = $diff_info_split[3];
- my $old_sha1 = $diff_info_split[2];
- my $page_created = ($old_sha1 eq NULL_SHA1);
- my $page_deleted = ($new_sha1 eq NULL_SHA1);
- $complete_file_name = clean_filename($complete_file_name);
-
- my ($title, $extension) = $complete_file_name =~ /^(.*)\.([^\.]*)$/;
- if (!defined($extension)) {
- $extension = EMPTY;
- }
- if ($extension eq 'mw') {
- my $ns = get_mw_namespace_id_for_page($complete_file_name);
- if ($ns && $ns == get_mw_namespace_id('File') && (!$export_media)) {
- print {*STDERR} "Ignoring media file related page: ${complete_file_name}\n";
- return ($oldrevid, 'ok');
- }
- my $file_content;
- if ($page_deleted) {
- # Deleting a page usually requires
- # special privileges. A common
- # convention is to replace the page
- # with this content instead:
- $file_content = DELETED_CONTENT;
- } else {
- $file_content = run_git_quoted(["cat-file", "blob", $new_sha1]);
- }
-
- $mediawiki = connect_maybe($mediawiki, $remotename, $url);
-
- my $result = $mediawiki->edit( {
- action => 'edit',
- summary => $summary,
- title => $title,
- basetimestamp => $basetimestamps{$oldrevid},
- text => mediawiki_clean($file_content, $page_created),
- }, {
-			skip_encoding => 1 # Helps with names containing accented characters
- });
- if (!$result) {
- if ($mediawiki->{error}->{code} == 3) {
- # edit conflicts, considered as non-fast-forward
- print {*STDERR} 'Warning: Error ' .
- $mediawiki->{error}->{code} .
- ' from mediawiki: ' . $mediawiki->{error}->{details} .
- ".\n";
- return ($oldrevid, 'non-fast-forward');
- } else {
- # Other errors. Shouldn't happen => just die()
- die 'Fatal: Error ' .
- $mediawiki->{error}->{code} .
- ' from mediawiki: ' . $mediawiki->{error}->{details} . "\n";
- }
- }
- $newrevid = $result->{edit}->{newrevid};
- print {*STDERR} "Pushed file: ${new_sha1} - ${title}\n";
- } elsif ($export_media) {
- $newrevid = mw_upload_file($complete_file_name, $new_sha1,
- $extension, $page_deleted,
- $summary);
- } else {
- print {*STDERR} "Ignoring media file ${title}\n";
- }
- $newrevid = ($newrevid or $oldrevid);
- return ($newrevid, 'ok');
-}
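
The $diff_info string parsed above is a raw `git diff-tree` record. To see its exact shape outside the helper, the same plumbing can be run by hand; HEAD~1 and HEAD below are only placeholder commits:

	# Each record is ":<old_mode> <new_mode> <old_sha1> <new_sha1> <status>"
	# followed by the path; with -z the two parts are NUL-separated.
	git diff-tree -r --raw -z HEAD~1 HEAD |
	tr '\0' '\n' |
	while read -r meta && read -r file
	do
		echo "meta: $meta"
		echo "file: $file"
	done
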
-
-sub mw_push {
- # multiple push statements can follow each other
- my @refsspecs = (shift, get_more_refs('push'));
- my $pushed;
- for my $refspec (@refsspecs) {
- my ($force, $local, $remote) = $refspec =~ /^(\+)?([^:]*):([^:]*)$/
- or die("Invalid refspec for push. Expected <src>:<dst> or +<src>:<dst>\n");
- if ($force) {
- print {*STDERR} "Warning: forced push not allowed on a MediaWiki.\n";
- }
- if ($local eq EMPTY) {
- print {*STDERR} "Cannot delete remote branch on a MediaWiki\n";
- print {*STDOUT} "error ${remote} cannot delete\n";
- next;
- }
- if ($remote ne 'refs/heads/master') {
-			print {*STDERR} "Only pushes to the branch 'master' are supported on a MediaWiki\n";
- print {*STDOUT} "error ${remote} only master allowed\n";
- next;
- }
- if (mw_push_revision($local, $remote)) {
- $pushed = 1;
- }
- }
-
- # Notify Git that the push is done
- print {*STDOUT} "\n";
-
- if ($pushed && $dumb_push) {
- print {*STDERR} "Just pushed some revisions to MediaWiki.\n";
- print {*STDERR} "The pushed revisions now have to be re-imported, and your current branch\n";
- print {*STDERR} "needs to be updated with these re-imported commits. You can do this with\n";
- print {*STDERR} "\n";
- print {*STDERR} " git pull --rebase\n";
- print {*STDERR} "\n";
- }
- return;
-}
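
Given the checks above, only a narrow set of push shapes reaches mw_push_revision(). For illustration, with a hypothetical remote named "wiki" configured with a mediawiki:: URL:

	git push wiki master                    # accepted: updates refs/heads/master
	git push wiki topic:master              # accepted: any local source ref is fine
	git push --force wiki master            # the force flag only triggers a warning
	git push wiki :master                   # rejected: deletions are not supported
	git push wiki master:refs/heads/topic   # rejected: only 'master' is supported on the wiki side
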
-
-sub mw_push_revision {
- my $local = shift;
- my $remote = shift; # actually, this has to be "refs/heads/master" at this point.
- my $last_local_revid = get_last_local_revision();
- print {*STDERR} ".\n"; # Finish sentence started by get_last_local_revision()
- my $last_remote_revid = get_last_remote_revision();
- my $mw_revision = $last_remote_revid;
-
- # Get sha1 of commit pointed by local HEAD
- my $HEAD_sha1 = run_git_quoted_nostderr(["rev-parse", $local]);
- chomp($HEAD_sha1);
- # Get sha1 of commit pointed by remotes/$remotename/master
- my $remoteorigin_sha1 = run_git_quoted_nostderr(["rev-parse", "refs/remotes/${remotename}/master"]);
- chomp($remoteorigin_sha1);
-
- if ($last_local_revid > 0 &&
- $last_local_revid < $last_remote_revid) {
- return error_non_fast_forward($remote);
- }
-
- if ($HEAD_sha1 eq $remoteorigin_sha1) {
- # nothing to push
- return 0;
- }
-
- # Get every commit in between HEAD and refs/remotes/origin/master,
- # including HEAD and refs/remotes/origin/master
- my @commit_pairs = ();
- if ($last_local_revid > 0) {
- my $parsed_sha1 = $remoteorigin_sha1;
- # Find a path from last MediaWiki commit to pushed commit
- print {*STDERR} "Computing path from local to remote ...\n";
- my @local_ancestry = split(/\n/, run_git_quoted(["rev-list", "--boundary", "--parents", $local, "^${parsed_sha1}"]));
- my %local_ancestry;
- foreach my $line (@local_ancestry) {
- if (my ($child, $parents) = $line =~ /^-?([a-f0-9]+) ([a-f0-9 ]+)/) {
- foreach my $parent (split(/ /, $parents)) {
- $local_ancestry{$parent} = $child;
- }
- } elsif (!$line =~ /^([a-f0-9]+)/) {
- die "Unexpected output from git rev-list: ${line}\n";
- }
- }
- while ($parsed_sha1 ne $HEAD_sha1) {
- my $child = $local_ancestry{$parsed_sha1};
- if (!$child) {
- print {*STDERR} "Cannot find a path in history from remote commit to last commit\n";
- return error_non_fast_forward($remote);
- }
- push(@commit_pairs, [$parsed_sha1, $child]);
- $parsed_sha1 = $child;
- }
- } else {
- # No remote mediawiki revision. Export the whole
- # history (linearized with --first-parent)
- print {*STDERR} "Warning: no common ancestor, pushing complete history\n";
- my $history = run_git_quoted(["rev-list", "--first-parent", "--children", $local]);
- my @history = split(/\n/, $history);
- @history = @history[1..$#history];
- foreach my $line (reverse @history) {
- my @commit_info_split = split(/[ \n]/, $line);
- push(@commit_pairs, \@commit_info_split);
- }
- }
-
- foreach my $commit_info_split (@commit_pairs) {
- my $sha1_child = @{$commit_info_split}[0];
- my $sha1_commit = @{$commit_info_split}[1];
- my $diff_infos = run_git_quoted(["diff-tree", "-r", "--raw", "-z", $sha1_child, $sha1_commit]);
-		# TODO: we could detect renames and encode them with a #redirect on the wiki.
-		# TODO: for now, a rename is just a delete followed by an add.
- my @diff_info_list = split(/\0/, $diff_infos);
-		# Keep the subject line of the commit message as the MediaWiki comment for the revision
- my $commit_msg = run_git_quoted(["log", "--no-walk", '--format="%s"', $sha1_commit]);
- chomp($commit_msg);
- # Push every blob
- while (@diff_info_list) {
- my $status;
- # git diff-tree -z gives an output like
- # <metadata>\0<filename1>\0
- # <metadata>\0<filename2>\0
- # and we've split on \0.
- my $info = shift(@diff_info_list);
- my $file = shift(@diff_info_list);
- ($mw_revision, $status) = mw_push_file($info, $file, $commit_msg, $mw_revision);
- if ($status eq 'non-fast-forward') {
- # we may already have sent part of the
- # commit to MediaWiki, but it's too
- # late to cancel it. Stop the push in
- # the middle, but still give an
- # accurate error message.
- return error_non_fast_forward($remote);
- }
- if ($status ne 'ok') {
- die("Unknown error from mw_push_file()\n");
- }
- }
- if (!$dumb_push) {
- run_git_quoted(["notes", "--ref=${remotename}/mediawiki",
- "add", "-f", "-m",
- "mediawiki_revision: ${mw_revision}",
- $sha1_commit]);
- }
- }
-
- print {*STDOUT} "ok ${remote}\n";
- return 1;
-}
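
Both rev-list invocations above are ordinary plumbing, so their output can be inspected directly. In the sketch below, HEAD stands in for the local ref being pushed and refs/remotes/origin/master for the last imported wiki revision:

	# Ancestry walk: one "<commit> <parent>..." line per commit, with
	# boundary commits (the already-pushed tip) prefixed by '-'.
	git rev-list --boundary --parents HEAD ^refs/remotes/origin/master

	# Fallback when there is no common ancestor: "<commit> <child>..."
	# lines, newest first; the helper drops the first line (HEAD itself)
	# and then walks the remainder oldest first.
	git rev-list --first-parent --children HEAD
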
-
-sub get_allowed_file_extensions {
- $mediawiki = connect_maybe($mediawiki, $remotename, $url);
-
- my $query = {
- action => 'query',
- meta => 'siteinfo',
- siprop => 'fileextensions'
- };
- my $result = $mediawiki->api($query);
- my @file_extensions = map { $_->{ext}} @{$result->{query}->{fileextensions}};
- my %hashFile = map { $_ => 1 } @file_extensions;
-
- return %hashFile;
-}
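
The same siteinfo request can be replayed directly against the wiki's api.php endpoint to see the raw data this function consumes; the URL below is a placeholder:

	# The JSON reply carries one {"ext": "..."} entry per allowed upload
	# extension under query.fileextensions.
	curl -s 'http://wiki.example.org/api.php?action=query&meta=siteinfo&siprop=fileextensions&format=json'
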
-
-# In memory cache for MediaWiki namespace ids.
-my %namespace_id;
-
-# Namespaces whose id is cached in the configuration file
-# (to avoid duplicates)
-my %cached_mw_namespace_id;
-
-# Return MediaWiki id for a canonical namespace name.
-# Ex.: "File", "Project".
-sub get_mw_namespace_id {
- $mediawiki = connect_maybe($mediawiki, $remotename, $url);
- my $name = shift;
-
- if (!exists $namespace_id{$name}) {
-		# Check the configuration file to see whether the record for that
-		# namespace is already cached. Namespaces are stored in the form
-		# "Name_of_namespace:Id_namespace", e.g. "File:6".
- my @temp = split(/\n/,
- run_git_quoted(["config", "--get-all", "remote.${remotename}.namespaceCache"]));
- chomp(@temp);
- foreach my $ns (@temp) {
- my ($n, $id) = split(/:/, $ns);
- if ($id eq 'notANameSpace') {
- $namespace_id{$n} = {is_namespace => 0};
- } else {
- $namespace_id{$n} = {is_namespace => 1, id => $id};
- }
- $cached_mw_namespace_id{$n} = 1;
- }
- }
-
- if (!exists $namespace_id{$name}) {
- print {*STDERR} "Namespace ${name} not found in cache, querying the wiki ...\n";
- # NS not found => get namespace id from MW and store it in
- # configuration file.
- my $query = {
- action => 'query',
- meta => 'siteinfo',
- siprop => 'namespaces'
- };
- my $result = $mediawiki->api($query);
-
- while (my ($id, $ns) = each(%{$result->{query}->{namespaces}})) {
- if (defined($ns->{id}) && defined($ns->{canonical})) {
- $namespace_id{$ns->{canonical}} = {is_namespace => 1, id => $ns->{id}};
- if ($ns->{'*'}) {
- # alias (e.g. french Fichier: as alias for canonical File:)
- $namespace_id{$ns->{'*'}} = {is_namespace => 1, id => $ns->{id}};
- }
- }
- }
- }
-
- my $ns = $namespace_id{$name};
- my $id;
-
- if (!defined $ns) {
- my @namespaces = map { s/ /_/g; $_; } sort keys %namespace_id;
- print {*STDERR} "No such namespace ${name} on MediaWiki, known namespaces: @namespaces\n";
- $ns = {is_namespace => 0};
- $namespace_id{$name} = $ns;
- }
-
- if ($ns->{is_namespace}) {
- $id = $ns->{id};
- }
-
-	# Store "notANameSpace" as a special value for nonexistent namespaces
- my $store_id = ($id || 'notANameSpace');
-
- # Store explicitly requested namespaces on disk
- if (!exists $cached_mw_namespace_id{$name}) {
- run_git_quoted(["config", "--add", "remote.${remotename}.namespaceCache", "${name}:${store_id}"]);
- $cached_mw_namespace_id{$name} = 1;
- }
- return $id;
-}
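
The persistent part of this cache is plain git config attached to the remote. The entries below are only examples of the two shapes the code reads and writes ("Name:Id" for real namespaces, "Name:notANameSpace" for the negative sentinel), using a hypothetical remote called "origin":

	git config --add remote.origin.namespaceCache "File:6"
	git config --add remote.origin.namespaceCache "NotesPage:notANameSpace"
	git config --get-all remote.origin.namespaceCache
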
-
-sub get_mw_namespace_id_for_page {
- my $namespace = shift;
- if ($namespace =~ /^([^:]*):/) {
- return get_mw_namespace_id($namespace);
- } else {
- return;
- }
-}
diff --git a/contrib/mw-to-git/git-remote-mediawiki.txt b/contrib/mw-to-git/git-remote-mediawiki.txt
deleted file mode 100644
index 5da825f61e..0000000000
--- a/contrib/mw-to-git/git-remote-mediawiki.txt
+++ /dev/null
@@ -1,7 +0,0 @@
-Git-Mediawiki is a project which aims to create a gateway
-between Git and MediaWiki, allowing Git users to push and pull
-objects to and from MediaWiki just as they would with a classic Git
-repository, thanks to remote helpers.
-
-For more information, visit the wiki at
-https://github.com/Git-Mediawiki/Git-Mediawiki
diff --git a/contrib/mw-to-git/t/.gitignore b/contrib/mw-to-git/t/.gitignore
deleted file mode 100644
index 2b8dc30c6d..0000000000
--- a/contrib/mw-to-git/t/.gitignore
+++ /dev/null
@@ -1,4 +0,0 @@
-WEB/
-mediawiki/
-trash directory.t*/
-test-results/
diff --git a/contrib/mw-to-git/t/Makefile b/contrib/mw-to-git/t/Makefile
deleted file mode 100644
index 6c9f377caa..0000000000
--- a/contrib/mw-to-git/t/Makefile
+++ /dev/null
@@ -1,32 +0,0 @@
-#
-# Copyright (C) 2012
-# Charles Roussel <charles.roussel@ensimag.imag.fr>
-# Simon Cathebras <simon.cathebras@ensimag.imag.fr>
-# Julien Khayat <julien.khayat@ensimag.imag.fr>
-# Guillaume Sasdy <guillaume.sasdy@ensimag.imag.fr>
-# Simon Perrat <simon.perrat@ensimag.imag.fr>
-#
-## Test git-remote-mediawiki
-
-# The default target of this Makefile is...
-all:: test
-
--include ../../../config.mak.autogen
--include ../../../config.mak
-
-T = $(wildcard t[0-9][0-9][0-9][0-9]-*.sh)
-
-.PHONY: help test clean all
-
-help:
- @echo 'Run "$(MAKE) test" to launch test scripts'
- @echo 'Run "$(MAKE) clean" to remove trash folders'
-
-test:
- @for t in $(T); do \
- echo "$$t"; \
- "./$$t" || exit 1; \
- done
-
-clean:
- $(RM) -r 'trash directory'.*
diff --git a/contrib/mw-to-git/t/README b/contrib/mw-to-git/t/README
deleted file mode 100644
index 72c4889db7..0000000000
--- a/contrib/mw-to-git/t/README
+++ /dev/null
@@ -1,124 +0,0 @@
-Tests for Mediawiki-to-Git
-==========================
-
-Introduction
-------------
-This manual describes how to install the git-remote-mediawiki test
-environment on a machine that already has Git installed.
-
-Prerequisites
--------------
-
-In order to run this test environment correctly, you will need to
-install the following packages (Debian/Ubuntu names, may need to be
-adapted for another distribution):
-
-* lighttpd
-* php
-* php-cgi
-* php-cli
-* php-curl
-* php-sqlite
-
-Principles and Technical Choices
---------------------------------
-
-The test environment makes it easy to install and manipulate one or
-several MediaWiki instances. To allow developers to run the test suite
-easily, the environment does not require root privileges (except to
-install the required packages if needed). It starts a web server
-instance under the user's account (lighttpd greatly helps with that),
-and does not need a separate database daemon (thanks to the use of
-sqlite).
-
-Run the test environment
-------------------------
-
-Install a new wiki
-~~~~~~~~~~~~~~~~~~
-
-Once you have all the prerequisites, you need to install a MediaWiki
-instance on your machine. Even if you already have one, it is still
-strongly recommended to install one with the provided script. Here is
-how it works:
-
-a. change directory to contrib/mw-to-git/t/
-b. if needed, edit test.config to choose your installation parameters
-c. run `./install-wiki.sh install`
-d. check in your favourite web browser that your wiki is correctly
-   installed.
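
In practice this boils down to a short command sequence (shown here with the default test.config left untouched):

	cd contrib/mw-to-git/t
	./install-wiki.sh install
	# then point a browser at the address configured in test.config
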
-
-Remove an existing wiki
-~~~~~~~~~~~~~~~~~~~~~~~
-
-Edit the file test.config to fit the wiki you want to delete, and then
-execute the command `./install-wiki.sh delete` from the
-contrib/mw-to-git/t directory.
-
-Run the existing tests
-~~~~~~~~~~~~~~~~~~~~~~
-
-The provided tests are currently in the `contrib/mw-to-git/t` directory.
-The files are all the t936[0-9]-*.sh shell scripts.
-
-a. Run all tests:
-To do so, run "make test" from the contrib/mw-to-git/ directory.
-
-b. Run a specific test:
-To run a given test <test_name>, run ./<test_name> from the
-contrib/mw-to-git/t directory.
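
For example:

	cd contrib/mw-to-git
	make test                        # run the whole suite
	cd t
	./t9360-mw-to-git-clone.sh       # run a single test
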
-
-How to create new tests
------------------------
-
-Available functions
-~~~~~~~~~~~~~~~~~~~
-
-The test environment of git-remote-mediawiki provides some functions
-useful for testing its behaviour. For more details about the functions'
-parameters, please refer to the `test-gitmw-lib.sh` and
-`test-gitmw.pl` files.
-
-** `test_check_precond`:
-Check whether the tests must be skipped. Please call this function
-at the beginning of each new test file.
-
-** `wiki_getpage`:
-Fetch a given page from the wiki and put its content in the
-directory given as a parameter.
-
-** `wiki_delete_page`:
-Delete a given page from the wiki.
-
-** `wiki_edit_page`:
-Create or modify a given page in the wiki. You can specify several
-parameters, such as a summary for the edit, or add the page to a
-given category.
-See test-gitmw.pl for more details.
-
-** `wiki_getallpage`:
-Fetch all pages from the wiki into a given directory. The directory
-is created if it does not exist.
-
-** `test_diff_directories`:
-Compare the content of two directories. The content must be the same.
-Use this function to compare the content of a git directory and a wiki
-one created by wiki_getallpage.
-
-** `test_contains_N_files`:
-Check if the given directory contains a given number of files.
-
-** `wiki_page_exist`:
-Test whether a given page exists on the wiki.
-
-** `wiki_reset`:
-Reset the wiki, i.e. flush the database. Use this function at the
-beginning of each new test, except if the test re-uses the same wiki
-(and history) as the previous test.
-
-How to write a new test
-~~~~~~~~~~~~~~~~~~~~~~~
-
-Please follow the standards used by Git; see git/t/README.
-New files should be named t936[0-9]-*.sh, as in the skeleton below.
-Be sure to reset your wiki regularly with the function `wiki_reset`.
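
A minimal skeleton that follows these conventions could look as follows; t9369 and the page name Foo are made-up examples:

	#!/bin/sh

	test_description='Example: clone a wiki that contains a single page'

	. ./test-gitmw-lib.sh
	. $TEST_DIRECTORY/test-lib.sh

	test_check_precond

	test_expect_success 'clone retrieves the page Foo' '
		wiki_reset &&
		wiki_editpage Foo "this page must be cloned" false &&
		git clone mediawiki::'"$WIKI_URL"' mw_dir_example &&
		test_path_is_file mw_dir_example/Foo.mw
	'

	test_done
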
diff --git a/contrib/mw-to-git/t/install-wiki.sh b/contrib/mw-to-git/t/install-wiki.sh
deleted file mode 100755
index c215213c4b..0000000000
--- a/contrib/mw-to-git/t/install-wiki.sh
+++ /dev/null
@@ -1,55 +0,0 @@
-#!/bin/sh
-
-# This script installs or deletes a MediaWiki on your computer.
-# It requires a web server with PHP and SQLite running. In addition, if you
-# do not have MediaWiki sources on your computer, the option 'install'
-# downloads them for you.
-# Please set the CONFIGURATION VARIABLES in ./test-gitmw-lib.sh
-
-WIKI_TEST_DIR=$(cd "$(dirname "$0")" && pwd)
-
-if test -z "$WIKI_TEST_DIR"
-then
- WIKI_TEST_DIR=.
-fi
-
-. "$WIKI_TEST_DIR"/test-gitmw-lib.sh
-usage () {
- echo "usage: "
- echo " ./install-wiki.sh <install | delete | --help>"
- echo " install | -i : Install a wiki on your computer."
- echo " delete | -d : Delete the wiki and all its pages and "
- echo " content."
- echo " start | -s : Start the previously configured lighttpd daemon"
- echo " stop : Stop lighttpd daemon."
-}
-
-
-# Argument: install, delete, start, stop, --help | -h
-case "$1" in
- "install" | "-i")
- wiki_install
- exit 0
- ;;
- "delete" | "-d")
- wiki_delete
- exit 0
- ;;
- "start" | "-s")
- start_lighttpd
- exit
- ;;
- "stop")
- stop_lighttpd
- exit
- ;;
- "--help" | "-h")
- usage
- exit 0
- ;;
- *)
- echo "Invalid argument: $1"
- usage
- exit 1
- ;;
-esac
diff --git a/contrib/mw-to-git/t/push-pull-tests.sh b/contrib/mw-to-git/t/push-pull-tests.sh
deleted file mode 100644
index 9da2dc5ff0..0000000000
--- a/contrib/mw-to-git/t/push-pull-tests.sh
+++ /dev/null
@@ -1,144 +0,0 @@
-test_push_pull () {
-
- test_expect_success 'Git pull works after adding a new wiki page' '
- wiki_reset &&
-
- git clone mediawiki::'"$WIKI_URL"' mw_dir_1 &&
- wiki_editpage Foo "page created after the git clone" false &&
-
- (
- cd mw_dir_1 &&
- git pull
- ) &&
-
- wiki_getallpage ref_page_1 &&
- test_diff_directories mw_dir_1 ref_page_1
- '
-
- test_expect_success 'Git pull works after editing a wiki page' '
- wiki_reset &&
-
- wiki_editpage Foo "page created before the git clone" false &&
- git clone mediawiki::'"$WIKI_URL"' mw_dir_2 &&
- wiki_editpage Foo "new line added on the wiki" true &&
-
- (
- cd mw_dir_2 &&
- git pull
- ) &&
-
- wiki_getallpage ref_page_2 &&
- test_diff_directories mw_dir_2 ref_page_2
- '
-
- test_expect_success 'git pull works on conflict handled by auto-merge' '
- wiki_reset &&
-
- wiki_editpage Foo "1 init
-3
-5
- " false &&
- git clone mediawiki::'"$WIKI_URL"' mw_dir_3 &&
-
- wiki_editpage Foo "1 init
-2 content added on wiki after clone
-3
-5
- " false &&
-
- (
- cd mw_dir_3 &&
- echo "1 init
-3
-4 content added on git after clone
-5
-" >Foo.mw &&
- git commit -am "conflicting change on foo" &&
- git pull &&
- git push
- )
- '
-
- test_expect_success 'Git push works after adding a file .mw' '
- wiki_reset &&
- git clone mediawiki::'"$WIKI_URL"' mw_dir_4 &&
- wiki_getallpage ref_page_4 &&
- (
- cd mw_dir_4 &&
- test_path_is_missing Foo.mw &&
- touch Foo.mw &&
- echo "hello world" >>Foo.mw &&
- git add Foo.mw &&
- git commit -m "Foo" &&
- git push
- ) &&
- wiki_getallpage ref_page_4 &&
- test_diff_directories mw_dir_4 ref_page_4
- '
-
- test_expect_success 'Git push works after editing a file .mw' '
- wiki_reset &&
- wiki_editpage "Foo" "page created before the git clone" false &&
- git clone mediawiki::'"$WIKI_URL"' mw_dir_5 &&
-
- (
- cd mw_dir_5 &&
- echo "new line added in the file Foo.mw" >>Foo.mw &&
- git commit -am "edit file Foo.mw" &&
- git push
- ) &&
-
- wiki_getallpage ref_page_5 &&
- test_diff_directories mw_dir_5 ref_page_5
- '
-
- test_expect_failure 'Git push works after deleting a file' '
- wiki_reset &&
- wiki_editpage Foo "wiki page added before git clone" false &&
- git clone mediawiki::'"$WIKI_URL"' mw_dir_6 &&
-
- (
- cd mw_dir_6 &&
- git rm Foo.mw &&
- git commit -am "page Foo.mw deleted" &&
- git push
- ) &&
-
- test_must_fail wiki_page_exist Foo
- '
-
- test_expect_success 'Merge conflict expected and solving it' '
- wiki_reset &&
-
- git clone mediawiki::'"$WIKI_URL"' mw_dir_7 &&
- wiki_editpage Foo "1 conflict
-3 wiki
-4" false &&
-
- (
- cd mw_dir_7 &&
- echo "1 conflict
-2 git
-4" >Foo.mw &&
- git add Foo.mw &&
- git commit -m "conflict created" &&
- test_must_fail git pull &&
- "$PERL_PATH" -pi -e "s/[<=>].*//g" Foo.mw &&
- git commit -am "merge conflict solved" &&
- git push
- )
- '
-
- test_expect_failure 'git pull works after deleting a wiki page' '
- wiki_reset &&
- wiki_editpage Foo "wiki page added before the git clone" false &&
- git clone mediawiki::'"$WIKI_URL"' mw_dir_8 &&
-
- wiki_delete_page Foo &&
- (
- cd mw_dir_8 &&
- git pull &&
- test_path_is_missing Foo.mw
- )
- '
-}
diff --git a/contrib/mw-to-git/t/t9360-mw-to-git-clone.sh b/contrib/mw-to-git/t/t9360-mw-to-git-clone.sh
deleted file mode 100755
index f08890d9e7..0000000000
--- a/contrib/mw-to-git/t/t9360-mw-to-git-clone.sh
+++ /dev/null
@@ -1,257 +0,0 @@
-#!/bin/sh
-#
-# Copyright (C) 2012
-# Charles Roussel <charles.roussel@ensimag.imag.fr>
-# Simon Cathebras <simon.cathebras@ensimag.imag.fr>
-# Julien Khayat <julien.khayat@ensimag.imag.fr>
-# Guillaume Sasdy <guillaume.sasdy@ensimag.imag.fr>
-# Simon Perrat <simon.perrat@ensimag.imag.fr>
-#
-# License: GPL v2 or later
-
-
-test_description='Test the Git Mediawiki remote helper: git clone'
-
-. ./test-gitmw-lib.sh
-. $TEST_DIRECTORY/test-lib.sh
-
-
-test_check_precond
-
-
-test_expect_success 'Git clone creates the expected git log with one file' '
- wiki_reset &&
- wiki_editpage foo "this is not important" false -c cat -s "this must be the same" &&
- git clone mediawiki::'"$WIKI_URL"' mw_dir_1 &&
- (
- cd mw_dir_1 &&
- git log --format=%s HEAD^..HEAD >log.tmp
- ) &&
- echo "this must be the same" >msg.tmp &&
- test_cmp msg.tmp mw_dir_1/log.tmp
-'
-
-
-test_expect_success 'Git clone creates the expected git log with multiple files' '
- wiki_reset &&
- wiki_editpage daddy "this is not important" false -s="this must be the same" &&
- wiki_editpage daddy "neither is this" true -s="this must also be the same" &&
- wiki_editpage daddy "neither is this" true -s="same same same" &&
- wiki_editpage dj "dont care" false -s="identical" &&
- wiki_editpage dj "dont care either" true -s="identical too" &&
- git clone mediawiki::'"$WIKI_URL"' mw_dir_2 &&
- (
- cd mw_dir_2 &&
- git log --format=%s Daddy.mw >logDaddy.tmp &&
- git log --format=%s Dj.mw >logDj.tmp
- ) &&
- echo "same same same" >msgDaddy.tmp &&
- echo "this must also be the same" >>msgDaddy.tmp &&
- echo "this must be the same" >>msgDaddy.tmp &&
- echo "identical too" >msgDj.tmp &&
- echo "identical" >>msgDj.tmp &&
- test_cmp msgDaddy.tmp mw_dir_2/logDaddy.tmp &&
- test_cmp msgDj.tmp mw_dir_2/logDj.tmp
-'
-
-
-test_expect_success 'Git clone creates only Main_Page.mw with an empty wiki' '
- wiki_reset &&
- git clone mediawiki::'"$WIKI_URL"' mw_dir_3 &&
- test_contains_N_files mw_dir_3 1 &&
- test_path_is_file mw_dir_3/Main_Page.mw
-'
-
-test_expect_success 'Git clone does not fetch a deleted page' '
- wiki_reset &&
- wiki_editpage foo "this page must be deleted before the clone" false &&
- wiki_delete_page foo &&
- git clone mediawiki::'"$WIKI_URL"' mw_dir_4 &&
- test_contains_N_files mw_dir_4 1 &&
- test_path_is_file mw_dir_4/Main_Page.mw &&
- test_path_is_missing mw_dir_4/Foo.mw
-'
-
-test_expect_success 'Git clone works with page added' '
- wiki_reset &&
- wiki_editpage foo " I will be cloned" false &&
- wiki_editpage bar "I will be cloned" false &&
- git clone mediawiki::'"$WIKI_URL"' mw_dir_5 &&
- wiki_getallpage ref_page_5 &&
- test_diff_directories mw_dir_5 ref_page_5 &&
- wiki_delete_page foo &&
- wiki_delete_page bar
-'
-
-test_expect_success 'Git clone works with an edited page ' '
- wiki_reset &&
- wiki_editpage foo "this page will be edited" \
- false -s "first edition of page foo" &&
- wiki_editpage foo "this page has been edited and must be on the clone " true &&
- git clone mediawiki::'"$WIKI_URL"' mw_dir_6 &&
- test_path_is_file mw_dir_6/Foo.mw &&
- test_path_is_file mw_dir_6/Main_Page.mw &&
- wiki_getallpage mw_dir_6/page_ref_6 &&
- test_diff_directories mw_dir_6 mw_dir_6/page_ref_6 &&
- (
- cd mw_dir_6 &&
- git log --format=%s HEAD^ Foo.mw > ../Foo.log
- ) &&
- echo "first edition of page foo" > FooExpect.log &&
- diff FooExpect.log Foo.log
-'
-
-
-test_expect_success 'Git clone works with several pages and some deleted ' '
- wiki_reset &&
- wiki_editpage foo "this page will not be deleted" false &&
- wiki_editpage bar "I must not be erased" false &&
- wiki_editpage namnam "I will not be there at the end" false &&
- wiki_editpage nyancat "nyan nyan nyan delete me" false &&
- wiki_delete_page namnam &&
- wiki_delete_page nyancat &&
- git clone mediawiki::'"$WIKI_URL"' mw_dir_7 &&
- test_path_is_file mw_dir_7/Foo.mw &&
- test_path_is_file mw_dir_7/Bar.mw &&
- test_path_is_missing mw_dir_7/Namnam.mw &&
- test_path_is_missing mw_dir_7/Nyancat.mw &&
- wiki_getallpage mw_dir_7/page_ref_7 &&
- test_diff_directories mw_dir_7 mw_dir_7/page_ref_7
-'
-
-
-test_expect_success 'Git clone works with one specific page cloned ' '
- wiki_reset &&
- wiki_editpage foo "I will not be cloned" false &&
- wiki_editpage bar "Do not clone me" false &&
- wiki_editpage namnam "I will be cloned :)" false -s="this log must stay" &&
- wiki_editpage nyancat "nyan nyan nyan you cant clone me" false &&
- git clone -c remote.origin.pages=namnam \
- mediawiki::'"$WIKI_URL"' mw_dir_8 &&
- test_contains_N_files mw_dir_8 1 &&
- test_path_is_file mw_dir_8/Namnam.mw &&
- test_path_is_missing mw_dir_8/Main_Page.mw &&
- (
- cd mw_dir_8 &&
- echo "this log must stay" >msg.tmp &&
- git log --format=%s >log.tmp &&
- test_cmp msg.tmp log.tmp
- ) &&
- wiki_check_content mw_dir_8/Namnam.mw Namnam
-'
-
-test_expect_success 'Git clone works with multiple specific page cloned ' '
- wiki_reset &&
- wiki_editpage foo "I will be there" false &&
- wiki_editpage bar "I will not disappear" false &&
- wiki_editpage namnam "I be erased" false &&
- wiki_editpage nyancat "nyan nyan nyan you will not erase me" false &&
- wiki_delete_page namnam &&
- git clone -c remote.origin.pages="foo bar nyancat namnam" \
- mediawiki::'"$WIKI_URL"' mw_dir_9 &&
- test_contains_N_files mw_dir_9 3 &&
- test_path_is_missing mw_dir_9/Namnam.mw &&
- test_path_is_file mw_dir_9/Foo.mw &&
- test_path_is_file mw_dir_9/Nyancat.mw &&
- test_path_is_file mw_dir_9/Bar.mw &&
- wiki_check_content mw_dir_9/Foo.mw Foo &&
- wiki_check_content mw_dir_9/Bar.mw Bar &&
- wiki_check_content mw_dir_9/Nyancat.mw Nyancat
-'
-
-test_expect_success 'Mediawiki-clone of several specific pages on wiki' '
- wiki_reset &&
- wiki_editpage foo "foo 1" false &&
- wiki_editpage bar "bar 1" false &&
- wiki_editpage dummy "dummy 1" false &&
- wiki_editpage cloned_1 "cloned_1 1" false &&
- wiki_editpage cloned_2 "cloned_2 2" false &&
- wiki_editpage cloned_3 "cloned_3 3" false &&
- mkdir -p ref_page_10 &&
- wiki_getpage cloned_1 ref_page_10 &&
- wiki_getpage cloned_2 ref_page_10 &&
- wiki_getpage cloned_3 ref_page_10 &&
- git clone -c remote.origin.pages="cloned_1 cloned_2 cloned_3" \
- mediawiki::'"$WIKI_URL"' mw_dir_10 &&
- test_diff_directories mw_dir_10 ref_page_10
-'
-
-test_expect_success 'Git clone works with the shallow option' '
- wiki_reset &&
- wiki_editpage foo "1st revision, should be cloned" false &&
- wiki_editpage bar "1st revision, should be cloned" false &&
- wiki_editpage nyan "1st revision, should not be cloned" false &&
- wiki_editpage nyan "2nd revision, should be cloned" false &&
- git -c remote.origin.shallow=true clone \
- mediawiki::'"$WIKI_URL"' mw_dir_11 &&
- test_contains_N_files mw_dir_11 4 &&
- test_path_is_file mw_dir_11/Nyan.mw &&
- test_path_is_file mw_dir_11/Foo.mw &&
- test_path_is_file mw_dir_11/Bar.mw &&
- test_path_is_file mw_dir_11/Main_Page.mw &&
- (
- cd mw_dir_11 &&
- test $(git log --oneline Nyan.mw | wc -l) -eq 1 &&
- test $(git log --oneline Foo.mw | wc -l) -eq 1 &&
- test $(git log --oneline Bar.mw | wc -l) -eq 1 &&
- test $(git log --oneline Main_Page.mw | wc -l ) -eq 1
- ) &&
- wiki_check_content mw_dir_11/Nyan.mw Nyan &&
- wiki_check_content mw_dir_11/Foo.mw Foo &&
- wiki_check_content mw_dir_11/Bar.mw Bar &&
- wiki_check_content mw_dir_11/Main_Page.mw Main_Page
-'
-
-test_expect_success 'Git clone works with the shallow option with a delete page' '
- wiki_reset &&
- wiki_editpage foo "1st revision, will be deleted" false &&
- wiki_editpage bar "1st revision, should be cloned" false &&
- wiki_editpage nyan "1st revision, should not be cloned" false &&
- wiki_editpage nyan "2nd revision, should be cloned" false &&
- wiki_delete_page foo &&
- git -c remote.origin.shallow=true clone \
- mediawiki::'"$WIKI_URL"' mw_dir_12 &&
- test_contains_N_files mw_dir_12 3 &&
- test_path_is_file mw_dir_12/Nyan.mw &&
- test_path_is_missing mw_dir_12/Foo.mw &&
- test_path_is_file mw_dir_12/Bar.mw &&
- test_path_is_file mw_dir_12/Main_Page.mw &&
- (
- cd mw_dir_12 &&
- test $(git log --oneline Nyan.mw | wc -l) -eq 1 &&
- test $(git log --oneline Bar.mw | wc -l) -eq 1 &&
- test $(git log --oneline Main_Page.mw | wc -l ) -eq 1
- ) &&
- wiki_check_content mw_dir_12/Nyan.mw Nyan &&
- wiki_check_content mw_dir_12/Bar.mw Bar &&
- wiki_check_content mw_dir_12/Main_Page.mw Main_Page
-'
-
-test_expect_success 'Test of fetching a category' '
- wiki_reset &&
- wiki_editpage Foo "I will be cloned" false -c=Category &&
- wiki_editpage Bar "Meet me on the repository" false -c=Category &&
- wiki_editpage Dummy "I will not come" false &&
- wiki_editpage BarWrong "I will stay online only" false -c=NotCategory &&
- git clone -c remote.origin.categories="Category" \
- mediawiki::'"$WIKI_URL"' mw_dir_13 &&
- wiki_getallpage ref_page_13 Category &&
- test_diff_directories mw_dir_13 ref_page_13
-'
-
-test_expect_success 'Test of resistance to modification of category on wiki for clone' '
- wiki_reset &&
- wiki_editpage Tobedeleted "this page will be deleted" false -c=Catone &&
- wiki_editpage Tobeedited "this page will be modified" false -c=Catone &&
- wiki_editpage Normalone "this page wont be modified and will be on git" false -c=Catone &&
- wiki_editpage Notconsidered "this page will not appear on local" false &&
- wiki_editpage Othercategory "this page will not appear on local" false -c=Cattwo &&
- wiki_editpage Tobeedited "this page have been modified" true -c=Catone &&
- wiki_delete_page Tobedeleted &&
- git clone -c remote.origin.categories="Catone" \
- mediawiki::'"$WIKI_URL"' mw_dir_14 &&
- wiki_getallpage ref_page_14 Catone &&
- test_diff_directories mw_dir_14 ref_page_14
-'
-
-test_done
diff --git a/contrib/mw-to-git/t/t9361-mw-to-git-push-pull.sh b/contrib/mw-to-git/t/t9361-mw-to-git-push-pull.sh
deleted file mode 100755
index 9ea201459b..0000000000
--- a/contrib/mw-to-git/t/t9361-mw-to-git-push-pull.sh
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/bin/sh
-#
-# Copyright (C) 2012
-# Charles Roussel <charles.roussel@ensimag.imag.fr>
-# Simon Cathebras <simon.cathebras@ensimag.imag.fr>
-# Julien Khayat <julien.khayat@ensimag.imag.fr>
-# Guillaume Sasdy <guillaume.sasdy@ensimag.imag.fr>
-# Simon Perrat <simon.perrat@ensimag.imag.fr>
-#
-# License: GPL v2 or later
-
-# tests for git-remote-mediawiki
-
-test_description='Test the Git Mediawiki remote helper: git push and git pull simple test cases'
-
-. ./test-gitmw-lib.sh
-. ./push-pull-tests.sh
-. $TEST_DIRECTORY/test-lib.sh
-
-test_check_precond
-
-test_push_pull
-
-test_done
diff --git a/contrib/mw-to-git/t/t9362-mw-to-git-utf8.sh b/contrib/mw-to-git/t/t9362-mw-to-git-utf8.sh
deleted file mode 100755
index 526d92850f..0000000000
--- a/contrib/mw-to-git/t/t9362-mw-to-git-utf8.sh
+++ /dev/null
@@ -1,347 +0,0 @@
-#!/bin/sh
-#
-# Copyright (C) 2012
-# Charles Roussel <charles.roussel@ensimag.imag.fr>
-# Simon Cathebras <simon.cathebras@ensimag.imag.fr>
-# Julien Khayat <julien.khayat@ensimag.imag.fr>
-# Guillaume Sasdy <guillaume.sasdy@ensimag.imag.fr>
-# Simon Perrat <simon.perrat@ensimag.imag.fr>
-#
-# License: GPL v2 or later
-
-# tests for git-remote-mediawiki
-
-test_description='Test git-mediawiki with special characters in filenames'
-
-. ./test-gitmw-lib.sh
-. $TEST_DIRECTORY/test-lib.sh
-
-
-test_check_precond
-
-
-test_expect_success 'Git clone works for a wiki with accents in the page names' '
- wiki_reset &&
- wiki_editpage féé "This page must be délétéd before clone" false &&
- wiki_editpage kèè "This page must be deleted before clone" false &&
- wiki_editpage hàà "This page must be deleted before clone" false &&
- wiki_editpage kîî "This page must be deleted before clone" false &&
- wiki_editpage foo "This page must be deleted before clone" false &&
- git clone mediawiki::'"$WIKI_URL"' mw_dir_1 &&
- wiki_getallpage ref_page_1 &&
- test_diff_directories mw_dir_1 ref_page_1
-'
-
-
-test_expect_success 'Git pull works with a wiki with accents in the page names' '
- wiki_reset &&
- wiki_editpage kîî "this page must be cloned" false &&
- wiki_editpage foo "this page must be cloned" false &&
- git clone mediawiki::'"$WIKI_URL"' mw_dir_2 &&
- wiki_editpage éàîôû "This page must be pulled" false &&
- (
- cd mw_dir_2 &&
- git pull
- ) &&
- wiki_getallpage ref_page_2 &&
- test_diff_directories mw_dir_2 ref_page_2
-'
-
-
-test_expect_success 'Cloning a chosen page works with accents' '
- wiki_reset &&
- wiki_editpage kîî "this page must be cloned" false &&
- git clone -c remote.origin.pages=kîî \
- mediawiki::'"$WIKI_URL"' mw_dir_3 &&
- wiki_check_content mw_dir_3/Kîî.mw Kîî &&
- test_path_is_file mw_dir_3/Kîî.mw &&
- rm -rf mw_dir_3
-'
-
-
-test_expect_success 'The shallow option works with accents' '
- wiki_reset &&
- wiki_editpage néoà "1st revision, should not be cloned" false &&
- wiki_editpage néoà "2nd revision, should be cloned" false &&
- git -c remote.origin.shallow=true clone \
- mediawiki::'"$WIKI_URL"' mw_dir_4 &&
- test_contains_N_files mw_dir_4 2 &&
- test_path_is_file mw_dir_4/Néoà.mw &&
- test_path_is_file mw_dir_4/Main_Page.mw &&
- (
- cd mw_dir_4 &&
- test $(git log --oneline Néoà.mw | wc -l) -eq 1 &&
- test $(git log --oneline Main_Page.mw | wc -l ) -eq 1
- ) &&
- wiki_check_content mw_dir_4/Néoà.mw Néoà &&
- wiki_check_content mw_dir_4/Main_Page.mw Main_Page
-'
-
-
-test_expect_success 'Cloning works when page name first letter has an accent' '
- wiki_reset &&
- wiki_editpage îî "this page must be cloned" false &&
- git clone -c remote.origin.pages=îî \
- mediawiki::'"$WIKI_URL"' mw_dir_5 &&
- test_path_is_file mw_dir_5/Îî.mw &&
- wiki_check_content mw_dir_5/Îî.mw Îî
-'
-
-
-test_expect_success 'Git push works with a wiki with accents' '
- wiki_reset &&
- wiki_editpage féé "lots of accents : éèàÖ" false &&
- wiki_editpage foo "this page must be cloned" false &&
- git clone mediawiki::'"$WIKI_URL"' mw_dir_6 &&
- (
- cd mw_dir_6 &&
- echo "A wild Pîkächû appears on the wiki" >Pîkächû.mw &&
- git add Pîkächû.mw &&
- git commit -m "A new page appears" &&
- git push
- ) &&
- wiki_getallpage ref_page_6 &&
- test_diff_directories mw_dir_6 ref_page_6
-'
-
-test_expect_success 'Git clone works with accents and spaces' '
- wiki_reset &&
- wiki_editpage "é à î" "this page must be délété before the clone" false &&
- git clone mediawiki::'"$WIKI_URL"' mw_dir_7 &&
- wiki_getallpage ref_page_7 &&
- test_diff_directories mw_dir_7 ref_page_7
-'
-
-test_expect_success 'character $ in page name (mw -> git)' '
- wiki_reset &&
- wiki_editpage file_\$_foo "expect to be called file_$_foo" false &&
- git clone mediawiki::'"$WIKI_URL"' mw_dir_8 &&
- test_path_is_file mw_dir_8/File_\$_foo.mw &&
- wiki_getallpage ref_page_8 &&
- test_diff_directories mw_dir_8 ref_page_8
-'
-
-
-
-test_expect_success 'character $ in file name (git -> mw) ' '
- wiki_reset &&
- git clone mediawiki::'"$WIKI_URL"' mw_dir_9 &&
- (
- cd mw_dir_9 &&
- echo "this file is called File_\$_foo.mw" >File_\$_foo.mw &&
- git add . &&
- git commit -am "file File_\$_foo.mw" &&
- git pull &&
- git push
- ) &&
- wiki_getallpage ref_page_9 &&
- test_diff_directories mw_dir_9 ref_page_9
-'
-
-
-test_expect_failure 'capital at the beginning of file names' '
- wiki_reset &&
- git clone mediawiki::'"$WIKI_URL"' mw_dir_10 &&
- (
- cd mw_dir_10 &&
- echo "my new file foo" >foo.mw &&
- echo "my new file Foo... Finger crossed" >Foo.mw &&
- git add . &&
- git commit -am "file foo.mw" &&
- git pull &&
- git push
- ) &&
- wiki_getallpage ref_page_10 &&
- test_diff_directories mw_dir_10 ref_page_10
-'
-
-
-test_expect_failure 'special character at the beginning of file name from mw to git' '
- wiki_reset &&
- git clone mediawiki::'"$WIKI_URL"' mw_dir_11 &&
- wiki_editpage {char_1 "expect to be renamed {char_1" false &&
- wiki_editpage [char_2 "expect to be renamed [char_2" false &&
- (
- cd mw_dir_11 &&
- git pull
- ) &&
- test_path_is_file mw_dir_11/{char_1 &&
- test_path_is_file mw_dir_11/[char_2
-'
-
-test_expect_success 'Pull page with title containing ":" other than namespace separator' '
- wiki_editpage Foo:Bar content false &&
- (
- cd mw_dir_11 &&
- git pull
- ) &&
- test_path_is_file mw_dir_11/Foo:Bar.mw
-'
-
-test_expect_success 'Push page with title containing ":" other than namespace separator' '
- (
- cd mw_dir_11 &&
- echo content >NotANameSpace:Page.mw &&
- git add NotANameSpace:Page.mw &&
- git commit -m "add page with colon" &&
- git push
- ) &&
- wiki_page_exist NotANameSpace:Page
-'
-
-test_expect_success 'test of correct formatting for file name from mw to git' '
- wiki_reset &&
- git clone mediawiki::'"$WIKI_URL"' mw_dir_12 &&
- wiki_editpage char_%_7b_1 "expect to be renamed char{_1" false &&
- wiki_editpage char_%_5b_2 "expect to be renamed char{_2" false &&
- (
- cd mw_dir_12 &&
- git pull
- ) &&
- test_path_is_file mw_dir_12/Char\{_1.mw &&
- test_path_is_file mw_dir_12/Char\[_2.mw &&
- wiki_getallpage ref_page_12 &&
- mv ref_page_12/Char_%_7b_1.mw ref_page_12/Char\{_1.mw &&
- mv ref_page_12/Char_%_5b_2.mw ref_page_12/Char\[_2.mw &&
- test_diff_directories mw_dir_12 ref_page_12
-'
-
-
-test_expect_failure 'test of correct formatting for file name beginning with special character' '
- wiki_reset &&
- git clone mediawiki::'"$WIKI_URL"' mw_dir_13 &&
- (
- cd mw_dir_13 &&
- echo "my new file {char_1" >\{char_1.mw &&
- echo "my new file [char_2" >\[char_2.mw &&
- git add . &&
- git commit -am "committing some exotic file name..." &&
- git push &&
- git pull
- ) &&
- wiki_getallpage ref_page_13 &&
- test_path_is_file ref_page_13/{char_1.mw &&
- test_path_is_file ref_page_13/[char_2.mw &&
- test_diff_directories mw_dir_13 ref_page_13
-'
-
-
-test_expect_success 'test of correct formatting for file name from git to mw' '
- wiki_reset &&
- git clone mediawiki::'"$WIKI_URL"' mw_dir_14 &&
- (
- cd mw_dir_14 &&
- echo "my new file char{_1" >Char\{_1.mw &&
- echo "my new file char[_2" >Char\[_2.mw &&
- git add . &&
- git commit -m "committing some exotic file name..." &&
- git push
- ) &&
- wiki_getallpage ref_page_14 &&
- mv mw_dir_14/Char\{_1.mw mw_dir_14/Char_%_7b_1.mw &&
- mv mw_dir_14/Char\[_2.mw mw_dir_14/Char_%_5b_2.mw &&
- test_diff_directories mw_dir_14 ref_page_14
-'
-
-
-test_expect_success 'git clone with /' '
- wiki_reset &&
- wiki_editpage \/fo\/o "this is not important" false -c=Deleted &&
- git clone mediawiki::'"$WIKI_URL"' mw_dir_15 &&
- test_path_is_file mw_dir_15/%2Ffo%2Fo.mw &&
- wiki_check_content mw_dir_15/%2Ffo%2Fo.mw \/fo\/o
-'
-
-
-test_expect_success 'git push with /' '
- wiki_reset &&
- git clone mediawiki::'"$WIKI_URL"' mw_dir_16 &&
- echo "I will be on the wiki" >mw_dir_16/%2Ffo%2Fo.mw &&
- (
- cd mw_dir_16 &&
- git add %2Ffo%2Fo.mw &&
- git commit -m " %2Ffo%2Fo added" &&
- git push
- ) &&
- wiki_page_exist \/fo\/o &&
- wiki_check_content mw_dir_16/%2Ffo%2Fo.mw \/fo\/o
-
-'
-
-
-test_expect_success 'git clone with \' '
- wiki_reset &&
- wiki_editpage \\ko\\o "this is not important" false -c=Deleted &&
- git clone mediawiki::'"$WIKI_URL"' mw_dir_17 &&
- test_path_is_file mw_dir_17/\\ko\\o.mw &&
- wiki_check_content mw_dir_17/\\ko\\o.mw \\ko\\o
-'
-
-
-test_expect_success 'git push with \' '
- wiki_reset &&
- git clone mediawiki::'"$WIKI_URL"' mw_dir_18 &&
- echo "I will be on the wiki" >mw_dir_18/\\ko\\o.mw &&
- (
- cd mw_dir_18 &&
- git add \\ko\\o.mw &&
- git commit -m " \\ko\\o added" &&
- git push
- ) &&
- wiki_page_exist \\ko\\o &&
- wiki_check_content mw_dir_18/\\ko\\o.mw \\ko\\o
-
-'
-
-test_expect_success 'git clone with \ in format control' '
- wiki_reset &&
- wiki_editpage \\no\\o "this is not important" false &&
- git clone mediawiki::'"$WIKI_URL"' mw_dir_19 &&
- test_path_is_file mw_dir_19/\\no\\o.mw &&
- wiki_check_content mw_dir_19/\\no\\o.mw \\no\\o
-'
-
-
-test_expect_success 'git push with \ in format control' '
- wiki_reset &&
- git clone mediawiki::'"$WIKI_URL"' mw_dir_20 &&
- echo "I will be on the wiki" >mw_dir_20/\\fo\\o.mw &&
- (
- cd mw_dir_20 &&
- git add \\fo\\o.mw &&
- git commit -m " \\fo\\o added" &&
- git push
- ) &&
- wiki_page_exist \\fo\\o &&
- wiki_check_content mw_dir_20/\\fo\\o.mw \\fo\\o
-
-'
-
-
-test_expect_success 'fast-import meta-characters in page name (mw -> git)' '
- wiki_reset &&
- wiki_editpage \"file\"_\\_foo "expect to be called \"file\"_\\_foo" false &&
- git clone mediawiki::'"$WIKI_URL"' mw_dir_21 &&
- test_path_is_file mw_dir_21/\"file\"_\\_foo.mw &&
- wiki_getallpage ref_page_21 &&
- test_diff_directories mw_dir_21 ref_page_21
-'
-
-
-test_expect_success 'fast-import meta-characters in page name (git -> mw) ' '
- wiki_reset &&
- git clone mediawiki::'"$WIKI_URL"' mw_dir_22 &&
- (
- cd mw_dir_22 &&
- echo "this file is called \"file\"_\\_foo.mw" >\"file\"_\\_foo &&
- git add . &&
- git commit -am "file \"file\"_\\_foo" &&
- git pull &&
- git push
- ) &&
- wiki_getallpage ref_page_22 &&
- test_diff_directories mw_dir_22 ref_page_22
-'
-
-
-test_done
diff --git a/contrib/mw-to-git/t/t9363-mw-to-git-export-import.sh b/contrib/mw-to-git/t/t9363-mw-to-git-export-import.sh
deleted file mode 100755
index 7139995a40..0000000000
--- a/contrib/mw-to-git/t/t9363-mw-to-git-export-import.sh
+++ /dev/null
@@ -1,218 +0,0 @@
-#!/bin/sh
-#
-# Copyright (C) 2012
-# Charles Roussel <charles.roussel@ensimag.imag.fr>
-# Simon Cathebras <simon.cathebras@ensimag.imag.fr>
-# Julien Khayat <julien.khayat@ensimag.imag.fr>
-# Guillaume Sasdy <guillaume.sasdy@ensimag.imag.fr>
-# Simon Perrat <simon.perrat@ensimag.imag.fr>
-#
-# License: GPL v2 or later
-
-# tests for git-remote-mediawiki
-
-test_description='Test the Git Mediawiki remote helper: git push and git pull simple test cases'
-
-. ./test-gitmw-lib.sh
-. $TEST_DIRECTORY/test-lib.sh
-
-
-test_check_precond
-
-
-test_git_reimport () {
- git -c remote.origin.dumbPush=true push &&
- git -c remote.origin.mediaImport=true pull --rebase
-}
-
-# Don't bother with permissions, be administrator by default
-test_expect_success 'setup config' '
- git config --global remote.origin.mwLogin "$WIKI_ADMIN" &&
- git config --global remote.origin.mwPassword "$WIKI_PASSW" &&
- test_might_fail git config --global --unset remote.origin.mediaImport
-'
-
-test_expect_failure 'git push can upload media (File:) files' '
- wiki_reset &&
- git clone mediawiki::'"$WIKI_URL"' mw_dir &&
- (
- cd mw_dir &&
- echo "hello world" >Foo.txt &&
- git add Foo.txt &&
- git commit -m "add a text file" &&
- git push &&
- "$PERL_PATH" -e "print STDOUT \"binary content: \".chr(255);" >Foo.txt &&
- git add Foo.txt &&
- git commit -m "add a text file with binary content" &&
- git push
- )
-'
-
-test_expect_failure 'git clone works on previously created wiki with media files' '
- test_when_finished "rm -rf mw_dir mw_dir_clone" &&
- git clone -c remote.origin.mediaimport=true \
- mediawiki::'"$WIKI_URL"' mw_dir_clone &&
- test_cmp mw_dir_clone/Foo.txt mw_dir/Foo.txt &&
- (cd mw_dir_clone && git checkout HEAD^) &&
- (cd mw_dir && git checkout HEAD^) &&
- test_path_is_file mw_dir_clone/Foo.txt &&
- test_cmp mw_dir_clone/Foo.txt mw_dir/Foo.txt
-'
-
-test_expect_success 'git push can upload media (File:) files containing valid UTF-8' '
- wiki_reset &&
- git clone mediawiki::'"$WIKI_URL"' mw_dir &&
- (
- cd mw_dir &&
- "$PERL_PATH" -e "print STDOUT \"UTF-8 content: éèàéê€.\";" >Bar.txt &&
- git add Bar.txt &&
- git commit -m "add a text file with UTF-8 content" &&
- git push
- )
-'
-
-test_expect_success 'git clone works on previously created wiki with media files containing valid UTF-8' '
- test_when_finished "rm -rf mw_dir mw_dir_clone" &&
- git clone -c remote.origin.mediaimport=true \
- mediawiki::'"$WIKI_URL"' mw_dir_clone &&
- test_cmp mw_dir_clone/Bar.txt mw_dir/Bar.txt
-'
-
-test_expect_success 'git push & pull work with locally renamed media files' '
- wiki_reset &&
- git clone mediawiki::'"$WIKI_URL"' mw_dir &&
- test_when_finished "rm -fr mw_dir" &&
- (
- cd mw_dir &&
- echo "A File" >Foo.txt &&
- git add Foo.txt &&
- git commit -m "add a file" &&
- git mv Foo.txt Bar.txt &&
- git commit -m "Rename a file" &&
- test_git_reimport &&
- echo "A File" >expect &&
- test_cmp expect Bar.txt &&
- test_path_is_missing Foo.txt
- )
-'
-
-test_expect_success 'git push can propagate local page deletion' '
- wiki_reset &&
- git clone mediawiki::'"$WIKI_URL"' mw_dir &&
- test_when_finished "rm -fr mw_dir" &&
- (
- cd mw_dir &&
- test_path_is_missing Foo.mw &&
- echo "hello world" >Foo.mw &&
- git add Foo.mw &&
- git commit -m "Add the page Foo" &&
- git push &&
- rm -f Foo.mw &&
- git commit -am "Delete the page Foo" &&
- test_git_reimport &&
- test_path_is_missing Foo.mw
- )
-'
-
-test_expect_success 'git push can propagate local media file deletion' '
- wiki_reset &&
- git clone mediawiki::'"$WIKI_URL"' mw_dir &&
- test_when_finished "rm -fr mw_dir" &&
- (
- cd mw_dir &&
- echo "hello world" >Foo.txt &&
- git add Foo.txt &&
- git commit -m "Add the text file Foo" &&
- git rm Foo.txt &&
- git commit -m "Delete the file Foo" &&
- test_git_reimport &&
- test_path_is_missing Foo.txt
- )
-'
-
-# Expected failure: the file is correctly uploaded and then deleted, but
-# since no page links to it, the import (which looks at page revisions)
-# doesn't notice the file deletion on the wiki. We fetch the list of
-# files from the wiki, but as the file has been deleted, it no longer appears.
-test_expect_failure 'git pull correctly imports media file deletion when no page link to it' '
- wiki_reset &&
- git clone mediawiki::'"$WIKI_URL"' mw_dir &&
- test_when_finished "rm -fr mw_dir" &&
- (
- cd mw_dir &&
- echo "hello world" >Foo.txt &&
- git add Foo.txt &&
- git commit -m "Add the text file Foo" &&
- git push &&
- git rm Foo.txt &&
- git commit -m "Delete the file Foo" &&
- test_git_reimport &&
- test_path_is_missing Foo.txt
- )
-'
-
-test_expect_success 'git push properly warns about insufficient permissions' '
- wiki_reset &&
- git clone mediawiki::'"$WIKI_URL"' mw_dir &&
- test_when_finished "rm -fr mw_dir" &&
- (
- cd mw_dir &&
- echo "A File" >foo.forbidden &&
- git add foo.forbidden &&
- git commit -m "add a file" &&
- git push 2>actual &&
- test_grep "foo.forbidden is not a permitted file" actual
- )
-'
-
-test_expect_success 'setup a repository with media files' '
- wiki_reset &&
- wiki_editpage testpage "I am linking a file [[File:File.txt]]" false &&
- echo "File content" >File.txt &&
- wiki_upload_file File.txt &&
- echo "Another file content" >AnotherFile.txt &&
- wiki_upload_file AnotherFile.txt
-'
-
-test_expect_success 'git clone works with one specific page cloned and mediaimport=true' '
- git clone -c remote.origin.pages=testpage \
- -c remote.origin.mediaimport=true \
- mediawiki::'"$WIKI_URL"' mw_dir_15 &&
- test_when_finished "rm -rf mw_dir_15" &&
- test_contains_N_files mw_dir_15 3 &&
- test_path_is_file mw_dir_15/Testpage.mw &&
- test_path_is_file mw_dir_15/File:File.txt.mw &&
- test_path_is_file mw_dir_15/File.txt &&
- test_path_is_missing mw_dir_15/Main_Page.mw &&
- test_path_is_missing mw_dir_15/File:AnotherFile.txt.mw &&
-	test_path_is_missing mw_dir_15/AnotherFile.txt &&
- wiki_check_content mw_dir_15/Testpage.mw Testpage &&
- test_cmp mw_dir_15/File.txt File.txt
-'
-
-test_expect_success 'git clone works with one specific page cloned and mediaimport=false' '
- test_when_finished "rm -rf mw_dir_16" &&
- git clone -c remote.origin.pages=testpage \
- mediawiki::'"$WIKI_URL"' mw_dir_16 &&
- test_contains_N_files mw_dir_16 1 &&
- test_path_is_file mw_dir_16/Testpage.mw &&
- test_path_is_missing mw_dir_16/File:File.txt.mw &&
- test_path_is_missing mw_dir_16/File.txt &&
- test_path_is_missing mw_dir_16/Main_Page.mw &&
- wiki_check_content mw_dir_16/Testpage.mw Testpage
-'
-
-# should behave like mediaimport=false
-test_expect_success 'git clone works with one specific page cloned and mediaimport unset' '
- test_when_finished "rm -fr mw_dir_17" &&
- git clone -c remote.origin.pages=testpage \
- mediawiki::'"$WIKI_URL"' mw_dir_17 &&
- test_contains_N_files mw_dir_17 1 &&
- test_path_is_file mw_dir_17/Testpage.mw &&
- test_path_is_missing mw_dir_17/File:File.txt.mw &&
- test_path_is_missing mw_dir_17/File.txt &&
- test_path_is_missing mw_dir_17/Main_Page.mw &&
- wiki_check_content mw_dir_17/Testpage.mw Testpage
-'
-
-test_done
diff --git a/contrib/mw-to-git/t/t9364-pull-by-rev.sh b/contrib/mw-to-git/t/t9364-pull-by-rev.sh
deleted file mode 100755
index 5c22457a0b..0000000000
--- a/contrib/mw-to-git/t/t9364-pull-by-rev.sh
+++ /dev/null
@@ -1,17 +0,0 @@
-#!/bin/sh
-
-test_description='Test the Git Mediawiki remote helper: git pull by revision'
-
-. ./test-gitmw-lib.sh
-. ./push-pull-tests.sh
-. $TEST_DIRECTORY/test-lib.sh
-
-test_check_precond
-
-test_expect_success 'configuration' '
- git config --global mediawiki.fetchStrategy by_rev
-'
-
-test_push_pull
-
-test_done
diff --git a/contrib/mw-to-git/t/t9365-continuing-queries.sh b/contrib/mw-to-git/t/t9365-continuing-queries.sh
deleted file mode 100755
index d3e7312659..0000000000
--- a/contrib/mw-to-git/t/t9365-continuing-queries.sh
+++ /dev/null
@@ -1,23 +0,0 @@
-#!/bin/sh
-
-test_description='Test the Git Mediawiki remote helper: queries w/ more than 500 results'
-
-. ./test-gitmw-lib.sh
-. $TEST_DIRECTORY/test-lib.sh
-
-test_check_precond
-
-test_expect_success 'creating page w/ >500 revisions' '
- wiki_reset &&
- for i in $(test_seq 501)
- do
- echo "creating revision $i" &&
- wiki_editpage foo "revision $i<br/>" true || return 1
- done
-'
-
-test_expect_success 'cloning page w/ >500 revisions' '
- git clone mediawiki::'"$WIKI_URL"' mw_dir
-'
-
-test_done
diff --git a/contrib/mw-to-git/t/test-gitmw-lib.sh b/contrib/mw-to-git/t/test-gitmw-lib.sh
deleted file mode 100755
index 64e46c1671..0000000000
--- a/contrib/mw-to-git/t/test-gitmw-lib.sh
+++ /dev/null
@@ -1,432 +0,0 @@
-# Copyright (C) 2012
-# Charles Roussel <charles.roussel@ensimag.imag.fr>
-# Simon Cathebras <simon.cathebras@ensimag.imag.fr>
-# Julien Khayat <julien.khayat@ensimag.imag.fr>
-# Guillaume Sasdy <guillaume.sasdy@ensimag.imag.fr>
-# Simon Perrat <simon.perrat@ensimag.imag.fr>
-# License: GPL v2 or later
-
-#
-# CONFIGURATION VARIABLES
-# You might want to change these ones
-#
-
-. ./test.config
-
-WIKI_BASE_URL=http://$SERVER_ADDR:$PORT
-WIKI_URL=$WIKI_BASE_URL/$WIKI_DIR_NAME
-CURR_DIR=$(pwd)
-TEST_OUTPUT_DIRECTORY=$(pwd)
-TEST_DIRECTORY="$CURR_DIR"/../../../t
-
-export TEST_OUTPUT_DIRECTORY TEST_DIRECTORY CURR_DIR
-
-if test "$LIGHTTPD" = "false" ; then
- PORT=80
-else
- WIKI_DIR_INST="$CURR_DIR/$WEB_WWW"
-fi
-
-wiki_upload_file () {
- "$CURR_DIR"/test-gitmw.pl upload_file "$@"
-}
-
-wiki_getpage () {
- "$CURR_DIR"/test-gitmw.pl get_page "$@"
-}
-
-wiki_delete_page () {
- "$CURR_DIR"/test-gitmw.pl delete_page "$@"
-}
-
-wiki_editpage () {
- "$CURR_DIR"/test-gitmw.pl edit_page "$@"
-}
-
-die () {
- die_with_status 1 "$@"
-}
-
-die_with_status () {
- status=$1
- shift
- echo >&2 "$*"
- exit "$status"
-}
-
-
-# Check the preconditions to run git-remote-mediawiki's tests
-test_check_precond () {
- if ! test_have_prereq PERL
- then
- skip_all='skipping gateway git-mw tests, perl not available'
- test_done
- fi
-
- GIT_EXEC_PATH=$(cd "$(dirname "$0")" && cd "../.." && pwd)
- PATH="$GIT_EXEC_PATH"'/bin-wrapper:'"$PATH"
-
- if ! test -d "$WIKI_DIR_INST/$WIKI_DIR_NAME"
- then
- skip_all='skipping gateway git-mw tests, no mediawiki found'
- test_done
- fi
-}
-
-# test_diff_directories <dir_git> <dir_wiki>
-#
-# Compare the contents of directories <dir_git> and <dir_wiki> with diff,
-# and error out if they do not match. The comparison does
-# not look into .git in the process.
-# Warning: the first argument MUST be the directory containing the git data.
-test_diff_directories () {
- rm -rf "$1_tmp"
- mkdir -p "$1_tmp"
- cp "$1"/*.mw "$1_tmp"
- diff -r -b "$1_tmp" "$2"
-}
-
-# $1=<dir>
-# $2=<N>
-#
-# Check that <dir> contains exactly <N> files
-test_contains_N_files () {
- if test $(ls -- "$1" | wc -l) -ne "$2"; then
- echo "directory $1 should contain $2 files"
- echo "it contains these files:"
- ls "$1"
- false
- fi
-}
-
-
-# wiki_check_content <file_name> <page_name>
-#
-# Compares the contents of the file <file_name> and the wiki page
-# <page_name> and exits with error 1 if they do not match.
-wiki_check_content () {
- mkdir -p wiki_tmp
- wiki_getpage "$2" wiki_tmp
- # replacement of forbidden character in file name
- page_name=$(printf "%s\n" "$2" | sed -e "s/\//%2F/g")
-
- diff -b "$1" wiki_tmp/"$page_name".mw
- if test $? -ne 0
- then
- rm -rf wiki_tmp
- error "ERROR: file $2 not found on wiki"
- fi
- rm -rf wiki_tmp
-}
-
-# wiki_page_exist <page_name>
-#
-# Check the existence of the page <page_name> on the wiki and exit
-# with an error if it is absent.
-wiki_page_exist () {
- mkdir -p wiki_tmp
- wiki_getpage "$1" wiki_tmp
- page_name=$(printf "%s\n" "$1" | sed "s/\//%2F/g")
- if test -f wiki_tmp/"$page_name".mw ; then
- rm -rf wiki_tmp
- else
- rm -rf wiki_tmp
- error "test failed: file $1 not found on wiki"
- fi
-}
-
-# wiki_getallpagename
-#
-# Fetch the name of each page on the wiki.
-wiki_getallpagename () {
- "$CURR_DIR"/test-gitmw.pl getallpagename
-}
-
-# wiki_getallpagecategory <category>
-#
-# Fetch the name of each page belonging to <category> on the wiki.
-wiki_getallpagecategory () {
- "$CURR_DIR"/test-gitmw.pl getallpagename "$@"
-}
-
-# wiki_getallpage <dest_dir> [<category>]
-#
-# Fetch all the pages from the wiki and place them in the directory
-# <dest_dir>.
-# If <category> is defined, then wiki_getallpage fetches only the pages
-# included in <category>.
-wiki_getallpage () {
- if test -z "$2";
- then
- wiki_getallpagename
- else
- wiki_getallpagecategory "$2"
- fi
- mkdir -p "$1"
- while read -r line; do
- wiki_getpage "$line" $1;
- done < all.txt
-}
-
-# ================= Install part =================
-
-error () {
- echo "$@" >&2
- exit 1
-}
-
-# config_lighttpd
-#
-# Create the configuration files and the folders necessary to start lighttpd.
-# Overwrite any existing file.
-config_lighttpd () {
- mkdir -p $WEB
- mkdir -p $WEB_TMP
- mkdir -p $WEB_WWW
- cat > $WEB/lighttpd.conf <<EOF
- server.document-root = "$CURR_DIR/$WEB_WWW"
- server.port = $PORT
- server.pid-file = "$CURR_DIR/$WEB_TMP/pid"
-
- server.modules = (
- "mod_rewrite",
- "mod_redirect",
- "mod_access",
- "mod_accesslog",
- "mod_fastcgi"
- )
-
- index-file.names = ("index.php" , "index.html")
-
- mimetype.assign = (
- ".pdf" => "application/pdf",
- ".sig" => "application/pgp-signature",
- ".spl" => "application/futuresplash",
- ".class" => "application/octet-stream",
- ".ps" => "application/postscript",
- ".torrent" => "application/x-bittorrent",
- ".dvi" => "application/x-dvi",
- ".gz" => "application/x-gzip",
- ".pac" => "application/x-ns-proxy-autoconfig",
- ".swf" => "application/x-shockwave-flash",
- ".tar.gz" => "application/x-tgz",
- ".tgz" => "application/x-tgz",
- ".tar" => "application/x-tar",
- ".zip" => "application/zip",
- ".mp3" => "audio/mpeg",
- ".m3u" => "audio/x-mpegurl",
- ".wma" => "audio/x-ms-wma",
- ".wax" => "audio/x-ms-wax",
- ".ogg" => "application/ogg",
- ".wav" => "audio/x-wav",
- ".gif" => "image/gif",
- ".jpg" => "image/jpeg",
- ".jpeg" => "image/jpeg",
- ".png" => "image/png",
- ".xbm" => "image/x-xbitmap",
- ".xpm" => "image/x-xpixmap",
- ".xwd" => "image/x-xwindowdump",
- ".css" => "text/css",
- ".html" => "text/html",
- ".htm" => "text/html",
- ".js" => "text/javascript",
- ".asc" => "text/plain",
- ".c" => "text/plain",
- ".cpp" => "text/plain",
- ".log" => "text/plain",
- ".conf" => "text/plain",
- ".text" => "text/plain",
- ".txt" => "text/plain",
- ".dtd" => "text/xml",
- ".xml" => "text/xml",
- ".mpeg" => "video/mpeg",
- ".mpg" => "video/mpeg",
- ".mov" => "video/quicktime",
- ".qt" => "video/quicktime",
- ".avi" => "video/x-msvideo",
- ".asf" => "video/x-ms-asf",
- ".asx" => "video/x-ms-asf",
- ".wmv" => "video/x-ms-wmv",
- ".bz2" => "application/x-bzip",
- ".tbz" => "application/x-bzip-compressed-tar",
- ".tar.bz2" => "application/x-bzip-compressed-tar",
- "" => "text/plain"
- )
-
- fastcgi.server = ( ".php" =>
- ("localhost" =>
- ( "socket" => "$CURR_DIR/$WEB_TMP/php.socket",
- "bin-path" => "$PHP_DIR/php-cgi -c $CURR_DIR/$WEB/php.ini"
-
- )
- )
- )
-EOF
-
- cat > $WEB/php.ini <<EOF
- session.save_path ='$CURR_DIR/$WEB_TMP'
-EOF
-}
-
-# start_lighttpd
-#
-# Start or restart the lighttpd daemon. On restart, the configuration files are rewritten.
-start_lighttpd () {
- if test -f "$WEB_TMP/pid"; then
- echo "Instance already running. Restarting..."
- stop_lighttpd
- fi
- config_lighttpd
- "$LIGHTTPD_DIR"/lighttpd -f "$WEB"/lighttpd.conf
-
- if test $? -ne 0 ; then
- echo "Could not execute http daemon lighttpd"
- exit 1
- fi
-}
-
-# stop_lighttpd
-#
-# Kill the lighttpd daemon and remove the associated files and folders.
-stop_lighttpd () {
- test -f "$WEB_TMP/pid" && kill $(cat "$WEB_TMP/pid")
-}
-
-wiki_delete_db () {
- rm -rf \
- "$FILES_FOLDER_DB"/* || error "Couldn't delete $FILES_FOLDER_DB/"
-}
-
-wiki_delete_db_backup () {
- rm -rf \
- "$FILES_FOLDER_POST_INSTALL_DB"/* || error "Couldn't delete $FILES_FOLDER_POST_INSTALL_DB/"
-}
-
-# Install MediaWiki using its install.php script. If the database file
-# already exists, it will be deleted.
-install_mediawiki () {
-
- localsettings="$WIKI_DIR_INST/$WIKI_DIR_NAME/LocalSettings.php"
- if test -f "$localsettings"
- then
- error "We already installed the wiki, since $localsettings exists" \
- "perhaps you wanted to run 'delete' first?"
- fi
-
- wiki_delete_db
- wiki_delete_db_backup
- mkdir \
- "$FILES_FOLDER_DB/" \
- "$FILES_FOLDER_POST_INSTALL_DB/"
-
- install_script="$WIKI_DIR_INST/$WIKI_DIR_NAME/maintenance/install.php"
- echo "Installing MediaWiki using $install_script. This may take some time ..."
-
- php "$WIKI_DIR_INST/$WIKI_DIR_NAME/maintenance/install.php" \
- --server $WIKI_BASE_URL \
- --scriptpath /wiki \
- --lang en \
- --dbtype sqlite \
- --dbpath $PWD/$FILES_FOLDER_DB/ \
- --pass "$WIKI_PASSW" \
- Git-MediaWiki-Test \
- "$WIKI_ADMIN" ||
- error "Couldn't run $install_script, see errors above. Try to run ./install-wiki.sh delete first."
- cat <<-'EOF' >>$localsettings
-# Custom settings added by test-gitmw-lib.sh
-#
-# Uploading text files is needed for
-# t9363-mw-to-git-export-import.sh
-$wgEnableUploads = true;
-$wgFileExtensions[] = 'txt';
-EOF
-
- # Copy the initially generated database file into our backup
- # folder
- cp -R "$FILES_FOLDER_DB/"* "$FILES_FOLDER_POST_INSTALL_DB/" ||
- error "Unable to copy $FILES_FOLDER_DB/* to $FILES_FOLDER_POST_INSTALL_DB/*"
-}
-
-# Install a wiki in your web server directory.
-wiki_install () {
- if test $LIGHTTPD = "true" ; then
- start_lighttpd
- fi
-
- # In this subshell, we change directory in order to download,
- # unpack, and copy the MediaWiki files.
- (
- mkdir -p "$WIKI_DIR_INST/$WIKI_DIR_NAME"
- if ! test -d "$WIKI_DIR_INST/$WIKI_DIR_NAME"
- then
- error "Folder $WIKI_DIR_INST/$WIKI_DIR_NAME doesn't exist.
- Please create it and launch the script again."
- fi
-
- # Fetch MediaWiki's archive if not already present in the
- # download directory
- mkdir -p "$FILES_FOLDER_DOWNLOAD"
- MW_FILENAME="mediawiki-$MW_VERSION_MAJOR.$MW_VERSION_MINOR.tar.gz"
- cd "$FILES_FOLDER_DOWNLOAD"
- if ! test -f $MW_FILENAME
- then
- echo "Downloading $MW_VERSION_MAJOR.$MW_VERSION_MINOR sources ..."
- wget "http://download.wikimedia.org/mediawiki/$MW_VERSION_MAJOR/$MW_FILENAME" ||
- error "Unable to download "\
- "http://download.wikimedia.org/mediawiki/$MW_VERSION_MAJOR/"\
- "$MW_FILENAME. "\
- "Please fix your connection and launch the script again."
- echo "$MW_FILENAME downloaded in $(pwd)/;" \
- "you can delete it later if you want."
- else
- echo "Reusing existing $MW_FILENAME downloaded in $(pwd)/"
- fi
- archive_abs_path=$(pwd)/$MW_FILENAME
- cd "$WIKI_DIR_INST/$WIKI_DIR_NAME/" ||
- error "can't cd to $WIKI_DIR_INST/$WIKI_DIR_NAME/"
- tar xzf "$archive_abs_path" --strip-components=1 ||
- error "Unable to extract MediaWiki's files from $archive_abs_path to "\
- "$WIKI_DIR_INST/$WIKI_DIR_NAME"
- ) || exit 1
- echo Extracted in "$WIKI_DIR_INST/$WIKI_DIR_NAME"
-
- install_mediawiki
-
- echo "Your wiki has been installed. You can check it at
- $WIKI_URL"
-}
-
-# Reset the wiki's database and the admin password.
-#
-# Warning: this function must be called only from a subdirectory of the t/ directory.
-wiki_reset () {
- # Copy initial database of the wiki
- if ! test -d "../$FILES_FOLDER_DB"
- then
- error "No wiki database at ../$FILES_FOLDER_DB, not installed yet?"
- fi
- if ! test -d "../$FILES_FOLDER_POST_INSTALL_DB"
- then
- error "No wiki backup database at ../$FILES_FOLDER_POST_INSTALL_DB, failed installation?"
- fi
- wiki_delete_db
- cp -R "../$FILES_FOLDER_POST_INSTALL_DB/"* "../$FILES_FOLDER_DB/" ||
- error "Can't copy ../$FILES_FOLDER_POST_INSTALL_DB/* to ../$FILES_FOLDER_DB/*"
- echo "Files in $FILES_FOLDER_DB/ have been reset"
-}
-
-# Delete the wiki created in the web server's directory and all its content
-# saved in the database.
-wiki_delete () {
- if test $LIGHTTPD = "true"; then
- stop_lighttpd
- rm -fr "$WEB"
- else
- # Delete the wiki's directory.
- rm -rf "$WIKI_DIR_INST/$WIKI_DIR_NAME" ||
- error "Wiki's directory $WIKI_DIR_INST/" \
- "$WIKI_DIR_NAME could not be deleted"
- fi
- wiki_delete_db
- wiki_delete_db_backup
-}
diff --git a/contrib/mw-to-git/t/test-gitmw.pl b/contrib/mw-to-git/t/test-gitmw.pl
deleted file mode 100755
index c5d687f078..0000000000
--- a/contrib/mw-to-git/t/test-gitmw.pl
+++ /dev/null
@@ -1,223 +0,0 @@
-#!/usr/bin/perl -w -s
-# Copyright (C) 2012
-# Charles Roussel <charles.roussel@ensimag.imag.fr>
-# Simon Cathebras <simon.cathebras@ensimag.imag.fr>
-# Julien Khayat <julien.khayat@ensimag.imag.fr>
-# Guillaume Sasdy <guillaume.sasdy@ensimag.imag.fr>
-# Simon Perrat <simon.perrat@ensimag.imag.fr>
-# License: GPL v2 or later
-
-# Usage:
-# ./test-gitmw.pl <command> [argument]*
-# Run from a terminal with the name of the function to call as the first
-# parameter and the function's arguments as the following parameters.
-#
-# Example:
-# ./test-gitmw.pl "get_page" foo .
-# will call <wiki_getpage> with arguments <foo> and <.>
-#
-# Available functions are:
-# "get_page"
-# "delete_page"
-# "edit_page"
-# "getallpagename"
-
-use MediaWiki::API;
-use Getopt::Long;
-use DateTime::Format::ISO8601;
-use constant SLASH_REPLACEMENT => "%2F";
-
-# Parse the config file.
-
-my $configfile = "$ENV{'CURR_DIR'}/test.config";
-my %config;
-open my $CONFIG, "<", $configfile or die "can't open $configfile: $!";
-while (<$CONFIG>)
-{
- chomp;
- s/#.*//;
- s/^\s+//;
- s/\s+$//;
- next unless length;
- my ($key, $value) = split (/\s*=\s*/,$_, 2);
- $config{$key} = $value;
- last if ($key eq 'LIGHTTPD' and $value eq 'false');
- last if ($key eq 'PORT');
-}
-close $CONFIG or die "can't close $configfile: $!";
-
-my $wiki_address = "http://$config{'SERVER_ADDR'}".":"."$config{'PORT'}";
-my $wiki_url = "$wiki_address/$config{'WIKI_DIR_NAME'}/api.php";
-my $wiki_admin = "$config{'WIKI_ADMIN'}";
-my $wiki_admin_pass = "$config{'WIKI_PASSW'}";
-my $mw = MediaWiki::API->new;
-$mw->{config}->{api_url} = $wiki_url;
-
-
-# wiki_login <name> <password>
-#
-# Log in the user <name> with password <password> on the wiki referenced
-# by the global MediaWiki object $mw
-sub wiki_login {
- $mw->login( { lgname => "$_[0]",lgpassword => "$_[1]" } )
- || die "wiki_login: login failed";
-}
-
-# wiki_getpage <wiki_page> <dest_path>
-#
-# Fetch the page <wiki_page> from the wiki referenced by the global variable
-# $mw and copy its content into the directory <dest_path>
-sub wiki_getpage {
- my $pagename = $_[0];
- my $destdir = $_[1];
-
- my $page = $mw->get_page( { title => $pagename } );
- if (!defined($page)) {
- die "getpage: wiki does not exist";
- }
-
- my $content = $page->{'*'};
- if (!defined($content)) {
- die "getpage: page does not exist";
- }
-
- $pagename=$page->{'title'};
- # Replace spaces with underscores in the page name
- $pagename =~ s/ /_/g;
- $pagename =~ s/\//%2F/g;
- open(my $file, ">:encoding(UTF-8)", "$destdir/$pagename.mw");
- print $file "$content";
- close ($file);
-
-}
-
-# wiki_delete_page <page_name>
-#
-# Delete the page named <page_name> from the wiki referenced
-# by the global variable $mw
-sub wiki_delete_page {
- my $pagename = $_[0];
-
- my $exist=$mw->get_page({title => $pagename});
-
- if (defined($exist->{'*'})){
- $mw->edit({ action => 'delete',
- title => $pagename})
- || die $mw->{error}->{code} . ": " . $mw->{error}->{details};
- } else {
- die "no page with such name found: $pagename\n";
- }
-}
-
-# wiki_editpage <wiki_page> <wiki_content> <wiki_append> [-c=<category>] [-s=<summary>]
-#
-# Edit a page named <wiki_page> with content <wiki_content> on the wiki
-# referenced by the global variable $mw.
-# If <wiki_append> is true, append <wiki_content> to the end of the current
-# content of the page <wiki_page>.
-# If <wiki_page> doesn't exist, the page is created with <wiki_content>.
-sub wiki_editpage {
- my $wiki_page = $_[0];
- my $wiki_content = $_[1];
- my $wiki_append = $_[2];
- my $summary = "";
- my ($summ, $cat) = ();
- GetOptions('s=s' => \$summ, 'c=s' => \$cat);
-
- my $append = 0;
- if (defined($wiki_append) && $wiki_append eq 'true') {
- $append=1;
- }
-
- my $previous_text ="";
-
- if ($append) {
- my $ref = $mw->get_page( { title => $wiki_page } );
- $previous_text = $ref->{'*'};
- }
-
- my $text = $wiki_content;
- if (defined($previous_text)) {
- $text="$previous_text$text";
- }
-
- # Optionally, add this page to a category.
- if (defined($cat)) {
- my $category_name="[[Category:$cat]]";
- $text="$text\n $category_name";
- }
- if(defined($summ)){
- $summary=$summ;
- }
-
- $mw->edit( { action => 'edit', title => $wiki_page, summary => $summary, text => "$text"} );
-}
-
-# wiki_getallpagename [<category>]
-#
-# Fetch all pages of the wiki referenced by the global variable $mw
-# and print their names to the file all.txt, one per line.
-# If the argument <category> is defined, then this function gets only the pages
-# belonging to <category>.
-sub wiki_getallpagename {
- # fetch the pages of the wiki
- if (defined($_[0])) {
- my $mw_pages = $mw->list ( { action => 'query',
- list => 'categorymembers',
- cmtitle => "Category:$_[0]",
- cmnamespace => 0,
- cmlimit => 500 },
- )
- || die $mw->{error}->{code}.": ".$mw->{error}->{details};
- open(my $file, ">:encoding(UTF-8)", "all.txt");
- foreach my $page (@{$mw_pages}) {
- print $file "$page->{title}\n";
- }
- close ($file);
-
- } else {
- my $mw_pages = $mw->list({
- action => 'query',
- list => 'allpages',
- aplimit => 500,
- })
- || die $mw->{error}->{code}.": ".$mw->{error}->{details};
- open(my $file, ">:encoding(UTF-8)", "all.txt");
- foreach my $page (@{$mw_pages}) {
- print $file "$page->{title}\n";
- }
- close ($file);
- }
-}
-
-sub wiki_upload_file {
- my $file_name = $_[0];
- my $resultat = $mw->edit ( {
- action => 'upload',
- filename => $file_name,
- comment => 'upload a file',
- file => [ $file_name ],
- ignorewarnings=>1,
- }, {
- skip_encoding => 1
- } ) || die $mw->{error}->{code} . ' : ' . $mw->{error}->{details};
-}
-
-
-
-# Main part of this script: parse the command line arguments
-# and select which function to execute
-my $fct_to_call = shift;
-
-wiki_login($wiki_admin, $wiki_admin_pass);
-
-my %functions_to_call = (
- upload_file => \&wiki_upload_file,
- get_page => \&wiki_getpage,
- delete_page => \&wiki_delete_page,
- edit_page => \&wiki_editpage,
- getallpagename => \&wiki_getallpagename,
-);
-die "$0 ERROR: wrong argument" unless exists $functions_to_call{$fct_to_call};
-$functions_to_call{$fct_to_call}->(map { utf8::decode($_); $_ } @ARGV);
diff --git a/contrib/mw-to-git/t/test.config b/contrib/mw-to-git/t/test.config
deleted file mode 100644
index ed10b3e4a4..0000000000
--- a/contrib/mw-to-git/t/test.config
+++ /dev/null
@@ -1,40 +0,0 @@
-# WIKI_DIR_NAME is the name of the web server's directory dedicated to the wiki
-WIKI_DIR_NAME=wiki
-
-# Login and password of the wiki's admin
-WIKI_ADMIN=WikiAdmin
-WIKI_PASSW=AdminPass1
-
-# Address of the web server
-SERVER_ADDR=localhost
-
-# If LIGHTTPD is not set to true, the script will use the default
-# web server running in WIKI_DIR_INST.
-WIKI_DIR_INST=/var/www
-
-# If LIGHTTPD is set to true, the script will use Lighttpd to run
-# the wiki.
-LIGHTTPD=true
-
-# The variables below are useful only if LIGHTTPD is set to true.
-PORT=1234
-PHP_DIR=/usr/bin
-LIGHTTPD_DIR=/usr/sbin
-WEB=WEB
-WEB_TMP=$WEB/tmp
-WEB_WWW=$WEB/www
-
-# Where our configuration for the wiki is located
-FILES_FOLDER=mediawiki
-FILES_FOLDER_DOWNLOAD=$FILES_FOLDER/download
-FILES_FOLDER_DB=$FILES_FOLDER/db
-FILES_FOLDER_POST_INSTALL_DB=$FILES_FOLDER/post-install-db
-
-# The variables below are used by the script to install a wiki.
-# You should not modify these unless you are modifying the script itself.
-# tested versions: 1.19.X -> 1.21.1 -> 1.34.2
-#
-# See https://www.mediawiki.org/wiki/Download for what the latest
-# version is.
-MW_VERSION_MAJOR=1.34
-MW_VERSION_MINOR=2
diff --git a/contrib/persistent-https/LICENSE b/contrib/persistent-https/LICENSE
deleted file mode 100644
index d645695673..0000000000
--- a/contrib/persistent-https/LICENSE
+++ /dev/null
@@ -1,202 +0,0 @@
-
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright [yyyy] [name of copyright owner]
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
diff --git a/contrib/persistent-https/Makefile b/contrib/persistent-https/Makefile
deleted file mode 100644
index 691737e76b..0000000000
--- a/contrib/persistent-https/Makefile
+++ /dev/null
@@ -1,43 +0,0 @@
-# Copyright 2012 Google Inc. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# The default target of this Makefile is...
-all::
-
-BUILD_LABEL=$(shell cut -d" " -f3 ../../GIT-VERSION-FILE)
-TAR_OUT=$(shell go env GOOS)_$(shell go env GOARCH).tar.gz
-
-all:: git-remote-persistent-https git-remote-persistent-https--proxy \
- git-remote-persistent-http
-
-git-remote-persistent-https--proxy: git-remote-persistent-https
- ln -f -s git-remote-persistent-https git-remote-persistent-https--proxy
-
-git-remote-persistent-http: git-remote-persistent-https
- ln -f -s git-remote-persistent-https git-remote-persistent-http
-
-git-remote-persistent-https:
- case $$(go version) in \
- "go version go"1.[0-5].*) EQ=" " ;; *) EQ="=" ;; esac && \
- go build -o git-remote-persistent-https \
- -ldflags "-X main._BUILD_EMBED_LABEL$${EQ}$(BUILD_LABEL)"
-
-clean:
- rm -f git-remote-persistent-http* *.tar.gz
-
-tar: clean all
- @chmod 555 git-remote-persistent-https
- @tar -czf $(TAR_OUT) git-remote-persistent-http* README LICENSE
- @echo
- @echo "Created $(TAR_OUT)"
diff --git a/contrib/persistent-https/README b/contrib/persistent-https/README
deleted file mode 100644
index 7c4cd8d257..0000000000
--- a/contrib/persistent-https/README
+++ /dev/null
@@ -1,72 +0,0 @@
-git-remote-persistent-https
-
-The git-remote-persistent-https binary speeds up SSL operations
-by running a daemon job (git-remote-persistent-https--proxy) that
-keeps a connection open to a server.
-
-
-PRE-BUILT BINARIES
-
-Darwin amd64:
-https://commondatastorage.googleapis.com/git-remote-persistent-https/darwin_amd64.tar.gz
-
-Linux amd64:
-https://commondatastorage.googleapis.com/git-remote-persistent-https/linux_amd64.tar.gz
-
-
-INSTALLING
-
-Move all of the git-remote-persistent-http* binaries to a directory
-in PATH.
-
-
-USAGE
-
-HTTPS requests can be delegated to the proxy by using the
-"persistent-https" scheme, e.g.
-
-git clone persistent-https://kernel.googlesource.com/pub/scm/git/git
-
-Likewise, .gitconfig can be updated as follows to rewrite https urls
-to use persistent-https:
-
-[url "persistent-https"]
- insteadof = https
-[url "persistent-http"]
- insteadof = http
-
-You may also want to allow the use of the persistent-https helper for
-submodule URLs (since any https URLs pointing to submodules will be
-rewritten, and Git's out-of-the-box defaults forbid submodules from
-using unknown remote helpers):
-
-[protocol "persistent-https"]
- allow = always
-[protocol "persistent-http"]
- allow = always
-
-
-#####################################################################
-# BUILDING FROM SOURCE
-#####################################################################
-
-LOCATION
-
-The source is available in the contrib/persistent-https directory of
-the Git source repository. The Git source repository is available at
-git://git.kernel.org/pub/scm/git/git.git/
-https://kernel.googlesource.com/pub/scm/git/git
-
-
-PREREQUISITES
-
-The code is written in Go (http://golang.org/) and the Go compiler is
-required. Currently, the compiler must be built and installed from tip
-of source, in order to include a fix in the reverse http proxy:
-http://code.google.com/p/go/source/detail?r=a615b796570a2cd8591884767a7d67ede74f6648
-
-
-BUILDING
-
-Run "make" to build the binaries. See the section on
-INSTALLING above.
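
The same rewrite and protocol settings shown above can also be applied from
the command line; a minimal sketch, equivalent to the .gitconfig snippets in
this README (global scope assumed):

    git config --global url.persistent-https.insteadOf https
    git config --global url.persistent-http.insteadOf http
    git config --global protocol.persistent-https.allow always
    git config --global protocol.persistent-http.allow always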
diff --git a/contrib/persistent-https/client.go b/contrib/persistent-https/client.go
deleted file mode 100644
index 71125b5832..0000000000
--- a/contrib/persistent-https/client.go
+++ /dev/null
@@ -1,189 +0,0 @@
-// Copyright 2012 Google Inc. All Rights Reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package main
-
-import (
- "bufio"
- "errors"
- "fmt"
- "net"
- "net/url"
- "os"
- "os/exec"
- "strings"
- "syscall"
- "time"
-)
-
-type Client struct {
- ProxyBin string
- Args []string
-
- insecure bool
-}
-
-func (c *Client) Run() error {
- if err := c.resolveArgs(); err != nil {
- return fmt.Errorf("resolveArgs() got error: %v", err)
- }
-
- // Connect to the proxy.
- uconn, hconn, addr, err := c.connect()
- if err != nil {
- return fmt.Errorf("connect() got error: %v", err)
- }
- // Keep the unix socket connection open for the duration of the request.
- defer uconn.Close()
- // Keep a connection to the HTTP server open, so no other user can
- // bind on the same address so long as the process is running.
- defer hconn.Close()
-
- // Start the git-remote-http subprocess.
- cargs := []string{"-c", fmt.Sprintf("http.proxy=%v", addr), "remote-http"}
- cargs = append(cargs, c.Args...)
- cmd := exec.Command("git", cargs...)
-
- for _, v := range os.Environ() {
- if !strings.HasPrefix(v, "GIT_PERSISTENT_HTTPS_SECURE=") {
- cmd.Env = append(cmd.Env, v)
- }
- }
- // Set the GIT_PERSISTENT_HTTPS_SECURE environment variable when
- // the proxy is using a SSL connection. This allows credential helpers
- // to identify secure proxy connections, despite being passed an HTTP
- // scheme.
- if !c.insecure {
- cmd.Env = append(cmd.Env, "GIT_PERSISTENT_HTTPS_SECURE=1")
- }
-
- cmd.Stdin = os.Stdin
- cmd.Stdout = os.Stdout
- cmd.Stderr = os.Stderr
- if err := cmd.Run(); err != nil {
- if eerr, ok := err.(*exec.ExitError); ok {
- if stat, ok := eerr.ProcessState.Sys().(syscall.WaitStatus); ok && stat.ExitStatus() != 0 {
- os.Exit(stat.ExitStatus())
- }
- }
- return fmt.Errorf("git-remote-http subprocess got error: %v", err)
- }
- return nil
-}
-
-func (c *Client) connect() (uconn net.Conn, hconn net.Conn, addr string, err error) {
- uconn, err = DefaultSocket.Dial()
- if err != nil {
- if e, ok := err.(*net.OpError); ok && (os.IsNotExist(e.Err) || e.Err == syscall.ECONNREFUSED) {
- if err = c.startProxy(); err == nil {
- uconn, err = DefaultSocket.Dial()
- }
- }
- if err != nil {
- return
- }
- }
-
- if addr, err = c.readAddr(uconn); err != nil {
- return
- }
-
- // Open a tcp connection to the proxy.
- if hconn, err = net.Dial("tcp", addr); err != nil {
- return
- }
-
- // Verify the address hasn't changed ownership.
- var addr2 string
- if addr2, err = c.readAddr(uconn); err != nil {
- return
- } else if addr != addr2 {
- err = fmt.Errorf("address changed after connect. got %q, want %q", addr2, addr)
- return
- }
- return
-}
-
-func (c *Client) readAddr(conn net.Conn) (string, error) {
- conn.SetDeadline(time.Now().Add(5 * time.Second))
- data := make([]byte, 100)
- n, err := conn.Read(data)
- if err != nil {
- return "", fmt.Errorf("error reading unix socket: %v", err)
- } else if n == 0 {
- return "", errors.New("empty data response")
- }
- conn.Write([]byte{1}) // Ack
-
- var addr string
- if addrs := strings.Split(string(data[:n]), "\n"); len(addrs) != 2 {
- return "", fmt.Errorf("got %q, wanted 2 addresses", data[:n])
- } else if c.insecure {
- addr = addrs[1]
- } else {
- addr = addrs[0]
- }
- return addr, nil
-}
-
-func (c *Client) startProxy() error {
- cmd := exec.Command(c.ProxyBin)
- cmd.SysProcAttr = &syscall.SysProcAttr{Setpgid: true}
- stdout, err := cmd.StdoutPipe()
- if err != nil {
- return err
- }
- defer stdout.Close()
- if err := cmd.Start(); err != nil {
- return err
- }
- result := make(chan error)
- go func() {
- bytes, _, err := bufio.NewReader(stdout).ReadLine()
- if line := string(bytes); err == nil && line != "OK" {
- err = fmt.Errorf("proxy returned %q, want \"OK\"", line)
- }
- result <- err
- }()
- select {
- case err := <-result:
- return err
- case <-time.After(5 * time.Second):
- return errors.New("timeout waiting for proxy to start")
- }
- panic("not reachable")
-}
-
-func (c *Client) resolveArgs() error {
- if nargs := len(c.Args); nargs == 0 {
- return errors.New("remote needed")
- } else if nargs > 2 {
- return fmt.Errorf("want at most 2 args, got %v", c.Args)
- }
-
- // Rewrite the url scheme to be http.
- idx := len(c.Args) - 1
- rawurl := c.Args[idx]
- rurl, err := url.Parse(rawurl)
- if err != nil {
- return fmt.Errorf("invalid remote: %v", err)
- }
- c.insecure = rurl.Scheme == "persistent-http"
- rurl.Scheme = "http"
- c.Args[idx] = rurl.String()
- if idx != 0 && c.Args[0] == rawurl {
- c.Args[0] = c.Args[idx]
- }
- return nil
-}
diff --git a/contrib/persistent-https/main.go b/contrib/persistent-https/main.go
deleted file mode 100644
index fd1b107743..0000000000
--- a/contrib/persistent-https/main.go
+++ /dev/null
@@ -1,82 +0,0 @@
-// Copyright 2012 Google Inc. All Rights Reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// The git-remote-persistent-https binary speeds up SSL operations by running
-// a daemon job that keeps a connection open to a Git server. It ensures that
-// the git-remote-persistent-https--proxy daemon is running and delegates
-// execution to the git-remote-http binary with its http proxy set to the daemon.
-// A unix socket is used to authenticate the proxy and discover the
-// HTTP address. Note, both the client and proxy are included in the same
-// binary.
-package main
-
-import (
- "flag"
- "fmt"
- "log"
- "os"
- "strings"
- "time"
-)
-
-var (
- forceProxy = flag.Bool("proxy", false, "Whether to start the binary in proxy mode")
- proxyBin = flag.String("proxy_bin", "git-remote-persistent-https--proxy", "Path to the proxy binary")
- printLabel = flag.Bool("print_label", false, "Prints the build label for the binary")
-
- // Variable that should be defined through the -X linker flag.
- _BUILD_EMBED_LABEL string
-)
-
-const (
- defaultMaxIdleDuration = 24 * time.Hour
- defaultPollUpdateInterval = 15 * time.Minute
-)
-
-func main() {
- flag.Parse()
- if *printLabel {
- // Short circuit execution to print the build label
- fmt.Println(buildLabel())
- return
- }
-
- var err error
- if *forceProxy || strings.HasSuffix(os.Args[0], "--proxy") {
- log.SetPrefix("git-remote-persistent-https--proxy: ")
- proxy := &Proxy{
- BuildLabel: buildLabel(),
- MaxIdleDuration: defaultMaxIdleDuration,
- PollUpdateInterval: defaultPollUpdateInterval,
- }
- err = proxy.Run()
- } else {
- log.SetPrefix("git-remote-persistent-https: ")
- client := &Client{
- ProxyBin: *proxyBin,
- Args: flag.Args(),
- }
- err = client.Run()
- }
- if err != nil {
- log.Fatalln(err)
- }
-}
-
-func buildLabel() string {
- if _BUILD_EMBED_LABEL == "" {
- log.Println(`unlabeled build; build with "make" to label`)
- }
- return _BUILD_EMBED_LABEL
-}
diff --git a/contrib/persistent-https/proxy.go b/contrib/persistent-https/proxy.go
deleted file mode 100644
index bb0cdba386..0000000000
--- a/contrib/persistent-https/proxy.go
+++ /dev/null
@@ -1,190 +0,0 @@
-// Copyright 2012 Google Inc. All Rights Reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package main
-
-import (
- "fmt"
- "log"
- "net"
- "net/http"
- "net/http/httputil"
- "os"
- "os/exec"
- "os/signal"
- "sync"
- "syscall"
- "time"
-)
-
-type Proxy struct {
- BuildLabel string
- MaxIdleDuration time.Duration
- PollUpdateInterval time.Duration
-
- ul net.Listener
- httpAddr string
- httpsAddr string
-}
-
-func (p *Proxy) Run() error {
- hl, err := net.Listen("tcp", "127.0.0.1:0")
- if err != nil {
- return fmt.Errorf("http listen failed: %v", err)
- }
- defer hl.Close()
-
- hsl, err := net.Listen("tcp", "127.0.0.1:0")
- if err != nil {
- return fmt.Errorf("https listen failed: %v", err)
- }
- defer hsl.Close()
-
- p.ul, err = DefaultSocket.Listen()
- if err != nil {
- c, derr := DefaultSocket.Dial()
- if derr == nil {
- c.Close()
- fmt.Println("OK\nA proxy is already running... exiting")
- return nil
- } else if e, ok := derr.(*net.OpError); ok && e.Err == syscall.ECONNREFUSED {
- // Nothing is listening on the socket, unlink it and try again.
- syscall.Unlink(DefaultSocket.Path())
- p.ul, err = DefaultSocket.Listen()
- }
- if err != nil {
- return fmt.Errorf("unix listen failed on %v: %v", DefaultSocket.Path(), err)
- }
- }
- defer p.ul.Close()
- go p.closeOnSignal()
- go p.closeOnUpdate()
-
- p.httpAddr = hl.Addr().String()
- p.httpsAddr = hsl.Addr().String()
- fmt.Printf("OK\nListening on unix socket=%v http=%v https=%v\n",
- p.ul.Addr(), p.httpAddr, p.httpsAddr)
-
- result := make(chan error, 2)
- go p.serveUnix(result)
- go func() {
- result <- http.Serve(hl, &httputil.ReverseProxy{
- FlushInterval: 500 * time.Millisecond,
- Director: func(r *http.Request) {},
- })
- }()
- go func() {
- result <- http.Serve(hsl, &httputil.ReverseProxy{
- FlushInterval: 500 * time.Millisecond,
- Director: func(r *http.Request) {
- r.URL.Scheme = "https"
- },
- })
- }()
- return <-result
-}
-
-type socketContext struct {
- sync.WaitGroup
- mutex sync.Mutex
- last time.Time
-}
-
-func (sc *socketContext) Done() {
- sc.mutex.Lock()
- defer sc.mutex.Unlock()
- sc.last = time.Now()
- sc.WaitGroup.Done()
-}
-
-func (p *Proxy) serveUnix(result chan<- error) {
- sockCtx := &socketContext{}
- go p.closeOnIdle(sockCtx)
-
- var err error
- for {
- var uconn net.Conn
- uconn, err = p.ul.Accept()
- if err != nil {
- err = fmt.Errorf("accept failed: %v", err)
- break
- }
- sockCtx.Add(1)
- go p.handleUnixConn(sockCtx, uconn)
- }
- sockCtx.Wait()
- result <- err
-}
-
-func (p *Proxy) handleUnixConn(sockCtx *socketContext, uconn net.Conn) {
- defer sockCtx.Done()
- defer uconn.Close()
- data := []byte(fmt.Sprintf("%v\n%v", p.httpsAddr, p.httpAddr))
- uconn.SetDeadline(time.Now().Add(5 * time.Second))
- for i := 0; i < 2; i++ {
- if n, err := uconn.Write(data); err != nil {
- log.Printf("error sending http addresses: %+v\n", err)
- return
- } else if n != len(data) {
- log.Printf("sent %d data bytes, wanted %d\n", n, len(data))
- return
- }
- if _, err := uconn.Read([]byte{0, 0, 0, 0}); err != nil {
- log.Printf("error waiting for Ack: %+v\n", err)
- return
- }
- }
- // Wait without a deadline for the client to finish via EOF
- uconn.SetDeadline(time.Time{})
- uconn.Read([]byte{0, 0, 0, 0})
-}
-
-func (p *Proxy) closeOnIdle(sockCtx *socketContext) {
- for d := p.MaxIdleDuration; d > 0; {
- time.Sleep(d)
- sockCtx.Wait()
- sockCtx.mutex.Lock()
- if d = sockCtx.last.Add(p.MaxIdleDuration).Sub(time.Now()); d <= 0 {
- log.Println("graceful shutdown from idle timeout")
- p.ul.Close()
- }
- sockCtx.mutex.Unlock()
- }
-}
-
-func (p *Proxy) closeOnUpdate() {
- for {
- time.Sleep(p.PollUpdateInterval)
- if out, err := exec.Command(os.Args[0], "--print_label").Output(); err != nil {
- log.Printf("error polling for updated binary: %v\n", err)
- } else if s := string(out[:len(out)-1]); p.BuildLabel != s {
- log.Printf("graceful shutdown from updated binary: %q --> %q\n", p.BuildLabel, s)
- p.ul.Close()
- break
- }
- }
-}
-
-func (p *Proxy) closeOnSignal() {
- ch := make(chan os.Signal, 10)
- signal.Notify(ch, os.Interrupt, os.Kill, os.Signal(syscall.SIGTERM), os.Signal(syscall.SIGHUP))
- sig := <-ch
- p.ul.Close()
- switch sig {
- case os.Signal(syscall.SIGHUP):
- log.Printf("graceful shutdown from signal: %v\n", sig)
- default:
- log.Fatalf("exiting from signal: %v\n", sig)
- }
-}
diff --git a/contrib/persistent-https/socket.go b/contrib/persistent-https/socket.go
deleted file mode 100644
index 193b911dd1..0000000000
--- a/contrib/persistent-https/socket.go
+++ /dev/null
@@ -1,97 +0,0 @@
-// Copyright 2012 Google Inc. All Rights Reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package main
-
-import (
- "fmt"
- "log"
- "net"
- "os"
- "path/filepath"
- "syscall"
-)
-
-// A Socket is a wrapper around a Unix socket that verifies directory
-// permissions.
-type Socket struct {
- Dir string
-}
-
-func defaultDir() string {
- sockPath := ".git-credential-cache"
- if home := os.Getenv("HOME"); home != "" {
- return filepath.Join(home, sockPath)
- }
- log.Printf("socket: cannot find HOME path. using relative directory %q for socket", sockPath)
- return sockPath
-}
-
-// DefaultSocket is a Socket in the $HOME/.git-credential-cache directory.
-var DefaultSocket = Socket{Dir: defaultDir()}
-
-// Listen announces the local network address of the unix socket. The
-// permissions on the socket directory are verified before attempting
-// the actual listen.
-func (s Socket) Listen() (net.Listener, error) {
- network, addr := "unix", s.Path()
- if err := s.mkdir(); err != nil {
- return nil, &net.OpError{Op: "listen", Net: network, Addr: &net.UnixAddr{Name: addr, Net: network}, Err: err}
- }
- return net.Listen(network, addr)
-}
-
-// Dial connects to the unix socket. The permissions on the socket directory
-// are verified before attempting the actual dial.
-func (s Socket) Dial() (net.Conn, error) {
- network, addr := "unix", s.Path()
- if err := s.checkPermissions(); err != nil {
- return nil, &net.OpError{Op: "dial", Net: network, Addr: &net.UnixAddr{Name: addr, Net: network}, Err: err}
- }
- return net.Dial(network, addr)
-}
-
-// Path returns the fully specified file name of the unix socket.
-func (s Socket) Path() string {
- return filepath.Join(s.Dir, "persistent-https-proxy-socket")
-}
-
-func (s Socket) mkdir() error {
- if err := s.checkPermissions(); err == nil {
- return nil
- } else if !os.IsNotExist(err) {
- return err
- }
- if err := os.MkdirAll(s.Dir, 0700); err != nil {
- return err
- }
- return s.checkPermissions()
-}
-
-func (s Socket) checkPermissions() error {
- fi, err := os.Stat(s.Dir)
- if err != nil {
- return err
- }
- if !fi.IsDir() {
- return fmt.Errorf("socket: got file, want directory for %q", s.Dir)
- }
- if fi.Mode().Perm() != 0700 {
- return fmt.Errorf("socket: got perm %o, want 700 for %q", fi.Mode().Perm(), s.Dir)
- }
- if st := fi.Sys().(*syscall.Stat_t); int(st.Uid) != os.Getuid() {
- return fmt.Errorf("socket: got uid %d, want %d for %q", st.Uid, os.Getuid(), s.Dir)
- }
- return nil
-}
diff --git a/contrib/remote-helpers/README b/contrib/remote-helpers/README
deleted file mode 100644
index ac72332517..0000000000
--- a/contrib/remote-helpers/README
+++ /dev/null
@@ -1,15 +0,0 @@
-The remote-helper bridges for accessing data stored in Mercurial and
-Bazaar are maintained outside the git.git tree, in the repositories
-of their primary author:
-
- https://github.com/felipec/git-remote-hg (for Mercurial)
- https://github.com/felipec/git-remote-bzr (for Bazaar)
-
-You can pick a directory on your $PATH and download them from these
-repositories, e.g.:
-
- $ wget -O $HOME/bin/git-remote-hg \
- https://raw.github.com/felipec/git-remote-hg/master/git-remote-hg
- $ wget -O $HOME/bin/git-remote-bzr \
- https://raw.github.com/felipec/git-remote-bzr/master/git-remote-bzr
- $ chmod +x $HOME/bin/git-remote-hg $HOME/bin/git-remote-bzr
diff --git a/contrib/remote-helpers/git-remote-bzr b/contrib/remote-helpers/git-remote-bzr
deleted file mode 100755
index 1c3d87f861..0000000000
--- a/contrib/remote-helpers/git-remote-bzr
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/bin/sh
-
-cat >&2 <<'EOT'
-WARNING: git-remote-bzr is now maintained independently.
-WARNING: For more information visit https://github.com/felipec/git-remote-bzr
-WARNING:
-WARNING: You can pick a directory on your $PATH and download it, e.g.:
-WARNING: $ wget -O $HOME/bin/git-remote-bzr \
-WARNING: https://raw.github.com/felipec/git-remote-bzr/master/git-remote-bzr
-WARNING: $ chmod +x $HOME/bin/git-remote-bzr
-EOT
diff --git a/contrib/remote-helpers/git-remote-hg b/contrib/remote-helpers/git-remote-hg
deleted file mode 100755
index 8e9188364c..0000000000
--- a/contrib/remote-helpers/git-remote-hg
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/bin/sh
-
-cat >&2 <<'EOT'
-WARNING: git-remote-hg is now maintained independently.
-WARNING: For more information visit https://github.com/felipec/git-remote-hg
-WARNING:
-WARNING: You can pick a directory on your $PATH and download it, e.g.:
-WARNING: $ wget -O $HOME/bin/git-remote-hg \
-WARNING: https://raw.github.com/felipec/git-remote-hg/master/git-remote-hg
-WARNING: $ chmod +x $HOME/bin/git-remote-hg
-EOT
diff --git a/contrib/remotes2config.sh b/contrib/remotes2config.sh
deleted file mode 100755
index 1cda19f66a..0000000000
--- a/contrib/remotes2config.sh
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/bin/sh
-
-# Use this tool to rewrite your .git/remotes/ files into the config.
-
-. git-sh-setup
-
-if [ -d "$GIT_DIR"/remotes ]; then
- echo "Rewriting $GIT_DIR/remotes" >&2
- error=0
- # rewrite into config
- {
- cd "$GIT_DIR"/remotes
- ls | while read f; do
- name=$(printf "$f" | tr -c "A-Za-z0-9-" ".")
- sed -n \
- -e "s/^URL:[ ]*\(.*\)$/remote.$name.url \1 ./p" \
- -e "s/^Pull:[ ]*\(.*\)$/remote.$name.fetch \1 ^$ /p" \
- -e "s/^Push:[ ]*\(.*\)$/remote.$name.push \1 ^$ /p" \
- < "$f"
- done
- echo done
- } | while read key value regex; do
- case $key in
- done)
- if [ $error = 0 ]; then
- mv "$GIT_DIR"/remotes "$GIT_DIR"/remotes.old
- fi ;;
- *)
- echo "git config $key "$value" $regex"
- git config $key "$value" $regex || error=1 ;;
- esac
- done
-fi
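
To illustrate the loop above: an old-style file such as .git/remotes/origin
containing (values hypothetical)

    URL: git://example.com/project.git
    Pull: refs/heads/master:refs/heads/origin

would be applied roughly as

    git config remote.origin.url git://example.com/project.git .
    git config remote.origin.fetch refs/heads/master:refs/heads/origin ^$

after which the remotes/ directory is renamed to remotes.old, provided none of
the git config calls failed.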
diff --git a/contrib/stats/git-common-hash b/contrib/stats/git-common-hash
deleted file mode 100755
index e27fd088be..0000000000
--- a/contrib/stats/git-common-hash
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/bin/sh
-
-# This script displays the distribution of longest common hash prefixes.
-# This can be used to determine the minimum prefix length needed
-# for object names to be unique.
-
-git rev-list --objects --all | sort | perl -lne '
- substr($_, 40) = "";
- # uncomment next line for a distribution of bits instead of hex chars
- # $_ = unpack("B*",pack("H*",$_));
- if (defined $p) {
- ($p ^ $_) =~ /^(\0*)/;
- $common = length $1;
- if (defined $pcommon) {
- $count[$pcommon > $common ? $pcommon : $common]++;
- } else {
- $count[$common]++; # first item
- }
- }
- $p = $_;
- $pcommon = $common;
- END {
- $count[$common]++; # last item
- print "$_: $count[$_]" for 0..$#count;
- }
-'
diff --git a/contrib/stats/mailmap.pl b/contrib/stats/mailmap.pl
deleted file mode 100755
index 9513f5e35b..0000000000
--- a/contrib/stats/mailmap.pl
+++ /dev/null
@@ -1,70 +0,0 @@
-#!/usr/bin/perl
-
-use warnings 'all';
-use strict;
-use Getopt::Long;
-
-my $match_emails;
-my $match_names;
-my $order_by = 'count';
-Getopt::Long::Configure(qw(bundling));
-GetOptions(
- 'emails|e!' => \$match_emails,
- 'names|n!' => \$match_names,
- 'count|c' => sub { $order_by = 'count' },
- 'time|t' => sub { $order_by = 'stamp' },
-) or exit 1;
-$match_emails = 1 unless $match_names;
-
-my $email = {};
-my $name = {};
-
-open(my $fh, '-|', "git log --format='%at <%aE> %aN'");
-while(<$fh>) {
- my ($t, $e, $n) = /(\S+) <(\S+)> (.*)/;
- mark($email, $e, $n, $t);
- mark($name, $n, $e, $t);
-}
-close($fh);
-
-if ($match_emails) {
- foreach my $e (dups($email)) {
- foreach my $n (vals($email->{$e})) {
- show($n, $e, $email->{$e}->{$n});
- }
- print "\n";
- }
-}
-if ($match_names) {
- foreach my $n (dups($name)) {
- foreach my $e (vals($name->{$n})) {
- show($n, $e, $name->{$n}->{$e});
- }
- print "\n";
- }
-}
-exit 0;
-
-sub mark {
- my ($h, $k, $v, $t) = @_;
- my $e = $h->{$k}->{$v} ||= { count => 0, stamp => 0 };
- $e->{count}++;
- $e->{stamp} = $t unless $t < $e->{stamp};
-}
-
-sub dups {
- my $h = shift;
- return grep { keys($h->{$_}) > 1 } keys($h);
-}
-
-sub vals {
- my $h = shift;
- return sort {
- $h->{$b}->{$order_by} <=> $h->{$a}->{$order_by}
- } keys($h);
-}
-
-sub show {
- my ($n, $e, $h) = @_;
- print "$n <$e> ($h->{$order_by})\n";
-}
diff --git a/contrib/subtree/README b/contrib/subtree/README
index c686b4a69b..65d167b678 100644
--- a/contrib/subtree/README
+++ b/contrib/subtree/README
@@ -1,5 +1,5 @@
-Please read git-subtree.txt for documentation.
+Please read git-subtree.adoc for documentation.
Please don't contact me using github mail; it's slow, ugly, and worst of
all, redundant. Email me instead at apenwarr@gmail.com and I'll be happy to
diff --git a/contrib/subtree/git-subtree.adoc b/contrib/subtree/git-subtree.adoc
index 004abf415b..b2bcbcad0d 100644
--- a/contrib/subtree/git-subtree.adoc
+++ b/contrib/subtree/git-subtree.adoc
@@ -9,14 +9,14 @@ git-subtree - Merge subtrees together and split repository into subtrees
SYNOPSIS
--------
[verse]
-'git subtree' [<options>] -P <prefix> add <local-commit>
-'git subtree' [<options>] -P <prefix> add <repository> <remote-ref>
-'git subtree' [<options>] -P <prefix> merge <local-commit> [<repository>]
-'git subtree' [<options>] -P <prefix> split [<local-commit>]
+'git subtree' [<options>] -P <prefix> [-S[<keyid>]] add <local-commit>
+'git subtree' [<options>] -P <prefix> [-S[<keyid>]] add <repository> <remote-ref>
+'git subtree' [<options>] -P <prefix> [-S[<keyid>]] merge <local-commit> [<repository>]
+'git subtree' [<options>] -P <prefix> [-S[<keyid>]] split [<local-commit>]
[verse]
-'git subtree' [<options>] -P <prefix> pull <repository> <remote-ref>
-'git subtree' [<options>] -P <prefix> push <repository> <refspec>
+'git subtree' [<options>] -P <prefix> [-S[<keyid>]] pull <repository> <remote-ref>
+'git subtree' [<options>] -P <prefix> [-S[<keyid>]] push <repository> <refspec>
DESCRIPTION
-----------
@@ -149,6 +149,13 @@ OPTIONS FOR ALL COMMANDS
want to manipulate. This option is mandatory
for all commands.
+-S[<keyid>]::
+--gpg-sign[=<keyid>]::
+--no-gpg-sign::
+ GPG-sign commits. The `keyid` argument is optional and
+ defaults to the committer identity; `--no-gpg-sign` is useful to
+ countermand a `--gpg-sign` option given earlier on the command line.
+
OPTIONS FOR 'add' AND 'merge' (ALSO: 'pull', 'split --rejoin', AND 'push --rejoin')
-----------------------------------------------------------------------------------
These options for 'add' and 'merge' may also be given to 'pull' (which
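
The new signing switch is passed straight through to the underlying
git commit-tree and git merge invocations (see the git-subtree.sh changes
below); a minimal usage sketch, with prefix, repository, ref, and key ID all
hypothetical:

    git subtree add --prefix=vendor/lib -S https://example.com/lib.git main
    git subtree split --prefix=vendor/lib --rejoin --gpg-sign=me@example.com

Giving --no-gpg-sign later on the same command line countermands an earlier
-S or --gpg-sign.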
diff --git a/contrib/subtree/git-subtree.sh b/contrib/subtree/git-subtree.sh
index 15ae86db1b..3fddba797c 100755
--- a/contrib/subtree/git-subtree.sh
+++ b/contrib/subtree/git-subtree.sh
@@ -26,12 +26,12 @@ then
fi
OPTS_SPEC="\
-git subtree add --prefix=<prefix> <commit>
-git subtree add --prefix=<prefix> <repository> <ref>
-git subtree merge --prefix=<prefix> <commit>
-git subtree split --prefix=<prefix> [<commit>]
-git subtree pull --prefix=<prefix> <repository> <ref>
-git subtree push --prefix=<prefix> <repository> <refspec>
+git subtree add --prefix=<prefix> [-S[=<key-id>]] <commit>
+git subtree add --prefix=<prefix> [-S[=<key-id>]] <repository> <ref>
+git subtree merge --prefix=<prefix> [-S[=<key-id>]] <commit>
+git subtree split --prefix=<prefix> [-S[=<key-id>]] [<commit>]
+git subtree pull --prefix=<prefix> [-S[=<key-id>]] <repository> <ref>
+git subtree push --prefix=<prefix> [-S[=<key-id>]] <repository> <refspec>
--
h,help! show the help
q,quiet! quiet
@@ -46,6 +46,7 @@ rejoin merge the new branch back into HEAD
options for 'add' and 'merge' (also: 'pull', 'split --rejoin', and 'push --rejoin')
squash merge subtree changes as a single commit
m,message!= use the given message as the commit message for the merge commit
+S,gpg-sign?key-id GPG-sign commits. The keyid argument is optional and defaults to the committer identity
"
indent=0
@@ -115,7 +116,7 @@ main () {
then
set -- -h
fi
- set_args="$(echo "$OPTS_SPEC" | git rev-parse --parseopt -- "$@" || echo exit $?)"
+ set_args="$(echo "$OPTS_SPEC" | git rev-parse --parseopt --stuck-long -- "$@" || echo exit $?)"
eval "$set_args"
. git-sh-setup
require_work_tree
@@ -131,9 +132,6 @@ main () {
opt="$1"
shift
case "$opt" in
- --annotate|-b|-P|-m|--onto)
- shift
- ;;
--rejoin)
arg_split_rejoin=1
;;
@@ -171,48 +169,44 @@ main () {
arg_split_annotate=
arg_addmerge_squash=
arg_addmerge_message=
+ arg_gpg_sign=
while test $# -gt 0
do
opt="$1"
shift
case "$opt" in
- -q)
+ --quiet)
arg_quiet=1
;;
- -d)
+ --debug)
arg_debug=1
;;
- --annotate)
+ --annotate=*)
test -n "$allow_split" || die_incompatible_opt "$opt" "$arg_command"
- arg_split_annotate="$1"
- shift
+ arg_split_annotate="${opt#*=}"
;;
--no-annotate)
test -n "$allow_split" || die_incompatible_opt "$opt" "$arg_command"
arg_split_annotate=
;;
- -b)
+ --branch=*)
test -n "$allow_split" || die_incompatible_opt "$opt" "$arg_command"
- arg_split_branch="$1"
- shift
+ arg_split_branch="${opt#*=}"
;;
- -P)
- arg_prefix="${1%/}"
- shift
+ --prefix=*)
+ arg_prefix="${opt#*=}"
;;
- -m)
+ --message=*)
test -n "$allow_addmerge" || die_incompatible_opt "$opt" "$arg_command"
- arg_addmerge_message="$1"
- shift
+ arg_addmerge_message="${opt#*=}"
;;
--no-prefix)
arg_prefix=
;;
- --onto)
+ --onto=*)
test -n "$allow_split" || die_incompatible_opt "$opt" "$arg_command"
- arg_split_onto="$1"
- shift
+ arg_split_onto="${opt#*=}"
;;
--no-onto)
test -n "$allow_split" || die_incompatible_opt "$opt" "$arg_command"
@@ -240,6 +234,9 @@ main () {
test -n "$allow_addmerge" || die_incompatible_opt "$opt" "$arg_command"
arg_addmerge_squash=
;;
+ --gpg-sign=* | --gpg-sign | --no-gpg-sign)
+ arg_gpg_sign="$opt"
+ ;;
--)
break
;;
@@ -272,6 +269,7 @@ main () {
debug "quiet: {$arg_quiet}"
debug "dir: {$dir}"
debug "opts: {$*}"
+ debug "gpg-sign: {$arg_gpg_sign}"
debug
"cmd_$arg_command" "$@"
@@ -537,7 +535,7 @@ copy_commit () {
printf "%s" "$arg_split_annotate"
cat
) |
- git commit-tree "$2" $3 # reads the rest of stdin
+ git commit-tree $arg_gpg_sign "$2" $3 # reads the rest of stdin
) || die "fatal: can't copy commit $1"
}
@@ -683,10 +681,10 @@ new_squash_commit () {
if test -n "$old"
then
squash_msg "$dir" "$oldsub" "$newsub" |
- git commit-tree "$tree" -p "$old" || exit $?
+ git commit-tree $arg_gpg_sign "$tree" -p "$old" || exit $?
else
squash_msg "$dir" "" "$newsub" |
- git commit-tree "$tree" || exit $?
+ git commit-tree $arg_gpg_sign "$tree" || exit $?
fi
}
@@ -925,11 +923,11 @@ cmd_add_commit () {
then
rev=$(new_squash_commit "" "" "$rev") || exit $?
commit=$(add_squashed_msg "$rev" "$dir" |
- git commit-tree "$tree" $headp -p "$rev") || exit $?
+ git commit-tree $arg_gpg_sign "$tree" $headp -p "$rev") || exit $?
else
revp=$(peel_committish "$rev") || exit $?
commit=$(add_msg "$dir" $headrev "$rev" |
- git commit-tree "$tree" $headp -p "$revp") || exit $?
+ git commit-tree $arg_gpg_sign "$tree" $headp -p "$revp") || exit $?
fi
git reset "$commit" || exit $?
@@ -1080,9 +1078,9 @@ cmd_merge () {
if test -n "$arg_addmerge_message"
then
git merge --no-ff -Xsubtree="$arg_prefix" \
- --message="$arg_addmerge_message" "$rev"
+ --message="$arg_addmerge_message" $arg_gpg_sign "$rev"
else
- git merge --no-ff -Xsubtree="$arg_prefix" $rev
+ git merge --no-ff -Xsubtree="$arg_prefix" $arg_gpg_sign $rev
fi
}
diff --git a/contrib/subtree/meson.build b/contrib/subtree/meson.build
index 63714166a6..98dd8e0c8e 100644
--- a/contrib/subtree/meson.build
+++ b/contrib/subtree/meson.build
@@ -21,7 +21,7 @@ if get_option('tests')
env: subtree_test_environment,
workdir: meson.current_source_dir() / 't',
depends: test_dependencies + bin_wrappers + [ git_subtree ],
- timeout: 0,
+ kwargs: test_kwargs,
)
endif
diff --git a/contrib/subtree/t/t7900-subtree.sh b/contrib/subtree/t/t7900-subtree.sh
index 3c6103f6d2..3edbb33af4 100755
--- a/contrib/subtree/t/t7900-subtree.sh
+++ b/contrib/subtree/t/t7900-subtree.sh
@@ -11,6 +11,7 @@ and push subcommands of git subtree.
TEST_DIRECTORY=$(pwd)/../../../t
. "$TEST_DIRECTORY"/test-lib.sh
+. "$TEST_DIRECTORY"/lib-gpg.sh
# Use our own wrapper around test-lib.sh's test_create_repo, in order
# to set log.date=relative. `git subtree` parses the output of `git
@@ -1563,4 +1564,116 @@ test_expect_success 'subtree descendant check' '
)
'
+test_expect_success GPG 'add subproj with GPG signing using -S flag' '
+ subtree_test_create_repo "$test_count" &&
+ subtree_test_create_repo "$test_count/sub proj" &&
+ test_create_commit "$test_count" main1 &&
+ test_create_commit "$test_count/sub proj" sub1 &&
+ (
+ cd "$test_count" &&
+ git fetch ./"sub proj" HEAD &&
+ git subtree add --prefix="sub dir" -S FETCH_HEAD &&
+ git verify-commit HEAD &&
+ test "$(last_commit_subject)" = "Add '\''sub dir/'\'' from commit '\''$(git rev-parse FETCH_HEAD)'\''"
+ )
+'
+
+test_expect_success GPG 'add subproj with GPG signing using --gpg-sign flag' '
+ subtree_test_create_repo "$test_count" &&
+ subtree_test_create_repo "$test_count/sub proj" &&
+ test_create_commit "$test_count" main1 &&
+ test_create_commit "$test_count/sub proj" sub1 &&
+ (
+ cd "$test_count" &&
+ git fetch ./"sub proj" HEAD &&
+ git subtree add --prefix="sub dir" --gpg-sign FETCH_HEAD &&
+ git verify-commit HEAD &&
+ test "$(last_commit_subject)" = "Add '\''sub dir/'\'' from commit '\''$(git rev-parse FETCH_HEAD)'\''"
+ )
+'
+
+test_expect_success GPG 'add subproj with GPG signing using specific key ID' '
+ subtree_test_create_repo "$test_count" &&
+ subtree_test_create_repo "$test_count/sub proj" &&
+ test_create_commit "$test_count" main1 &&
+ test_create_commit "$test_count/sub proj" sub1 &&
+ (
+ cd "$test_count" &&
+ git fetch ./"sub proj" HEAD &&
+ git subtree add --prefix="sub dir" -S"$GIT_COMMITTER_EMAIL" FETCH_HEAD &&
+ git verify-commit HEAD &&
+ test "$(last_commit_subject)" = "Add '\''sub dir/'\'' from commit '\''$(git rev-parse FETCH_HEAD)'\''"
+ )
+'
+
+test_expect_success GPG 'merge with GPG signing' '
+ subtree_test_create_repo "$test_count" &&
+ subtree_test_create_repo "$test_count/sub proj" &&
+ test_create_commit "$test_count" main1 &&
+ test_create_commit "$test_count/sub proj" sub1 &&
+ (
+ cd "$test_count" &&
+ git fetch ./"sub proj" HEAD &&
+ git subtree add --prefix="sub dir" FETCH_HEAD
+ ) &&
+ test_create_commit "$test_count/sub proj" sub2 &&
+ (
+ cd "$test_count" &&
+ git fetch ./"sub proj" HEAD &&
+ git subtree merge --prefix="sub dir" -S FETCH_HEAD &&
+ git verify-commit HEAD
+ )
+'
+
+test_expect_success GPG 'split with GPG signing and --rejoin' '
+ subtree_test_create_repo "$test_count" &&
+ subtree_test_create_repo "$test_count/sub proj" &&
+ test_create_commit "$test_count" main1 &&
+ test_create_commit "$test_count/sub proj" sub1 &&
+ (
+ cd "$test_count" &&
+ git fetch ./"sub proj" HEAD &&
+ git subtree add --prefix="sub dir" FETCH_HEAD
+ ) &&
+ test_create_commit "$test_count" "sub dir/main-sub1" &&
+ (
+ cd "$test_count" &&
+ git subtree split --prefix="sub dir" --rejoin -S &&
+ git verify-commit HEAD
+ )
+'
+
+test_expect_success GPG 'add with --squash and GPG signing' '
+ subtree_test_create_repo "$test_count" &&
+ subtree_test_create_repo "$test_count/sub proj" &&
+ test_create_commit "$test_count" main1 &&
+ test_create_commit "$test_count/sub proj" sub1 &&
+ (
+ cd "$test_count" &&
+ git fetch ./"sub proj" HEAD &&
+ git subtree add --prefix="sub dir" --squash -S FETCH_HEAD &&
+ git verify-commit HEAD &&
+ # With --squash, the commit subject should reference the squash commit (the second parent of the merge)
+ squash_commit=$(git rev-parse HEAD^2) &&
+ test "$(last_commit_subject)" = "Merge commit '\''$squash_commit'\'' as '\''sub dir'\''"
+ )
+'
+
+test_expect_success GPG 'pull with GPG signing' '
+ subtree_test_create_repo "$test_count" &&
+ subtree_test_create_repo "$test_count/sub proj" &&
+ test_create_commit "$test_count" main1 &&
+ test_create_commit "$test_count/sub proj" sub1 &&
+ (
+ cd "$test_count" &&
+ git subtree add --prefix="sub dir" ./"sub proj" HEAD
+ ) &&
+ test_create_commit "$test_count/sub proj" sub2 &&
+ (
+ cd "$test_count" &&
+ git subtree pull --prefix="sub dir" -S ./"sub proj" HEAD &&
+ git verify-commit HEAD
+ )
+'
+
test_done
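
The new cases are guarded by the GPG prerequisite pulled in via lib-gpg.sh, so they are skipped automatically where no usable gpg is available. A sketch of how the contrib suite is typically driven (the invocation shown here is an assumption, not part of the patch):

    # run the whole subtree suite against the just-built git
    make -C contrib/subtree test

    # or run this script on its own, like any other test script
    cd contrib/subtree/t && sh ./t7900-subtree.sh -v -i
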
diff --git a/contrib/thunderbird-patch-inline/README b/contrib/thunderbird-patch-inline/README
deleted file mode 100644
index 000147bbe4..0000000000
--- a/contrib/thunderbird-patch-inline/README
+++ /dev/null
@@ -1,20 +0,0 @@
-appp.sh is a script that is supposed to be used together with ExternalEditor
-for Mozilla Thunderbird. It will let you include patches inline in e-mails
-in an easy way.
-
-Usage:
-- Generate the patch with git format-patch.
-- Start writing a new e-mail in Thunderbird.
-- Press the external editor button (or Ctrl-E) to run appp.sh
-- Select the previously generated patch file.
-- Finish editing the e-mail.
-
-Any text that is entered into the message editor before appp.sh is called
-will be moved to the section between the --- and the diffstat.
-
-All S-O-B:s and Cc:s in the patch will be added to the CC list.
-
-To set it up, just install External Editor and tell it to use appp.sh as the
-editor.
-
-Zenity is a required dependency.
diff --git a/contrib/thunderbird-patch-inline/appp.sh b/contrib/thunderbird-patch-inline/appp.sh
deleted file mode 100755
index fdcc948352..0000000000
--- a/contrib/thunderbird-patch-inline/appp.sh
+++ /dev/null
@@ -1,55 +0,0 @@
-#!/bin/sh
-# Copyright 2008 Lukas Sandström <luksan@gmail.com>
-#
-# AppendPatch - A script to be used together with ExternalEditor
-# for Mozilla Thunderbird to properly include patches inline in e-mails.
-
-# ExternalEditor can be downloaded at http://globs.org/articles.php?lng=en&pg=2
-
-CONFFILE=~/.appprc
-
-SEP="-=-=-=-=-=-=-=-=-=# Don't remove this line #=-=-=-=-=-=-=-=-=-"
-if [ -e "$CONFFILE" ] ; then
- LAST_DIR=$(grep -m 1 "^LAST_DIR=" "${CONFFILE}"|sed -e 's/^LAST_DIR=//')
- cd "${LAST_DIR}"
-else
- cd > /dev/null
-fi
-
-PATCH=$(zenity --file-selection)
-
-if [ "$?" != "0" ] ; then
- #zenity --error --text "No patchfile given."
- exit 1
-fi
-
-cd - > /dev/null
-
-SUBJECT=$(sed -n -e '/^Subject: /p' "${PATCH}")
-HEADERS=$(sed -e '/^'"${SEP}"'$/,$d' $1)
-BODY=$(sed -e "1,/${SEP}/d" $1)
-CMT_MSG=$(sed -e '1,/^$/d' -e '/^---$/,$d' "${PATCH}")
-DIFF=$(sed -e '1,/^---$/d' "${PATCH}")
-
-CCS=$(printf '%s\n%s\n' "$CMT_MSG" "$HEADERS" | sed -n -e 's/^Cc: \(.*\)$/\1,/gp' \
- -e 's/^Signed-off-by: \(.*\)/\1,/gp')
-
-echo "$SUBJECT" > $1
-echo "Cc: $CCS" >> $1
-echo "$HEADERS" | sed -e '/^Subject: /d' -e '/^Cc: /d' >> $1
-echo "$SEP" >> $1
-
-echo "$CMT_MSG" >> $1
-echo "---" >> $1
-if [ "x${BODY}x" != "xx" ] ; then
- echo >> $1
- echo "$BODY" >> $1
- echo >> $1
-fi
-echo "$DIFF" >> $1
-
-LAST_DIR=$(dirname "${PATCH}")
-
-grep -v "^LAST_DIR=" "${CONFFILE}" > "${CONFFILE}_"
-echo "LAST_DIR=${LAST_DIR}" >> "${CONFFILE}_"
-mv "${CONFFILE}_" "${CONFFILE}"
diff --git a/contrib/workdir/.gitattributes b/contrib/workdir/.gitattributes
deleted file mode 100644
index 1f78c5d1bd..0000000000
--- a/contrib/workdir/.gitattributes
+++ /dev/null
@@ -1 +0,0 @@
-/git-new-workdir eol=lf
diff --git a/contrib/workdir/git-new-workdir b/contrib/workdir/git-new-workdir
deleted file mode 100755
index 989197aace..0000000000
--- a/contrib/workdir/git-new-workdir
+++ /dev/null
@@ -1,105 +0,0 @@
-#!/bin/sh
-
-usage () {
- echo "usage:" $@
- exit 127
-}
-
-die () {
- echo $@
- exit 128
-}
-
-failed () {
- die "unable to create new workdir '$new_workdir'!"
-}
-
-if test $# -lt 2 || test $# -gt 3
-then
- usage "$0 <repository> <new_workdir> [<branch>]"
-fi
-
-orig_git=$1
-new_workdir=$2
-branch=$3
-
-# want to make sure that what is pointed to has a .git directory ...
-git_dir=$(cd "$orig_git" 2>/dev/null &&
- git rev-parse --git-dir 2>/dev/null) ||
- die "Not a git repository: \"$orig_git\""
-
-case "$git_dir" in
-.git)
- git_dir="$orig_git/.git"
- ;;
-.)
- git_dir=$orig_git
- ;;
-esac
-
-# don't link to a configured bare repository
-isbare=$(git --git-dir="$git_dir" config --bool --get core.bare)
-if test ztrue = "z$isbare"
-then
- die "\"$git_dir\" has core.bare set to true," \
- " remove from \"$git_dir/config\" to use $0"
-fi
-
-# don't link to a workdir
-if test -h "$git_dir/config"
-then
- die "\"$orig_git\" is a working directory only, please specify" \
- "a complete repository."
-fi
-
-# make sure the links in the workdir have full paths to the original repo
-git_dir=$(cd "$git_dir" && pwd) || exit 1
-
-# don't recreate a workdir over an existing directory, unless it's empty
-if test -d "$new_workdir"
-then
- if test $(ls -a1 "$new_workdir/." | wc -l) -ne 2
- then
- die "destination directory '$new_workdir' is not empty."
- fi
- cleandir="$new_workdir/.git"
-else
- cleandir="$new_workdir"
-fi
-
-mkdir -p "$new_workdir/.git" || failed
-cleandir=$(cd "$cleandir" && pwd) || failed
-
-cleanup () {
- rm -rf "$cleandir"
-}
-siglist="0 1 2 15"
-trap cleanup $siglist
-
-# create the links to the original repo. explicitly exclude index, HEAD and
-# logs/HEAD from the list since they are purely related to the current working
-# directory, and should not be shared.
-for x in config refs logs/refs objects info hooks packed-refs remotes rr-cache svn reftable
-do
- # create a containing directory if needed
- case $x in
- */*)
- mkdir -p "$new_workdir/.git/${x%/*}"
- ;;
- esac
-
- ln -s "$git_dir/$x" "$new_workdir/.git/$x" || failed
-done
-
-# commands below this are run in the context of the new workdir
-cd "$new_workdir" || failed
-
-# copy the HEAD from the original repository as a default branch
-cp "$git_dir/HEAD" .git/HEAD || failed
-
-# the workdir is set up. if the checkout fails, the user can fix it.
-trap - $siglist
-
-# checkout the branch (either the same as HEAD from the original repository,
-# or the one that was asked for)
-git checkout -f $branch
diff --git a/daemon.c b/daemon.c
index d1be61fd57..1f3e23b6a8 100644
--- a/daemon.c
+++ b/daemon.c
@@ -915,11 +915,9 @@ static void handle(int incoming, struct sockaddr *addr, socklen_t addrlen)
static void child_handler(int signo UNUSED)
{
/*
- * Otherwise empty handler because systemcalls will get interrupted
- * upon signal receipt
- * SysV needs the handler to be rearmed
+ * Otherwise empty handler because system calls should get interrupted
+ * upon signal receipt.
*/
- signal(SIGCHLD, child_handler);
}
static int set_reuse_addr(int sockfd)
@@ -990,11 +988,6 @@ static int setup_named_sock(char *listen_addr, int listen_port, struct socketlis
sockfd = socket(ai->ai_family, ai->ai_socktype, ai->ai_protocol);
if (sockfd < 0)
continue;
- if (sockfd >= FD_SETSIZE) {
- logerror("Socket descriptor too large");
- close(sockfd);
- continue;
- }
#ifdef IPV6_V6ONLY
if (ai->ai_family == AF_INET6) {
@@ -1120,6 +1113,7 @@ static void socksetup(struct string_list *listen_addr, int listen_port, struct s
static int service_loop(struct socketlist *socklist)
{
+ struct sigaction sa;
struct pollfd *pfd;
CALLOC_ARRAY(pfd, socklist->nr);
@@ -1129,7 +1123,10 @@ static int service_loop(struct socketlist *socklist)
pfd[i].events = POLLIN;
}
- signal(SIGCHLD, child_handler);
+ sigemptyset(&sa.sa_mask);
+ sa.sa_flags = SA_NOCLDSTOP;
+ sa.sa_handler = child_handler;
+ sigaction(SIGCHLD, &sa, NULL);
for (;;) {
check_dead_children();
@@ -1153,11 +1150,19 @@ static int service_loop(struct socketlist *socklist)
#endif
} ss;
socklen_t sslen = sizeof(ss);
- int incoming = accept(pfd[i].fd, &ss.sa, &sslen);
+ int incoming;
+ int retry = 3;
+
+ redo:
+ incoming = accept(pfd[i].fd, &ss.sa, &sslen);
if (incoming < 0) {
switch (errno) {
- case EAGAIN:
case EINTR:
+ if (--retry)
+ goto redo;
+
+ /* fallthrough */
+ case EAGAIN:
case ECONNABORTED:
continue;
default:
diff --git a/diagnose.c b/diagnose.c
index b1be74be98..5092bf80d3 100644
--- a/diagnose.c
+++ b/diagnose.c
@@ -7,7 +7,7 @@
#include "gettext.h"
#include "hex.h"
#include "strvec.h"
-#include "object-store.h"
+#include "odb.h"
#include "packfile.h"
#include "parse-options.h"
#include "repository.h"
@@ -59,13 +59,13 @@ static void dir_file_stats_objects(const char *full_path,
(uintmax_t)st.st_size);
}
-static int dir_file_stats(struct object_directory *object_dir, void *data)
+static int dir_file_stats(struct odb_source *source, void *data)
{
struct strbuf *buf = data;
- strbuf_addf(buf, "Contents of %s:\n", object_dir->path);
+ strbuf_addf(buf, "Contents of %s:\n", source->path);
- for_each_file_in_pack_dir(object_dir->path, dir_file_stats_objects,
+ for_each_file_in_pack_dir(source->path, dir_file_stats_objects,
data);
return 0;
@@ -228,8 +228,8 @@ int create_diagnostics_archive(struct repository *r,
strbuf_reset(&buf);
strbuf_addstr(&buf, "--add-virtual-file=packs-local.txt:");
- dir_file_stats(r->objects->odb, &buf);
- foreach_alt_odb(dir_file_stats, &buf);
+ dir_file_stats(r->objects->sources, &buf);
+ odb_for_each_alternate(r->objects, dir_file_stats, &buf);
strvec_push(&archiver_args, buf.buf);
strbuf_reset(&buf);
diff --git a/diff-no-index.c b/diff-no-index.c
index 9739b2b268..88ae4cee56 100644
--- a/diff-no-index.c
+++ b/diff-no-index.c
@@ -15,20 +15,57 @@
#include "gettext.h"
#include "revision.h"
#include "parse-options.h"
+#include "pathspec.h"
#include "string-list.h"
#include "dir.h"
-static int read_directory_contents(const char *path, struct string_list *list)
+static int read_directory_contents(const char *path, struct string_list *list,
+ const struct pathspec *pathspec,
+ int skip)
{
+ struct strbuf match = STRBUF_INIT;
+ int len;
DIR *dir;
struct dirent *e;
if (!(dir = opendir(path)))
return error("Could not open directory %s", path);
- while ((e = readdir_skip_dot_and_dotdot(dir)))
+ if (pathspec) {
+ strbuf_addstr(&match, path);
+ strbuf_complete(&match, '/');
+ strbuf_remove(&match, 0, skip);
+
+ len = match.len;
+ }
+
+ while ((e = readdir_skip_dot_and_dotdot(dir))) {
+ if (pathspec) {
+ int is_dir = 0;
+
+ strbuf_setlen(&match, len);
+ strbuf_addstr(&match, e->d_name);
+ if (NOT_CONSTANT(DTYPE(e)) != DT_UNKNOWN) {
+ is_dir = (DTYPE(e) == DT_DIR);
+ } else {
+ struct strbuf pathbuf = STRBUF_INIT;
+
+ strbuf_addstr(&pathbuf, path);
+ strbuf_complete(&pathbuf, '/');
+ is_dir = get_dtype(e, &pathbuf, 0) == DT_DIR;
+ strbuf_release(&pathbuf);
+ }
+
+ if (!match_leading_pathspec(NULL, pathspec,
+ match.buf, match.len,
+ 0, NULL, is_dir))
+ continue;
+ }
+
string_list_insert(list, e->d_name);
+ }
+ strbuf_release(&match);
closedir(dir);
return 0;
}
@@ -131,7 +168,8 @@ static struct diff_filespec *noindex_filespec(const struct git_hash_algo *algop,
}
static int queue_diff(struct diff_options *o, const struct git_hash_algo *algop,
- const char *name1, const char *name2, int recursing)
+ const char *name1, const char *name2, int recursing,
+ const struct pathspec *ps, int skip1, int skip2)
{
int mode1 = 0, mode2 = 0;
enum special special1 = SPECIAL_NONE, special2 = SPECIAL_NONE;
@@ -171,9 +209,9 @@ static int queue_diff(struct diff_options *o, const struct git_hash_algo *algop,
int i1, i2, ret = 0;
size_t len1 = 0, len2 = 0;
- if (name1 && read_directory_contents(name1, &p1))
+ if (name1 && read_directory_contents(name1, &p1, ps, skip1))
return -1;
- if (name2 && read_directory_contents(name2, &p2)) {
+ if (name2 && read_directory_contents(name2, &p2, ps, skip2)) {
string_list_clear(&p1, 0);
return -1;
}
@@ -218,7 +256,7 @@ static int queue_diff(struct diff_options *o, const struct git_hash_algo *algop,
n2 = buffer2.buf;
}
- ret = queue_diff(o, algop, n1, n2, 1);
+ ret = queue_diff(o, algop, n1, n2, 1, ps, skip1, skip2);
}
string_list_clear(&p1, 0);
string_list_clear(&p2, 0);
@@ -258,8 +296,10 @@ static void append_basename(struct strbuf *path, const char *dir, const char *fi
* DWIM "diff D F" into "diff D/F F" and "diff F D" into "diff F D/F"
* Note that we append the basename of F to D/, so "diff a/b/file D"
* becomes "diff a/b/file D/file", not "diff a/b/file D/a/b/file".
+ *
+ * Return 1 if both paths are directories, 0 otherwise.
*/
-static void fixup_paths(const char **path, struct strbuf *replacement)
+static int fixup_paths(const char **path, struct strbuf *replacement)
{
struct stat st;
unsigned int isdir0 = 0, isdir1 = 0;
@@ -282,26 +322,31 @@ static void fixup_paths(const char **path, struct strbuf *replacement)
if ((isdir0 && ispipe1) || (ispipe0 && isdir1))
die(_("cannot compare a named pipe to a directory"));
- if (isdir0 == isdir1)
- return;
+ /* if both paths are directories, we will enable pathspecs */
+ if (isdir0 && isdir1)
+ return 1;
+
if (isdir0) {
append_basename(replacement, path[0], path[1]);
path[0] = replacement->buf;
- } else {
+ } else if (isdir1) {
append_basename(replacement, path[1], path[0]);
path[1] = replacement->buf;
}
+
+ return 0;
}
static const char * const diff_no_index_usage[] = {
- N_("git diff --no-index [<options>] <path> <path>"),
+ N_("git diff --no-index [<options>] <path> <path> [<pathspec>...]"),
NULL
};
int diff_no_index(struct rev_info *revs, const struct git_hash_algo *algop,
int implicit_no_index, int argc, const char **argv)
{
- int i, no_index;
+ struct pathspec pathspec, *ps = NULL;
+ int i, no_index, skip1 = 0, skip2 = 0;
int ret = 1;
const char *paths[2];
char *to_free[ARRAY_SIZE(paths)] = { 0 };
@@ -317,13 +362,12 @@ int diff_no_index(struct rev_info *revs, const struct git_hash_algo *algop,
options = add_diff_options(no_index_options, &revs->diffopt);
argc = parse_options(argc, argv, revs->prefix, options,
diff_no_index_usage, 0);
- if (argc != 2) {
+ if (argc < 2) {
if (implicit_no_index)
warning(_("Not a git repository. Use --no-index to "
"compare two paths outside a working tree"));
usage_with_options(diff_no_index_usage, options);
}
- FREE_AND_NULL(options);
for (i = 0; i < 2; i++) {
const char *p = argv[i];
if (!strcmp(p, "-"))
@@ -337,7 +381,23 @@ int diff_no_index(struct rev_info *revs, const struct git_hash_algo *algop,
paths[i] = p;
}
- fixup_paths(paths, &replacement);
+ if (fixup_paths(paths, &replacement)) {
+ parse_pathspec(&pathspec, PATHSPEC_FROMTOP | PATHSPEC_ATTR,
+ PATHSPEC_PREFER_FULL | PATHSPEC_NO_REPOSITORY,
+ NULL, &argv[2]);
+ if (pathspec.nr)
+ ps = &pathspec;
+
+ skip1 = strlen(paths[0]);
+ skip1 += paths[0][skip1] == '/' ? 0 : 1;
+ skip2 = strlen(paths[1]);
+ skip2 += paths[1][skip2] == '/' ? 0 : 1;
+ } else if (argc > 2) {
+ warning(_("Limiting comparison with pathspecs is only "
+ "supported if both paths are directories."));
+ usage_with_options(diff_no_index_usage, options);
+ }
+ FREE_AND_NULL(options);
revs->diffopt.skip_stat_unmatch = 1;
if (!revs->diffopt.output_format)
@@ -354,7 +414,8 @@ int diff_no_index(struct rev_info *revs, const struct git_hash_algo *algop,
setup_diff_pager(&revs->diffopt);
revs->diffopt.flags.exit_with_status = 1;
- if (queue_diff(&revs->diffopt, algop, paths[0], paths[1], 0))
+ if (queue_diff(&revs->diffopt, algop, paths[0], paths[1], 0, ps,
+ skip1, skip2))
goto out;
diff_set_mnemonic_prefix(&revs->diffopt, "1/", "2/");
diffcore_std(&revs->diffopt);
@@ -370,5 +431,7 @@ out:
for (i = 0; i < ARRAY_SIZE(to_free); i++)
free(to_free[i]);
strbuf_release(&replacement);
+ if (ps)
+ clear_pathspec(ps);
return ret;
}
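
When both endpoints are directories, the extra arguments are parsed as pathspecs and matched against paths relative to the compared directories (that is what the skip1/skip2 offsets strip off); with a file on either side they are rejected with the warning above. A hedged usage sketch with made-up paths:

    # limit the directory comparison to C sources and the docs subdirectory
    git diff --no-index old-tree/ new-tree/ '*.c' docs

    # one file against a directory is still DWIMed as before (no pathspecs)
    git diff --no-index old-tree/main.c new-tree/
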
diff --git a/diff.c b/diff.c
index 90e8003dd1..dca87e164f 100644
--- a/diff.c
+++ b/diff.c
@@ -23,7 +23,7 @@
#include "color.h"
#include "run-command.h"
#include "utf8.h"
-#include "object-store.h"
+#include "odb.h"
#include "userdiff.h"
#include "submodule.h"
#include "hashmap.h"
@@ -4230,14 +4230,14 @@ int diff_populate_filespec(struct repository *r,
info.contentp = &s->data;
if (options && options->missing_object_cb) {
- if (!oid_object_info_extended(r, &s->oid, &info,
- OBJECT_INFO_LOOKUP_REPLACE |
- OBJECT_INFO_SKIP_FETCH_OBJECT))
+ if (!odb_read_object_info_extended(r->objects, &s->oid, &info,
+ OBJECT_INFO_LOOKUP_REPLACE |
+ OBJECT_INFO_SKIP_FETCH_OBJECT))
goto object_read;
options->missing_object_cb(options->missing_object_data);
}
- if (oid_object_info_extended(r, &s->oid, &info,
- OBJECT_INFO_LOOKUP_REPLACE))
+ if (odb_read_object_info_extended(r->objects, &s->oid, &info,
+ OBJECT_INFO_LOOKUP_REPLACE))
die("unable to read %s", oid_to_hex(&s->oid));
object_read:
@@ -4252,8 +4252,8 @@ object_read:
}
if (!info.contentp) {
info.contentp = &s->data;
- if (oid_object_info_extended(r, &s->oid, &info,
- OBJECT_INFO_LOOKUP_REPLACE))
+ if (odb_read_object_info_extended(r->objects, &s->oid, &info,
+ OBJECT_INFO_LOOKUP_REPLACE))
die("unable to read %s", oid_to_hex(&s->oid));
}
s->should_free = 1;
@@ -7019,8 +7019,8 @@ void diff_add_if_missing(struct repository *r,
{
if (filespec && filespec->oid_valid &&
!S_ISGITLINK(filespec->mode) &&
- oid_object_info_extended(r, &filespec->oid, NULL,
- OBJECT_INFO_FOR_PREFETCH))
+ odb_read_object_info_extended(r->objects, &filespec->oid, NULL,
+ OBJECT_INFO_FOR_PREFETCH))
oid_array_append(to_fetch, &filespec->oid);
}
diff --git a/dir.c b/dir.c
index a374972b62..02873f59ea 100644
--- a/dir.c
+++ b/dir.c
@@ -302,7 +302,7 @@ static int do_read_blob(const struct object_id *oid, struct oid_stat *oid_stat,
*size_out = 0;
*data_out = NULL;
- data = repo_read_object_file(the_repository, oid, &type, &sz);
+ data = odb_read_object(the_repository->objects, oid, &type, &sz);
if (!data || type != OBJ_BLOB) {
free(data);
return -1;
@@ -397,9 +397,12 @@ static int match_pathspec_item(struct index_state *istate,
strncmp(item->match, name - prefix, item->prefix))
return 0;
- if (item->attr_match_nr &&
- !match_pathspec_attrs(istate, name - prefix, namelen + prefix, item))
- return 0;
+ if (item->attr_match_nr) {
+ if (!istate)
+ BUG("magic PATHSPEC_ATTR requires an index");
+ if (!match_pathspec_attrs(istate, name - prefix, namelen + prefix, item))
+ return 0;
+ }
/* If the match was just the prefix, we matched */
if (!*match)
@@ -577,6 +580,16 @@ int match_pathspec(struct index_state *istate,
prefix, seen, flags);
}
+int match_leading_pathspec(struct index_state *istate,
+ const struct pathspec *ps,
+ const char *name, int namelen,
+ int prefix, char *seen, int is_dir)
+{
+ unsigned flags = is_dir ? DO_MATCH_DIRECTORY | DO_MATCH_LEADING_PATHSPEC : 0;
+ return match_pathspec_with_flags(istate, ps, name, namelen,
+ prefix, seen, flags);
+}
+
/**
* Check if a submodule is a superset of the pathspec
*/
diff --git a/dir.h b/dir.h
index d7e71aa8da..fc9be7b427 100644
--- a/dir.h
+++ b/dir.h
@@ -676,4 +676,27 @@ static inline int starts_with_dot_dot_slash_native(const char *const path)
return path_match_flags(path, what | PATH_MATCH_NATIVE);
}
+/**
+ * starts_with_dot_slash: convenience wrapper for
+ * path_match_flags() with PATH_MATCH_STARTS_WITH_DOT_SLASH and
+ * PATH_MATCH_XPLATFORM.
+ */
+static inline int starts_with_dot_slash(const char *const path)
+{
+ const enum path_match_flags what = PATH_MATCH_STARTS_WITH_DOT_SLASH;
+
+ return path_match_flags(path, what | PATH_MATCH_XPLATFORM);
+}
+
+/**
+ * starts_with_dot_dot_slash: convenience wrapper for
+ * path_match_flags() with PATH_MATCH_STARTS_WITH_DOT_DOT_SLASH and
+ * PATH_MATCH_XPLATFORM.
+ */
+static inline int starts_with_dot_dot_slash(const char *const path)
+{
+ const enum path_match_flags what = PATH_MATCH_STARTS_WITH_DOT_DOT_SLASH;
+
+ return path_match_flags(path, what | PATH_MATCH_XPLATFORM);
+}
#endif
diff --git a/entry.c b/entry.c
index f36ec5ad24..cae02eb503 100644
--- a/entry.c
+++ b/entry.c
@@ -1,7 +1,7 @@
#define USE_THE_REPOSITORY_VARIABLE
#include "git-compat-util.h"
-#include "object-store.h"
+#include "odb.h"
#include "dir.h"
#include "environment.h"
#include "gettext.h"
@@ -93,8 +93,8 @@ void *read_blob_entry(const struct cache_entry *ce, size_t *size)
{
enum object_type type;
unsigned long ul;
- void *blob_data = repo_read_object_file(the_repository, &ce->oid,
- &type, &ul);
+ void *blob_data = odb_read_object(the_repository->objects, &ce->oid,
+ &type, &ul);
*size = ul;
if (blob_data) {
diff --git a/environment.c b/environment.c
index c61d773e7e..7c2480b22e 100644
--- a/environment.c
+++ b/environment.c
@@ -37,7 +37,6 @@ int ignore_case;
int assume_unchanged;
int is_bare_repository_cfg = -1; /* unspecified */
int warn_on_object_refname_ambiguity = 1;
-int repository_format_precious_objects;
char *git_commit_encoding;
char *git_log_output_encoding;
char *apply_default_whitespace;
@@ -113,9 +112,6 @@ const char *comment_line_str = "#";
char *comment_line_str_to_free;
int auto_comment_line_char;
-/* Parallel index stat data preload? */
-int core_preload_index = 1;
-
/* This is set by setup_git_directory_gently() and/or git_default_config() */
char *git_work_tree_cfg;
diff --git a/environment.h b/environment.h
index 3d98461a06..3d806ced6e 100644
--- a/environment.h
+++ b/environment.h
@@ -155,7 +155,6 @@ extern int pack_compression_level;
extern unsigned long pack_size_limit_cfg;
extern int max_allowed_tree_depth;
-extern int core_preload_index;
extern int precomposed_unicode;
extern int protect_hfs;
extern int protect_ntfs;
@@ -190,8 +189,6 @@ extern enum object_creation_mode object_creation_mode;
extern int grafts_keep_true_parents;
-extern int repository_format_precious_objects;
-
const char *get_log_output_encoding(void);
const char *get_commit_output_encoding(void);
diff --git a/fetch-pack.c b/fetch-pack.c
index fa4231fee7..c1be9b76eb 100644
--- a/fetch-pack.c
+++ b/fetch-pack.c
@@ -24,7 +24,7 @@
#include "oid-array.h"
#include "oidset.h"
#include "packfile.h"
-#include "object-store.h"
+#include "odb.h"
#include "path.h"
#include "connected.h"
#include "fetch-negotiator.h"
@@ -34,6 +34,7 @@
#include "commit-graph.h"
#include "sigchain.h"
#include "mergesort.h"
+#include "prio-queue.h"
static int transfer_unpack_limit = -1;
static int fetch_unpack_limit = -1;
@@ -115,7 +116,8 @@ static void for_each_cached_alternate(struct fetch_negotiator *negotiator,
size_t i;
if (!initialized) {
- for_each_alternate_ref(cache_one_alternate, &cache);
+ odb_for_each_alternate_ref(the_repository->objects,
+ cache_one_alternate, &cache);
initialized = 1;
}
@@ -141,15 +143,15 @@ static struct commit *deref_without_lazy_fetch(const struct object_id *oid,
commit = lookup_commit_in_graph(the_repository, oid);
if (commit) {
if (mark_tags_complete_and_check_obj_db) {
- if (!has_object(the_repository, oid, 0))
+ if (!odb_has_object(the_repository->objects, oid, 0))
die_in_commit_graph_only(oid);
}
return commit;
}
while (1) {
- if (oid_object_info_extended(the_repository, oid, &info,
- OBJECT_INFO_SKIP_FETCH_OBJECT | OBJECT_INFO_QUICK))
+ if (odb_read_object_info_extended(the_repository->objects, oid, &info,
+ OBJECT_INFO_SKIP_FETCH_OBJECT | OBJECT_INFO_QUICK))
return NULL;
if (type == OBJ_TAG) {
struct tag *tag = (struct tag *)
@@ -600,7 +602,7 @@ done:
return count ? retval : 0;
}
-static struct commit_list *complete;
+static struct prio_queue complete = { compare_commits_by_commit_date };
static int mark_complete(const struct object_id *oid)
{
@@ -608,7 +610,7 @@ static int mark_complete(const struct object_id *oid)
if (commit && !(commit->object.flags & COMPLETE)) {
commit->object.flags |= COMPLETE;
- commit_list_insert(commit, &complete);
+ prio_queue_put(&complete, commit);
}
return 0;
}
@@ -625,9 +627,12 @@ static int mark_complete_oid(const char *refname UNUSED,
static void mark_recent_complete_commits(struct fetch_pack_args *args,
timestamp_t cutoff)
{
- while (complete && cutoff <= complete->item->date) {
+ while (complete.nr) {
+ struct commit *item = prio_queue_peek(&complete);
+ if (item->date < cutoff)
+ break;
print_verbose(args, _("Marking %s as complete"),
- oid_to_hex(&complete->item->object.oid));
+ oid_to_hex(&item->object.oid));
pop_most_recent_commit(&complete, COMPLETE);
}
}
@@ -769,7 +774,7 @@ static void mark_complete_and_common_ref(struct fetch_negotiator *negotiator,
if (!commit) {
struct object *o;
- if (!has_object(the_repository, &ref->old_oid, 0))
+ if (!odb_has_object(the_repository->objects, &ref->old_oid, 0))
continue;
o = parse_object(the_repository, &ref->old_oid);
if (!o || o->type != OBJ_COMMIT)
@@ -797,7 +802,6 @@ static void mark_complete_and_common_ref(struct fetch_negotiator *negotiator,
refs_for_each_rawref(get_main_ref_store(the_repository),
mark_complete_oid, NULL);
for_each_cached_alternate(NULL, mark_alternate_complete);
- commit_list_sort_by_date(&complete);
if (cutoff)
mark_recent_complete_commits(args, cutoff);
}
@@ -1342,7 +1346,7 @@ static void write_fetch_command_and_capabilities(struct strbuf *req_buf,
die(_("mismatched algorithms: client %s; server %s"),
the_hash_algo->name, hash_name);
packet_buf_write(req_buf, "object-format=%s", the_hash_algo->name);
- } else if (hash_algo_by_ptr(the_hash_algo) != GIT_HASH_SHA1) {
+ } else if (hash_algo_by_ptr(the_hash_algo) != GIT_HASH_SHA1_LEGACY) {
die(_("the server does not support algorithm '%s'"),
the_hash_algo->name);
}
@@ -1983,8 +1987,8 @@ static void update_shallow(struct fetch_pack_args *args,
struct oid_array extra = OID_ARRAY_INIT;
struct object_id *oid = si->shallow->oid;
for (i = 0; i < si->shallow->nr; i++)
- if (has_object(the_repository, &oid[i],
- HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR))
+ if (odb_has_object(the_repository->objects, &oid[i],
+ HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR))
oid_array_append(&extra, &oid[i]);
if (extra.nr) {
setup_alternate_shallow(&shallow_lock,
diff --git a/fmt-merge-msg.c b/fmt-merge-msg.c
index 501b5acdd4..40174efa3d 100644
--- a/fmt-merge-msg.c
+++ b/fmt-merge-msg.c
@@ -6,7 +6,7 @@
#include "environment.h"
#include "refs.h"
#include "object-name.h"
-#include "object-store.h"
+#include "odb.h"
#include "diff.h"
#include "diff-merges.h"
#include "hex.h"
@@ -526,8 +526,8 @@ static void fmt_merge_msg_sigs(struct strbuf *out)
struct object_id *oid = origins.items[i].util;
enum object_type type;
unsigned long size;
- char *buf = repo_read_object_file(the_repository, oid, &type,
- &size);
+ char *buf = odb_read_object(the_repository->objects, oid,
+ &type, &size);
char *origbuf = buf;
unsigned long len = size;
struct signature_check sigc = { NULL };
diff --git a/fsck.c b/fsck.c
index 8dc8472ceb..23965e1880 100644
--- a/fsck.c
+++ b/fsck.c
@@ -4,7 +4,7 @@
#include "date.h"
#include "dir.h"
#include "hex.h"
-#include "object-store.h"
+#include "odb.h"
#include "path.h"
#include "repository.h"
#include "object.h"
@@ -1293,7 +1293,7 @@ static int fsck_blobs(struct oidset *blobs_found, struct oidset *blobs_done,
if (oidset_contains(blobs_done, oid))
continue;
- buf = repo_read_object_file(the_repository, oid, &type, &size);
+ buf = odb_read_object(the_repository->objects, oid, &type, &size);
if (!buf) {
if (is_promisor_object(the_repository, oid))
continue;
diff --git a/git-compat-util.h b/git-compat-util.h
index 4678e21c4c..9408f463e3 100644
--- a/git-compat-util.h
+++ b/git-compat-util.h
@@ -460,6 +460,8 @@ void warning_errno(const char *err, ...) __attribute__((format (printf, 1, 2)));
void show_usage_if_asked(int ac, const char **av, const char *err);
+NORETURN void you_still_use_that(const char *command_name);
+
#ifndef NO_OPENSSL
#ifdef APPLE_COMMON_CRYPTO
#include "compat/apple-common-crypto.h"
@@ -895,16 +897,16 @@ static inline size_t xsize_t(off_t len)
* is done via tolower(), so it is strictly ASCII (no multi-byte characters or
* locale-specific conversions).
*/
-static inline int skip_iprefix(const char *str, const char *prefix,
+static inline bool skip_iprefix(const char *str, const char *prefix,
const char **out)
{
do {
if (!*prefix) {
*out = str;
- return 1;
+ return true;
}
} while (tolower(*str++) == tolower(*prefix++));
- return 0;
+ return false;
}
/*
@@ -912,7 +914,7 @@ static inline int skip_iprefix(const char *str, const char *prefix,
* comparison is done via tolower(), so it is strictly ASCII (no multi-byte
* characters or locale-specific conversions).
*/
-static inline int skip_iprefix_mem(const char *buf, size_t len,
+static inline bool skip_iprefix_mem(const char *buf, size_t len,
const char *prefix,
const char **out, size_t *outlen)
{
@@ -920,10 +922,10 @@ static inline int skip_iprefix_mem(const char *buf, size_t len,
if (!*prefix) {
*out = buf;
*outlen = len;
- return 1;
+ return true;
}
} while (len-- > 0 && tolower(*buf++) == tolower(*prefix++));
- return 0;
+ return false;
}
static inline int strtoul_ui(char const *s, int base, unsigned int *result)
diff --git a/git-gui/git-gui.sh b/git-gui/git-gui.sh
index 28572c889c..8bb121db4f 100755
--- a/git-gui/git-gui.sh
+++ b/git-gui/git-gui.sh
@@ -30,9 +30,7 @@ along with this program; if not, see <https://www.gnu.org/licenses/>.}]
##
## Tcl/Tk sanity check
-if {[catch {package require Tcl 8.5} err]
- || [catch {package require Tk 8.5} err]
-} {
+if {[catch {package require Tcl 8.6-8.8} err]} {
catch {wm withdraw .}
tk_messageBox \
-icon error \
@@ -77,99 +75,176 @@ proc is_Cygwin {} {
######################################################################
##
-## PATH lookup
+## PATH lookup. Sanitize $PATH and ensure that exec/open use only that path
+
+if {[is_Windows]} {
+ set _path_sep {;}
+} else {
+ set _path_sep {:}
+}
+
+if {[is_Windows]} {
+ set gitguidir [file dirname [info script]]
+ regsub -all ";" $gitguidir "\\;" gitguidir
+ set env(PATH) "$gitguidir;$env(PATH)"
+}
set _search_path {}
-proc _which {what args} {
- global env _search_exe _search_path
+set _path_seen [dict create]
+foreach p [split $env(PATH) $_path_sep] {
+ # Keep only absolute paths, getting rid of ., empty, etc.
+ if {[file pathtype $p] ne {absolute}} {
+ continue
+ }
+ # Keep only the first occurrence of any duplicates.
+ set norm_p [file normalize $p]
+ if {[dict exists $_path_seen $norm_p]} {
+ continue
+ }
+ dict set _path_seen $norm_p 1
+ lappend _search_path $norm_p
+}
+unset _path_seen
- if {$_search_path eq {}} {
- if {[is_Windows]} {
- set gitguidir [file dirname [info script]]
- regsub -all ";" $gitguidir "\\;" gitguidir
- set env(PATH) "$gitguidir;$env(PATH)"
- set _search_path [split $env(PATH) {;}]
- # Skip empty `PATH` elements
- set _search_path [lsearch -all -inline -not -exact \
- $_search_path ""]
- set _search_exe .exe
+set env(PATH) [join $_search_path $_path_sep]
+
+if {[is_Windows]} {
+ proc _which {what args} {
+ global _search_path
+
+ if {[lsearch -exact $args -script] >= 0} {
+ set suffix {}
+ } elseif {[string match *.exe [string tolower $what]]} {
+ # The search string already has the file extension
+ set suffix {}
} else {
- set _search_path [split $env(PATH) :]
- set _search_exe {}
+ set suffix .exe
}
- }
-
- if {[is_Windows] && [lsearch -exact $args -script] >= 0} {
- set suffix {}
- } else {
- set suffix $_search_exe
- }
- foreach p $_search_path {
- set p [file join $p $what$suffix]
- if {[file exists $p]} {
- return [file normalize $p]
+ foreach p $_search_path {
+ set p [file join $p $what$suffix]
+ if {[file exists $p]} {
+ return [file normalize $p]
+ }
}
+ return {}
}
- return {}
-}
-proc sanitize_command_line {command_line from_index} {
- set i $from_index
- while {$i < [llength $command_line]} {
- set cmd [lindex $command_line $i]
- if {[llength [file split $cmd]] < 2} {
- set fullpath [_which $cmd]
- if {$fullpath eq ""} {
- throw {NOT-FOUND} "$cmd not found in PATH"
+ proc sanitize_command_line {command_line from_index} {
+ set i $from_index
+ while {$i < [llength $command_line]} {
+ set cmd [lindex $command_line $i]
+ if {[llength [file split $cmd]] < 2} {
+ set fullpath [_which $cmd]
+ if {$fullpath eq ""} {
+ throw {NOT-FOUND} "$cmd not found in PATH"
+ }
+ lset command_line $i $fullpath
+ }
+
+ # handle piped commands, e.g. `exec A | B`
+ for {incr i} {$i < [llength $command_line]} {incr i} {
+ if {[lindex $command_line $i] eq "|"} {
+ incr i
+ break
+ }
}
- lset command_line $i $fullpath
}
+ return $command_line
+ }
- # handle piped commands, e.g. `exec A | B`
- for {incr i} {$i < [llength $command_line]} {incr i} {
- if {[lindex $command_line $i] eq "|"} {
+ # Override `exec` to avoid unsafe PATH lookup
+
+ rename exec real_exec
+
+ proc exec {args} {
+ # skip options
+ for {set i 0} {$i < [llength $args]} {incr i} {
+ set arg [lindex $args $i]
+ if {$arg eq "--"} {
incr i
break
}
+ if {[string range $arg 0 0] ne "-"} {
+ break
+ }
}
+ set args [sanitize_command_line $args $i]
+ uplevel 1 real_exec $args
}
- return $command_line
-}
-# Override `exec` to avoid unsafe PATH lookup
+ # Override `open` to avoid unsafe PATH lookup
-rename exec real_exec
+ rename open real_open
-proc exec {args} {
- # skip options
- for {set i 0} {$i < [llength $args]} {incr i} {
- set arg [lindex $args $i]
- if {$arg eq "--"} {
- incr i
- break
- }
- if {[string range $arg 0 0] ne "-"} {
- break
+ proc open {args} {
+ set arg0 [lindex $args 0]
+ if {[string range $arg0 0 0] eq "|"} {
+ set command_line [string trim [string range $arg0 1 end]]
+ lset args 0 "| [sanitize_command_line $command_line 0]"
}
+ uplevel 1 real_open $args
+ }
+
+} else {
+ # On non-Windows platforms, auto_execok, exec, and open are safe, and will
+ # use the sanitized search path. But, we need _which for these.
+
+ proc _which {what args} {
+ return [lindex [auto_execok $what] 0]
+ }
+}
+
+# Wrap exec/open to sanitize arguments
+
+# unsafe arguments begin with a redirection, pipe, or background operator
+proc is_arg_unsafe {arg} {
+ regexp {^([<|>&]|2>)} $arg
+}
+
+proc make_arg_safe {arg} {
+ if {[is_arg_unsafe $arg]} {
+ set arg [file join . $arg]
}
- set args [sanitize_command_line $args $i]
- uplevel 1 real_exec $args
+ return $arg
}
-# Override `open` to avoid unsafe PATH lookup
+proc make_arglist_safe {arglist} {
+ set res {}
+ foreach arg $arglist {
+ lappend res [make_arg_safe $arg]
+ }
+ return $res
+}
-rename open real_open
+# executes one command
+# no redirections or pipelines are possible
+# cmd is a list that specifies the command and its arguments
+# calls `exec` and returns its value
+proc safe_exec {cmd} {
+ eval exec [make_arglist_safe $cmd]
+}
+
+# executes one command in the background
+# no redirections or pipelines are possible
+# cmd is a list that specifies the command and its arguments
+# calls `exec` and returns its value
+proc safe_exec_bg {cmd} {
+ eval exec [make_arglist_safe $cmd] &
+}
-proc open {args} {
- set arg0 [lindex $args 0]
- if {[string range $arg0 0 0] eq "|"} {
- set command_line [string trim [string range $arg0 1 end]]
- lset args 0 "| [sanitize_command_line $command_line 0]"
+proc safe_open_file {filename flags} {
+ # a file name starting with "|" would attempt to run a process
+ # but such a file name must be treated as a relative path
+ # hide the "|" behind "./"
+ if {[string index $filename 0] eq "|"} {
+ set filename [file join . $filename]
}
- uplevel 1 real_open $args
+ open $filename $flags
}
+# End exec/open wrappers
+
######################################################################
##
## locate our library
@@ -270,11 +345,11 @@ unset oguimsg
if {[tk windowingsystem] eq "aqua"} {
catch {
- exec osascript -e [format {
+ safe_exec [list osascript -e [format {
tell application "System Events"
set frontmost of processes whose unix id is %d to true
end tell
- } [pid]]
+ } [pid]]]
}
}
@@ -286,7 +361,6 @@ set _appname {Git Gui}
set _gitdir {}
set _gitworktree {}
set _isbare {}
-set _gitexec {}
set _githtmldir {}
set _reponame {}
set _shellpath {@@SHELL_PATH@@}
@@ -304,15 +378,37 @@ if {$_trace >= 0} {
# branches).
set _last_merged_branch {}
-proc shellpath {} {
- global _shellpath env
- if {[string match @@* $_shellpath]} {
- if {[info exists env(SHELL)]} {
- return $env(SHELL)
- } else {
- return /bin/sh
- }
+# for testing, allow unconfigured _shellpath
+if {[string match @@* $_shellpath]} {
+ if {[info exists env(SHELL)]} {
+ set _shellpath $env(SHELL)
+ } else {
+ set _shellpath /bin/sh
}
+}
+
+if {[is_Windows]} {
+ set _shellpath [safe_exec [list cygpath -m $_shellpath]]
+}
+
+if {![file executable $_shellpath] || \
+ !([file pathtype $_shellpath] eq {absolute})} {
+ set errmsg "The defined shell ('$_shellpath') is not usable, \
+ it must be an absolute path to an executable."
+ puts stderr $errmsg
+
+ catch {wm withdraw .}
+ tk_messageBox \
+ -icon error \
+ -type ok \
+ -title "git-gui: configuration error" \
+ -message $errmsg
+ exit 1
+}
+
+
+proc shellpath {} {
+ global _shellpath
return $_shellpath
}
@@ -329,20 +425,6 @@ proc gitdir {args} {
return [eval [list file join $_gitdir] $args]
}
-proc gitexec {args} {
- global _gitexec
- if {$_gitexec eq {}} {
- if {[catch {set _gitexec [git --exec-path]} err]} {
- error "Git not installed?\n\n$err"
- }
- set _gitexec [file normalize $_gitexec]
- }
- if {$args eq {}} {
- return $_gitexec
- }
- return [eval [list file join $_gitexec] $args]
-}
-
proc githtmldir {args} {
global _githtmldir
if {$_githtmldir eq {}} {
@@ -475,81 +557,13 @@ proc _trace_exec {cmd} {
#'" fix poor old emacs font-lock mode
-proc _git_cmd {name} {
- global _git_cmd_path
-
- if {[catch {set v $_git_cmd_path($name)}]} {
- switch -- $name {
- version -
- --version -
- --exec-path { return [list $::_git $name] }
- }
-
- set p [gitexec git-$name$::_search_exe]
- if {[file exists $p]} {
- set v [list $p]
- } elseif {[is_Windows] && [file exists [gitexec git-$name]]} {
- # Try to determine what sort of magic will make
- # git-$name go and do its thing, because native
- # Tcl on Windows doesn't know it.
- #
- set p [gitexec git-$name]
- set f [open $p r]
- set s [gets $f]
- close $f
-
- switch -glob -- [lindex $s 0] {
- #!*sh { set i sh }
- #!*perl { set i perl }
- #!*python { set i python }
- default { error "git-$name is not supported: $s" }
- }
-
- upvar #0 _$i interp
- if {![info exists interp]} {
- set interp [_which $i]
- }
- if {$interp eq {}} {
- error "git-$name requires $i (not in PATH)"
- }
- set v [concat [list $interp] [lrange $s 1 end] [list $p]]
- } else {
- # Assume it is builtin to git somehow and we
- # aren't actually able to see a file for it.
- #
- set v [list $::_git $name]
- }
- set _git_cmd_path($name) $v
- }
- return $v
-}
-
-# Test a file for a hashbang to identify executable scripts on Windows.
-proc is_shellscript {filename} {
- if {![file exists $filename]} {return 0}
- set f [open $filename r]
- fconfigure $f -encoding binary
- set magic [read $f 2]
- close $f
- return [expr {$magic eq "#!"}]
-}
-
-# Run a command connected via pipes on stdout.
# This is for use with textconv filters and uses sh -c "..." to allow it to
-# contain a command with arguments. On windows we must check for shell
-# scripts specifically otherwise just call the filter command.
+# contain a command with arguments. We presume this
+# to be a shell script that the configured shell (/bin/sh by default) knows
+# how to run.
proc open_cmd_pipe {cmd path} {
- global env
- if {![file executable [shellpath]]} {
- set exe [auto_execok [lindex $cmd 0]]
- if {[is_shellscript [lindex $exe 0]]} {
- set run [linsert [auto_execok sh] end -c "$cmd \"\$0\"" $path]
- } else {
- set run [concat $exe [lrange $cmd 1 end] $path]
- }
- } else {
- set run [list [shellpath] -c "$cmd \"\$0\"" $path]
- }
+ set run [list [shellpath] -c "$cmd \"\$0\"" $path]
+ set run [make_arglist_safe $run]
return [open |$run r]
}
@@ -559,7 +573,7 @@ proc _lappend_nice {cmd_var} {
if {![info exists _nice]} {
set _nice [_which nice]
- if {[catch {exec $_nice git version}]} {
+ if {[catch {safe_exec [list $_nice git version]}]} {
set _nice {}
} elseif {[is_Windows] && [file dirname $_nice] ne [file dirname $::_git]} {
set _nice {}
@@ -571,7 +585,11 @@ proc _lappend_nice {cmd_var} {
}
proc git {args} {
- set fd [eval [list git_read] $args]
+ git_redir $args {}
+}
+
+proc git_redir {cmd redir} {
+ set fd [git_read $cmd $redir]
fconfigure $fd -translation binary -encoding utf-8
set result [string trimright [read $fd] "\n"]
close $fd
@@ -581,88 +599,47 @@ proc git {args} {
return $result
}
-proc _open_stdout_stderr {cmd} {
- _trace_exec $cmd
+proc safe_open_command {cmd {redir {}}} {
+ set cmd [make_arglist_safe $cmd]
+ _trace_exec [concat $cmd $redir]
if {[catch {
- set fd [open [concat [list | ] $cmd] r]
- } err]} {
- if { [lindex $cmd end] eq {2>@1}
- && $err eq {can not find channel named "1"}
- } {
- # Older versions of Tcl 8.4 don't have this 2>@1 IO
- # redirect operator. Fallback to |& cat for those.
- # The command was not actually started, so its safe
- # to try to start it a second time.
- #
- set fd [open [concat \
- [list | ] \
- [lrange $cmd 0 end-1] \
- [list |& cat] \
- ] r]
- } else {
- error $err
- }
+ set fd [open [concat [list | ] $cmd $redir] r]
+ } err]} {
+ error $err
}
fconfigure $fd -eofchar {}
return $fd
}
-proc git_read {args} {
- set opt [list]
-
- while {1} {
- switch -- [lindex $args 0] {
- --nice {
- _lappend_nice opt
- }
-
- --stderr {
- lappend args 2>@1
- }
-
- default {
- break
- }
+proc git_read {cmd {redir {}}} {
+ global _git
+ set cmdp [concat [list $_git] $cmd]
- }
-
- set args [lrange $args 1 end]
- }
-
- set cmdp [_git_cmd [lindex $args 0]]
- set args [lrange $args 1 end]
-
- return [_open_stdout_stderr [concat $opt $cmdp $args]]
+ return [safe_open_command $cmdp $redir]
}
-proc git_write {args} {
+proc git_read_nice {cmd} {
+ global _git
set opt [list]
- while {1} {
- switch -- [lindex $args 0] {
- --nice {
- _lappend_nice opt
- }
-
- default {
- break
- }
+ _lappend_nice opt
- }
+ set cmdp [concat [list $_git] $cmd]
- set args [lrange $args 1 end]
- }
+ return [safe_open_command [concat $opt $cmdp]]
+}
- set cmdp [_git_cmd [lindex $args 0]]
- set args [lrange $args 1 end]
+proc git_write {cmd} {
+ global _git
+ set cmd [make_arglist_safe $cmd]
+ set cmdp [concat [list $_git] $cmd]
- _trace_exec [concat $opt $cmdp $args]
- return [open [concat [list | ] $opt $cmdp $args] w]
+ _trace_exec $cmdp
+ return [open [concat [list | ] $cmdp] w]
}
proc githook_read {hook_name args} {
- set cmd [concat git hook run --ignore-missing $hook_name -- $args 2>@1]
- return [_open_stdout_stderr $cmd]
+ git_read [concat [list hook run --ignore-missing $hook_name --] $args] [list 2>@1]
}
proc kill_file_process {fd} {
@@ -670,9 +647,9 @@ proc kill_file_process {fd} {
catch {
if {[is_Windows]} {
- exec taskkill /pid $process
+ safe_exec [list taskkill /pid $process]
} else {
- exec kill $process
+ safe_exec [list kill $process]
}
}
}
@@ -698,27 +675,8 @@ proc sq {value} {
proc load_current_branch {} {
global current_branch is_detached
- set fd [open [gitdir HEAD] r]
- fconfigure $fd -translation binary -encoding utf-8
- if {[gets $fd ref] < 1} {
- set ref {}
- }
- close $fd
-
- set pfx {ref: refs/heads/}
- set len [string length $pfx]
- if {[string equal -length $len $pfx $ref]} {
- # We're on a branch. It might not exist. But
- # HEAD looks good enough to be a branch.
- #
- set current_branch [string range $ref $len end]
- set is_detached 0
- } else {
- # Assume this is a detached head.
- #
- set current_branch HEAD
- set is_detached 1
- }
+ set current_branch [git branch --show-current]
+ set is_detached [expr [string length $current_branch] == 0]
}
auto_load tk_optionMenu
@@ -868,18 +826,9 @@ proc apply_config {} {
font configure ${font}italic -slant italic
}
- global use_ttk NS
- set use_ttk 0
- set NS {}
- if {$repo_config(gui.usettk)} {
- set use_ttk [package vsatisfies [package provide Tk] 8.5]
- if {$use_ttk} {
- set NS ttk
- bind [winfo class .] <<ThemeChanged>> [list InitTheme]
- pave_toplevel .
- color::sync_with_theme
- }
- }
+ bind [winfo class .] <<ThemeChanged>> [list InitTheme]
+ pave_toplevel .
+ color::sync_with_theme
global comment_string
set comment_string [get_config core.commentstring]
@@ -946,6 +895,8 @@ if {$_git eq {}} {
##
## version check
+set MIN_GIT_VERSION 2.36
+
if {[catch {set _git_version [git --version]} err]} {
catch {wm withdraw .}
tk_messageBox \
@@ -956,9 +907,10 @@ if {[catch {set _git_version [git --version]} err]} {
$err
-[appname] requires Git 1.5.0 or later."
+[appname] requires Git $MIN_GIT_VERSION or later."
exit 1
}
+
if {![regsub {^git version } $_git_version {} _git_version]} {
catch {wm withdraw .}
tk_messageBox \
@@ -983,92 +935,28 @@ proc get_trimmed_version {s} {
set _real_git_version $_git_version
set _git_version [get_trimmed_version $_git_version]
-if {![regexp {^[1-9]+(\.[0-9]+)+$} $_git_version]} {
- catch {wm withdraw .}
- if {[tk_messageBox \
- -icon warning \
- -type yesno \
- -default no \
- -title "[appname]: warning" \
- -message [mc "Git version cannot be determined.
-
-%s claims it is version '%s'.
-
-%s requires at least Git 1.5.0 or later.
-
-Assume '%s' is version 1.5.0?
-" $_git $_real_git_version [appname] $_real_git_version]] eq {yes}} {
- set _git_version 1.5.0
- } else {
- exit 1
- }
-}
-unset _real_git_version
-
-proc git-version {args} {
- global _git_version
-
- switch [llength $args] {
- 0 {
- return $_git_version
- }
-
- 2 {
- set op [lindex $args 0]
- set vr [lindex $args 1]
- set cm [package vcompare $_git_version $vr]
- return [expr $cm $op 0]
- }
-
- 4 {
- set type [lindex $args 0]
- set name [lindex $args 1]
- set parm [lindex $args 2]
- set body [lindex $args 3]
-
- if {($type ne {proc} && $type ne {method})} {
- error "Invalid arguments to git-version"
- }
- if {[llength $body] < 2 || [lindex $body end-1] ne {default}} {
- error "Last arm of $type $name must be default"
- }
-
- foreach {op vr cb} [lrange $body 0 end-2] {
- if {[git-version $op $vr]} {
- return [uplevel [list $type $name $parm $cb]]
- }
- }
-
- return [uplevel [list $type $name $parm [lindex $body end]]]
- }
+if {[catch {set vcheck [package vcompare $_git_version $MIN_GIT_VERSION]}] ||
+ [expr $vcheck < 0] } {
- default {
- error "git-version >= x"
- }
-
- }
-}
-
-if {[git-version < 1.5]} {
+ set msg1 [mc "Insufficient git version, require: "]
+ set msg2 [mc "git returned:"]
+ set message "$msg1 $MIN_GIT_VERSION\n$msg2 $_real_git_version"
catch {wm withdraw .}
tk_messageBox \
-icon error \
-type ok \
-title [mc "git-gui: fatal error"] \
- -message "[appname] requires Git 1.5.0 or later.
-
-You are using [git-version]:
-
-[git --version]"
+ -message $message
exit 1
}
+unset _real_git_version
######################################################################
##
## configure our library
set idx [file join $oguilib tclIndex]
-if {[catch {set fd [open $idx r]} err]} {
+if {[catch {set fd [safe_open_file $idx r]} err]} {
catch {wm withdraw .}
tk_messageBox \
-icon error \
@@ -1106,53 +994,30 @@ unset -nocomplain idx fd
##
## config file parsing
-git-version proc _parse_config {arr_name args} {
- >= 1.5.3 {
- upvar $arr_name arr
- array unset arr
- set buf {}
- catch {
- set fd_rc [eval \
- [list git_read config] \
- $args \
- [list --null --list]]
- fconfigure $fd_rc -translation binary -encoding utf-8
- set buf [read $fd_rc]
- close $fd_rc
- }
- foreach line [split $buf "\0"] {
- if {[regexp {^([^\n]+)\n(.*)$} $line line name value]} {
- if {[is_many_config $name]} {
- lappend arr($name) $value
- } else {
- set arr($name) $value
- }
- } elseif {[regexp {^([^\n]+)$} $line line name]} {
- # no value given, but interpreting them as
- # boolean will be handled as true
- set arr($name) {}
- }
- }
- }
- default {
- upvar $arr_name arr
- array unset arr
- catch {
- set fd_rc [eval [list git_read config --list] $args]
- while {[gets $fd_rc line] >= 0} {
- if {[regexp {^([^=]+)=(.*)$} $line line name value]} {
- if {[is_many_config $name]} {
- lappend arr($name) $value
- } else {
- set arr($name) $value
- }
- } elseif {[regexp {^([^=]+)$} $line line name]} {
- # no value given, but interpreting them as
- # boolean will be handled as true
- set arr($name) {}
- }
+proc _parse_config {arr_name args} {
+ upvar $arr_name arr
+ array unset arr
+ set buf {}
+ catch {
+ set fd_rc [git_read \
+ [concat config \
+ $args \
+ --null --list]]
+ fconfigure $fd_rc -translation binary -encoding utf-8
+ set buf [read $fd_rc]
+ close $fd_rc
+ }
+ foreach line [split $buf "\0"] {
+ if {[regexp {^([^\n]+)\n(.*)$} $line line name value]} {
+ if {[is_many_config $name]} {
+ lappend arr($name) $value
+ } else {
+ set arr($name) $value
}
- close $fd_rc
+ } elseif {[regexp {^([^\n]+)$} $line line name]} {
+ # no value given, but interpreting them as
+ # boolean will be handled as true
+ set arr($name) {}
}
}
}
@@ -1247,11 +1112,9 @@ citool {
##
## execution environment
-set have_tk85 [expr {[package vcompare $tk_version "8.5"] >= 0}]
-
# Suggest our implementation of askpass, if none is set
if {![info exists env(SSH_ASKPASS)]} {
- set env(SSH_ASKPASS) [gitexec git-gui--askpass]
+ set env(SSH_ASKPASS) [file join [git --exec-path] git-gui--askpass]
}
######################################################################
@@ -1272,9 +1135,23 @@ if {[catch {
load_config 1
apply_config
choose_repository::pick
+ if {![file isdirectory $_gitdir]} {
+ exit 1
+ }
set picked 1
}
+# Use object format as hash algorithm (either "sha1" or "sha256")
+set hashalgorithm [git rev-parse --show-object-format]
+if {$hashalgorithm eq "sha1"} {
+ set hashlength 40
+} elseif {$hashalgorithm eq "sha256"} {
+ set hashlength 64
+} else {
+ puts stderr "Unknown hash algorithm: $hashalgorithm"
+ exit 1
+}
+
# we expand the _gitdir when it's just a single dot (i.e. when we're being
# run from the .git dir itself) lest the routines to find the worktree
# get confused
@@ -1291,20 +1168,7 @@ if {![file isdirectory $_gitdir]} {
load_config 0
apply_config
-# v1.7.0 introduced --show-toplevel to return the canonical work-tree
-if {[package vcompare $_git_version 1.7.0] >= 0} {
- set _gitworktree [git rev-parse --show-toplevel]
-} else {
- # try to set work tree from environment, core.worktree or use
- # cdup to obtain a relative path to the top of the worktree. If
- # run from the top, the ./ prefix ensures normalize expands pwd.
- if {[catch { set _gitworktree $env(GIT_WORK_TREE) }]} {
- set _gitworktree [get_config core.worktree]
- if {$_gitworktree eq ""} {
- set _gitworktree [file normalize ./[git rev-parse --show-cdup]]
- }
- }
-}
+set _gitworktree [git rev-parse --show-toplevel]
if {$_prefix ne {}} {
if {$_gitworktree eq {}} {
@@ -1368,8 +1232,8 @@ set is_conflict_diff 0
set last_revert {}
set last_revert_enc {}
-set nullid "0000000000000000000000000000000000000000"
-set nullid2 "0000000000000000000000000000000000000001"
+set nullid [string repeat 0 $hashlength]
+set nullid2 "[string repeat 0 [expr $hashlength - 1]]1"
######################################################################
##
@@ -1427,7 +1291,7 @@ proc repository_state {ctvar hdvar mhvar} {
set merge_head [gitdir MERGE_HEAD]
if {[file exists $merge_head]} {
set ct merge
- set fd_mh [open $merge_head r]
+ set fd_mh [safe_open_file $merge_head r]
while {[gets $fd_mh line] >= 0} {
lappend mh $line
}
@@ -1446,7 +1310,7 @@ proc PARENT {} {
return $p
}
if {$empty_tree eq {}} {
- set empty_tree [git mktree << {}]
+ set empty_tree [git_redir [list mktree] [list << {}]]
}
return $empty_tree
}
@@ -1505,12 +1369,12 @@ proc rescan {after {honor_trustmtime 1}} {
} else {
set rescan_active 1
ui_status [mc "Refreshing file status..."]
- set fd_rf [git_read update-index \
+ set fd_rf [git_read [list update-index \
-q \
--unmerged \
--ignore-missing \
--refresh \
- ]
+ ]]
fconfigure $fd_rf -blocking 0 -translation binary
fileevent $fd_rf readable \
[list rescan_stage2 $fd_rf $after]
@@ -1530,18 +1394,7 @@ proc rescan_stage2 {fd after} {
close $fd
}
- if {[package vcompare $::_git_version 1.6.3] >= 0} {
- set ls_others [list --exclude-standard]
- } else {
- set ls_others [list --exclude-per-directory=.gitignore]
- if {[have_info_exclude]} {
- lappend ls_others "--exclude-from=[gitdir info exclude]"
- }
- set user_exclude [get_config core.excludesfile]
- if {$user_exclude ne {} && [file readable $user_exclude]} {
- lappend ls_others "--exclude-from=[file normalize $user_exclude]"
- }
- }
+ set ls_others [list --exclude-standard]
set buf_rdi {}
set buf_rdf {}
@@ -1549,12 +1402,8 @@ proc rescan_stage2 {fd after} {
set rescan_active 2
ui_status [mc "Scanning for modified files ..."]
- if {[git-version >= "1.7.2"]} {
- set fd_di [git_read diff-index --cached --ignore-submodules=dirty -z [PARENT]]
- } else {
- set fd_di [git_read diff-index --cached -z [PARENT]]
- }
- set fd_df [git_read diff-files -z]
+ set fd_di [git_read [list diff-index --cached --ignore-submodules=dirty -z [PARENT]]]
+ set fd_df [git_read [list diff-files -z]]
fconfigure $fd_di -blocking 0 -translation binary -encoding binary
fconfigure $fd_df -blocking 0 -translation binary -encoding binary
@@ -1563,7 +1412,7 @@ proc rescan_stage2 {fd after} {
fileevent $fd_df readable [list read_diff_files $fd_df $after]
if {[is_config_true gui.displayuntracked]} {
- set fd_lo [eval git_read ls-files --others -z $ls_others]
+ set fd_lo [git_read [concat ls-files --others -z $ls_others]]
fconfigure $fd_lo -blocking 0 -translation binary -encoding binary
fileevent $fd_lo readable [list read_ls_others $fd_lo $after]
incr rescan_active
@@ -1575,7 +1424,7 @@ proc load_message {file {encoding {}}} {
set f [gitdir $file]
if {[file isfile $f]} {
- if {[catch {set fd [open $f r]}]} {
+ if {[catch {set fd [safe_open_file $f r]}]} {
return 0
}
fconfigure $fd -eofchar {}
@@ -1599,23 +1448,23 @@ proc run_prepare_commit_msg_hook {} {
# it will be .git/MERGE_MSG (merge), .git/SQUASH_MSG (squash), or an
# empty file but existent file.
- set fd_pcm [open [gitdir PREPARE_COMMIT_MSG] a]
+ set fd_pcm [safe_open_file [gitdir PREPARE_COMMIT_MSG] a]
if {[file isfile [gitdir MERGE_MSG]]} {
set pcm_source "merge"
- set fd_mm [open [gitdir MERGE_MSG] r]
+ set fd_mm [safe_open_file [gitdir MERGE_MSG] r]
fconfigure $fd_mm -encoding utf-8
puts -nonewline $fd_pcm [read $fd_mm]
close $fd_mm
} elseif {[file isfile [gitdir SQUASH_MSG]]} {
set pcm_source "squash"
- set fd_sm [open [gitdir SQUASH_MSG] r]
+ set fd_sm [safe_open_file [gitdir SQUASH_MSG] r]
fconfigure $fd_sm -encoding utf-8
puts -nonewline $fd_pcm [read $fd_sm]
close $fd_sm
} elseif {[file isfile [get_config commit.template]]} {
set pcm_source "template"
- set fd_sm [open [get_config commit.template] r]
+ set fd_sm [safe_open_file [get_config commit.template] r]
fconfigure $fd_sm -encoding utf-8
puts -nonewline $fd_pcm [read $fd_sm]
close $fd_sm
@@ -1822,10 +1671,9 @@ proc short_path {path} {
}
set next_icon_id 0
-set null_sha1 [string repeat 0 40]
proc merge_state {path new_state {head_info {}} {index_info {}}} {
- global file_states next_icon_id null_sha1
+ global file_states next_icon_id nullid
set s0 [string index $new_state 0]
set s1 [string index $new_state 1]
@@ -1847,7 +1695,7 @@ proc merge_state {path new_state {head_info {}} {index_info {}}} {
elseif {$s1 eq {_}} {set s1 _}
if {$s0 eq {A} && $s1 eq {_} && $head_info eq {}} {
- set head_info [list 0 $null_sha1]
+ set head_info [list 0 $nullid]
} elseif {$s0 ne {_} && [string index $state 0] eq {_}
&& $head_info eq {}} {
set head_info $index_info
@@ -2205,7 +2053,7 @@ proc do_gitk {revs {is_submodule false}} {
unset env(GIT_DIR)
unset env(GIT_WORK_TREE)
}
- eval exec $cmd $revs "--" "--" &
+ safe_exec_bg [concat $cmd $revs "--" "--"]
set env(GIT_DIR) $_gitdir
set env(GIT_WORK_TREE) $_gitworktree
@@ -2242,7 +2090,7 @@ proc do_git_gui {} {
set pwd [pwd]
cd $current_diff_path
- eval exec $exe gui &
+ safe_exec_bg [concat $exe gui]
set env(GIT_DIR) $_gitdir
set env(GIT_WORK_TREE) $_gitworktree
@@ -2273,16 +2121,18 @@ proc get_explorer {} {
proc do_explore {} {
global _gitworktree
- set explorer [get_explorer]
- eval exec $explorer [list [file nativename $_gitworktree]] &
+ set cmd [get_explorer]
+ lappend cmd [file nativename $_gitworktree]
+ safe_exec_bg $cmd
}
# Open file relative to the working tree by the default associated app.
proc do_file_open {file} {
global _gitworktree
- set explorer [get_explorer]
+ set cmd [get_explorer]
set full_file_path [file join $_gitworktree $file]
- exec $explorer [file nativename $full_file_path] &
+ lappend cmd [file nativename $full_file_path]
+ safe_exec_bg $cmd
}
set is_quitting 0
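Background launches of gitk, a second git-gui, and the system file browser now go through safe_exec_bg with a fully built command list rather than "eval exec ... &". The helper's body is defined elsewhere in the series; a hypothetical sketch of the assumed semantics:

    proc safe_exec_bg {cmdlist} {
        # Expand the list directly; nothing is re-parsed by eval.
        exec {*}$cmdlist &
    }
    # Mirroring do_file_open above:
    #   set cmd [get_explorer]
    #   lappend cmd [file nativename $full_file_path]
    #   safe_exec_bg $cmd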
@@ -2298,7 +2148,7 @@ proc do_quit {{rc {1}}} {
global ui_comm is_quitting repo_config commit_type
global GITGUI_BCK_exists GITGUI_BCK_i
global ui_comm_spell
- global ret_code use_ttk
+ global ret_code
if {$is_quitting} return
set is_quitting 1
@@ -2316,7 +2166,7 @@ proc do_quit {{rc {1}}} {
if {![string match amend* $commit_type]
&& $msg ne {}} {
catch {
- set fd [open $save w]
+ set fd [safe_open_file $save w]
fconfigure $fd -encoding utf-8
puts -nonewline $fd $msg
close $fd
@@ -2356,13 +2206,8 @@ proc do_quit {{rc {1}}} {
}
set cfg_geometry [list]
lappend cfg_geometry [wm geometry .]
- if {$use_ttk} {
- lappend cfg_geometry [.vpane sashpos 0]
- lappend cfg_geometry [.vpane.files sashpos 0]
- } else {
- lappend cfg_geometry [lindex [.vpane sash coord 0] 0]
- lappend cfg_geometry [lindex [.vpane.files sash coord 0] 1]
- }
+ lappend cfg_geometry [.vpane sashpos 0]
+ lappend cfg_geometry [.vpane.files sashpos 0]
if {[catch {set rc_geometry $repo_config(gui.geometry)}]} {
set rc_geometry {}
}
@@ -2760,17 +2605,16 @@ if {![is_bare]} {
if {[is_Windows]} {
# Use /git-bash.exe if available
- set normalized [file normalize $::argv0]
- regsub "/mingw../libexec/git-core/git-gui$" \
- $normalized "/git-bash.exe" cmdLine
- if {$cmdLine != $normalized && [file exists $cmdLine]} {
- set cmdLine [list "Git Bash" $cmdLine &]
+ set _git_bash [safe_exec [list cygpath -m /git-bash.exe]]
+ if {[file executable $_git_bash]} {
+ set _bash_cmdline [list "Git Bash" $_git_bash]
} else {
- set cmdLine [list "Git Bash" bash --login -l &]
+ set _bash_cmdline [list "Git Bash" bash --login -l]
}
.mbar.repository add command \
-label [mc "Git Bash"] \
- -command {eval exec [auto_execok start] $cmdLine}
+ -command {safe_exec_bg [concat [list [_which cmd] /c start] $_bash_cmdline]}
+ unset _git_bash
}
if {[is_Windows] || ![is_bare]} {
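Instead of rewriting $argv0 with regsub, the Windows menu entry now asks cygpath where /git-bash.exe lives and only offers it when that file is executable. A rough stand-alone version of the probe, assuming a Git for Windows environment where cygpath is on PATH:

    if {[catch {exec cygpath -m /git-bash.exe} git_bash]} {
        set git_bash ""
    }
    if {$git_bash ne "" && [file executable $git_bash]} {
        puts "menu would launch: $git_bash"
    } else {
        puts "menu would fall back to: bash --login -l"
    }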
@@ -3179,7 +3023,7 @@ blame {
if {$head eq {}} {
load_current_branch
} else {
- if {[regexp {^[0-9a-f]{1,39}$} $head]} {
+ if {[regexp [string map "@@ [expr $hashlength - 1]" {^[0-9a-f]{1,@@}$}] $head]} {
if {[catch {
set head [git rev-parse --verify $head]
} err]} {
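The hard-coded {40} (and {1,39}) quantifiers in object-id regexps are now built at run time by splicing the hash length into a template with string map; the same pattern appears again in blame.tcl below. The trick in isolation:

    set hashlength 40    ;# 64 in a SHA-256 repository
    set pattern [string map "@@ $hashlength" {^[0-9a-f]{@@}$}]
    puts [regexp $pattern [string repeat a $hashlength]]   ;# 1
    puts [regexp $pattern [string repeat a 39]]            ;# 0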
@@ -3245,13 +3089,12 @@ default {
# -- Branch Control
#
-${NS}::frame .branch
-if {!$use_ttk} {.branch configure -borderwidth 1 -relief sunken}
-${NS}::label .branch.l1 \
+ttk::frame .branch
+ttk::label .branch.l1 \
-text [mc "Current Branch:"] \
-anchor w \
-justify left
-${NS}::label .branch.cb \
+ttk::label .branch.cb \
-textvariable current_branch \
-anchor w \
-justify left
@@ -3261,13 +3104,9 @@ pack .branch -side top -fill x
# -- Main Window Layout
#
-${NS}::panedwindow .vpane -orient horizontal
-${NS}::panedwindow .vpane.files -orient vertical
-if {$use_ttk} {
- .vpane add .vpane.files
-} else {
- .vpane add .vpane.files -sticky nsew -height 100 -width 200
-}
+ttk::panedwindow .vpane -orient horizontal
+ttk::panedwindow .vpane.files -orient vertical
+.vpane add .vpane.files
pack .vpane -anchor n -side top -fill both -expand 1
# -- Working Directory File List
@@ -3284,8 +3123,8 @@ ttext $ui_workdir \
-xscrollcommand {.vpane.files.workdir.sx set} \
-yscrollcommand {.vpane.files.workdir.sy set} \
-state disabled
-${NS}::scrollbar .vpane.files.workdir.sx -orient h -command [list $ui_workdir xview]
-${NS}::scrollbar .vpane.files.workdir.sy -orient v -command [list $ui_workdir yview]
+ttk::scrollbar .vpane.files.workdir.sx -orient h -command [list $ui_workdir xview]
+ttk::scrollbar .vpane.files.workdir.sy -orient v -command [list $ui_workdir yview]
pack .vpane.files.workdir.title -side top -fill x
pack .vpane.files.workdir.sx -side bottom -fill x
pack .vpane.files.workdir.sy -side right -fill y
@@ -3306,8 +3145,8 @@ ttext $ui_index \
-xscrollcommand {.vpane.files.index.sx set} \
-yscrollcommand {.vpane.files.index.sy set} \
-state disabled
-${NS}::scrollbar .vpane.files.index.sx -orient h -command [list $ui_index xview]
-${NS}::scrollbar .vpane.files.index.sy -orient v -command [list $ui_index yview]
+ttk::scrollbar .vpane.files.index.sx -orient h -command [list $ui_index xview]
+ttk::scrollbar .vpane.files.index.sy -orient v -command [list $ui_index yview]
pack .vpane.files.index.title -side top -fill x
pack .vpane.files.index.sx -side bottom -fill x
pack .vpane.files.index.sy -side right -fill y
@@ -3317,10 +3156,6 @@ pack $ui_index -side left -fill both -expand 1
#
.vpane.files add .vpane.files.workdir
.vpane.files add .vpane.files.index
-if {!$use_ttk} {
- .vpane.files paneconfigure .vpane.files.workdir -sticky news
- .vpane.files paneconfigure .vpane.files.index -sticky news
-}
proc set_selection_colors {w has_focus} {
foreach tag [list in_diff in_sel] {
@@ -3341,78 +3176,63 @@ unset i
# -- Diff and Commit Area
#
-if {$have_tk85} {
- ${NS}::panedwindow .vpane.lower -orient vertical
- ${NS}::frame .vpane.lower.commarea
- ${NS}::frame .vpane.lower.diff -relief sunken -borderwidth 1 -height 500
- .vpane.lower add .vpane.lower.diff
- .vpane.lower add .vpane.lower.commarea
- .vpane add .vpane.lower
- if {$use_ttk} {
- .vpane.lower pane .vpane.lower.diff -weight 1
- .vpane.lower pane .vpane.lower.commarea -weight 0
- } else {
- .vpane.lower paneconfigure .vpane.lower.diff -stretch always
- .vpane.lower paneconfigure .vpane.lower.commarea -stretch never
- }
-} else {
- frame .vpane.lower -height 300 -width 400
- frame .vpane.lower.commarea
- frame .vpane.lower.diff -relief sunken -borderwidth 1
- pack .vpane.lower.diff -fill both -expand 1
- pack .vpane.lower.commarea -side bottom -fill x
- .vpane add .vpane.lower
- .vpane paneconfigure .vpane.lower -sticky nsew
-}
+ttk::panedwindow .vpane.lower -orient vertical
+ttk::frame .vpane.lower.commarea
+ttk::frame .vpane.lower.diff -relief sunken -borderwidth 1 -height 500
+.vpane.lower add .vpane.lower.diff
+.vpane.lower add .vpane.lower.commarea
+.vpane add .vpane.lower
+.vpane.lower pane .vpane.lower.diff -weight 1
+.vpane.lower pane .vpane.lower.commarea -weight 0
# -- Commit Area Buttons
#
-${NS}::frame .vpane.lower.commarea.buttons
-${NS}::label .vpane.lower.commarea.buttons.l -text {} \
+ttk::frame .vpane.lower.commarea.buttons
+ttk::label .vpane.lower.commarea.buttons.l -text {} \
-anchor w \
-justify left
pack .vpane.lower.commarea.buttons.l -side top -fill x
pack .vpane.lower.commarea.buttons -side left -fill y
-${NS}::button .vpane.lower.commarea.buttons.rescan -text [mc Rescan] \
+ttk::button .vpane.lower.commarea.buttons.rescan -text [mc Rescan] \
-command ui_do_rescan
pack .vpane.lower.commarea.buttons.rescan -side top -fill x
lappend disable_on_lock \
{.vpane.lower.commarea.buttons.rescan conf -state}
-${NS}::button .vpane.lower.commarea.buttons.incall -text [mc "Stage Changed"] \
+ttk::button .vpane.lower.commarea.buttons.incall -text [mc "Stage Changed"] \
-command do_add_all
pack .vpane.lower.commarea.buttons.incall -side top -fill x
lappend disable_on_lock \
{.vpane.lower.commarea.buttons.incall conf -state}
if {![is_enabled nocommitmsg]} {
- ${NS}::button .vpane.lower.commarea.buttons.signoff -text [mc "Sign Off"] \
+ ttk::button .vpane.lower.commarea.buttons.signoff -text [mc "Sign Off"] \
-command do_signoff
pack .vpane.lower.commarea.buttons.signoff -side top -fill x
}
-${NS}::button .vpane.lower.commarea.buttons.commit -text [commit_btn_caption] \
+ttk::button .vpane.lower.commarea.buttons.commit -text [commit_btn_caption] \
-command do_commit
pack .vpane.lower.commarea.buttons.commit -side top -fill x
lappend disable_on_lock \
{.vpane.lower.commarea.buttons.commit conf -state}
if {![is_enabled nocommit]} {
- ${NS}::button .vpane.lower.commarea.buttons.push -text [mc Push] \
+ ttk::button .vpane.lower.commarea.buttons.push -text [mc Push] \
-command do_push_anywhere
pack .vpane.lower.commarea.buttons.push -side top -fill x
}
# -- Commit Message Buffer
#
-${NS}::frame .vpane.lower.commarea.buffer
-${NS}::frame .vpane.lower.commarea.buffer.header
+ttk::frame .vpane.lower.commarea.buffer
+ttk::frame .vpane.lower.commarea.buffer.header
set ui_comm .vpane.lower.commarea.buffer.frame.t
set ui_coml .vpane.lower.commarea.buffer.header.l
if {![is_enabled nocommit]} {
- ${NS}::checkbutton .vpane.lower.commarea.buffer.header.amend \
+ ttk::checkbutton .vpane.lower.commarea.buffer.header.amend \
-text [mc "Amend Last Commit"] \
-variable commit_type_is_amend \
-command do_select_commit_type
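With $use_ttk and the ${NS}:: indirection gone, the lower pane and the commit buttons are created directly as ttk widgets, and the old paneconfigure -stretch/-sticky juggling collapses into pane weights. A minimal runnable layout in the same shape, assuming Tk 8.5 or newer:

    package require Tk
    ttk::panedwindow .p -orient vertical
    ttk::frame .p.diff
    ttk::frame .p.commarea
    .p add .p.diff
    .p add .p.commarea
    .p pane .p.diff -weight 1        ;# grows when the window grows
    .p pane .p.commarea -weight 0    ;# keeps its requested size
    pack .p -fill both -expand 1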
@@ -3420,7 +3240,7 @@ if {![is_enabled nocommit]} {
[list .vpane.lower.commarea.buffer.header.amend conf -state]
}
-${NS}::label $ui_coml \
+ttk::label $ui_coml \
-anchor w \
-justify left
proc trace_commit_type {varname args} {
@@ -3455,10 +3275,10 @@ ttext $ui_comm \
-font font_diff \
-xscrollcommand {.vpane.lower.commarea.buffer.frame.sbx set} \
-yscrollcommand {.vpane.lower.commarea.buffer.frame.sby set}
-${NS}::scrollbar .vpane.lower.commarea.buffer.frame.sbx \
+ttk::scrollbar .vpane.lower.commarea.buffer.frame.sbx \
-orient horizontal \
-command [list $ui_comm xview]
-${NS}::scrollbar .vpane.lower.commarea.buffer.frame.sby \
+ttk::scrollbar .vpane.lower.commarea.buffer.frame.sby \
-orient vertical \
-command [list $ui_comm yview]
@@ -3581,9 +3401,9 @@ ttext $ui_diff \
-yscrollcommand {.vpane.lower.diff.body.sby set} \
-state disabled
catch {$ui_diff configure -tabstyle wordprocessor}
-${NS}::scrollbar .vpane.lower.diff.body.sbx -orient horizontal \
+ttk::scrollbar .vpane.lower.diff.body.sbx -orient horizontal \
-command [list $ui_diff xview]
-${NS}::scrollbar .vpane.lower.diff.body.sby -orient vertical \
+ttk::scrollbar .vpane.lower.diff.body.sby -orient vertical \
-command [list $ui_diff yview]
pack .vpane.lower.diff.body.sbx -side bottom -fill x
pack .vpane.lower.diff.body.sby -side right -fill y
@@ -3884,29 +3704,14 @@ proc on_ttk_pane_mapped {w pane pos} {
bind $w <Map> {}
after 0 [list after idle [list $w sashpos $pane $pos]]
}
-proc on_tk_pane_mapped {w pane x y} {
- bind $w <Map> {}
- after 0 [list after idle [list $w sash place $pane $x $y]]
-}
proc on_application_mapped {} {
- global repo_config use_ttk
+ global repo_config
bind . <Map> {}
set gm $repo_config(gui.geometry)
- if {$use_ttk} {
- bind .vpane <Map> \
- [list on_ttk_pane_mapped %W 0 [lindex $gm 1]]
- bind .vpane.files <Map> \
- [list on_ttk_pane_mapped %W 0 [lindex $gm 2]]
- } else {
- bind .vpane <Map> \
- [list on_tk_pane_mapped %W 0 \
- [lindex $gm 1] \
- [lindex [.vpane sash coord 0] 1]]
- bind .vpane.files <Map> \
- [list on_tk_pane_mapped %W 0 \
- [lindex [.vpane.files sash coord 0] 0] \
- [lindex $gm 2]]
- }
+ bind .vpane <Map> \
+ [list on_ttk_pane_mapped %W 0 [lindex $gm 1]]
+ bind .vpane.files <Map> \
+ [list on_ttk_pane_mapped %W 0 [lindex $gm 2]]
wm geometry . [lindex $gm 0]
}
if {[info exists repo_config(gui.geometry)]} {
@@ -4079,7 +3884,7 @@ if {[winfo exists $ui_comm]} {
}
} elseif {$m} {
catch {
- set fd [open [gitdir GITGUI_BCK] w]
+ set fd [safe_open_file [gitdir GITGUI_BCK] w]
fconfigure $fd -encoding utf-8
puts -nonewline $fd $msg
close $fd
diff --git a/git-gui/lib/about.tcl b/git-gui/lib/about.tcl
index cfa50fca87..122ebfb71d 100644
--- a/git-gui/lib/about.tcl
+++ b/git-gui/lib/about.tcl
@@ -4,19 +4,19 @@
proc do_about {} {
global appvers copyright oguilib
global tcl_patchLevel tk_patchLevel
- global ui_comm_spell NS use_ttk
+ global ui_comm_spell
set w .about_dialog
Dialog $w
wm geometry $w "+[winfo rootx .]+[winfo rooty .]"
pack [git_logo $w.git_logo] -side left -fill y -padx 10 -pady 10
- ${NS}::label $w.header -text [mc "About %s" [appname]] \
+ ttk::label $w.header -text [mc "About %s" [appname]] \
-font font_uibold -anchor center
pack $w.header -side top -fill x
- ${NS}::frame $w.buttons
- ${NS}::button $w.buttons.close -text {Close} \
+ ttk::frame $w.buttons
+ ttk::button $w.buttons.close -text {Close} \
-default active \
-command [list destroy $w]
pack $w.buttons.close -side right
@@ -44,7 +44,7 @@ proc do_about {} {
set d {}
append d "git wrapper: $::_git\n"
- append d "git exec dir: [gitexec]\n"
+ append d "git exec dir: [git --exec-path]\n"
append d "git-gui lib: $oguilib"
paddedlabel $w.vers -text $v
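The About dialog now asks git itself for the exec directory rather than going through the removed gitexec helper; the same one-liner works in any Tcl shell with git on PATH:

    set execdir [exec git --exec-path]
    puts "git exec dir: $execdir"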
diff --git a/git-gui/lib/blame.tcl b/git-gui/lib/blame.tcl
index 8441e109be..9d4d1ac872 100644
--- a/git-gui/lib/blame.tcl
+++ b/git-gui/lib/blame.tcl
@@ -63,7 +63,7 @@ field tooltip_timer {} ; # Current timer event for our tooltip
field tooltip_commit {} ; # Commit(s) in tooltip
constructor new {i_commit i_path i_jump} {
- global cursor_ptr M1B M1T have_tk85 use_ttk NS
+ global cursor_ptr M1B M1T
variable active_color
variable group_colors
@@ -203,18 +203,17 @@ constructor new {i_commit i_path i_jump} {
-width 80 \
-xscrollcommand [list $w.file_pane.out.sbx set] \
-font font_diff
- if {$have_tk85} {
$w_file configure -inactiveselectbackground darkblue
- }
+
$w_file tag conf found \
-background yellow
set w_columns [list $w_amov $w_asim $w_line $w_file]
- ${NS}::scrollbar $w.file_pane.out.sbx \
+ ttk::scrollbar $w.file_pane.out.sbx \
-orient h \
-command [list $w_file xview]
- ${NS}::scrollbar $w.file_pane.out.sby \
+ ttk::scrollbar $w.file_pane.out.sby \
-orient v \
-command [list scrollbar2many $w_columns yview]
eval grid $w_columns $w.file_pane.out.sby -sticky nsew
@@ -264,10 +263,10 @@ constructor new {i_commit i_path i_jump} {
-background $active_color \
-font font_ui
$w_cviewer tag raise sel
- ${NS}::scrollbar $w.file_pane.cm.sbx \
+ ttk::scrollbar $w.file_pane.cm.sbx \
-orient h \
-command [list $w_cviewer xview]
- ${NS}::scrollbar $w.file_pane.cm.sby \
+ ttk::scrollbar $w.file_pane.cm.sby \
-orient v \
-command [list $w_cviewer yview]
pack $w.file_pane.cm.sby -side right -fill y
@@ -426,6 +425,7 @@ method _kill {} {
method _load {jump} {
variable group_colors
+ global hashlength
_hide_tooltip $this
@@ -436,7 +436,7 @@ method _load {jump} {
$i conf -state normal
$i delete 0.0 end
foreach g [$i tag names] {
- if {[regexp {^g[0-9a-f]{40}$} $g]} {
+ if {[regexp [string map "@@ $hashlength" {^g[0-9a-f]{@@}$}] $g]} {
$i tag delete $g
}
}
@@ -470,7 +470,7 @@ method _load {jump} {
$w_path conf -text [escape_path $path]
set do_textconv 0
- if {![is_config_false gui.textconv] && [git-version >= 1.7.2]} {
+ if {![is_config_false gui.textconv]} {
set filter [gitattr $path diff set]
set textconv [get_config [join [list diff $filter textconv] .]]
if {$filter ne {set} && $textconv ne {}} {
@@ -481,14 +481,14 @@ method _load {jump} {
if {$do_textconv ne 0} {
set fd [open_cmd_pipe $textconv $path]
} else {
- set fd [open $path r]
+ set fd [safe_open_file $path r]
}
fconfigure $fd -eofchar {}
} else {
if {$do_textconv ne 0} {
- set fd [git_read cat-file --textconv "$commit:$path"]
+ set fd [git_read [list cat-file --textconv "$commit:$path"]]
} else {
- set fd [git_read cat-file blob "$commit:$path"]
+ set fd [git_read [list cat-file blob "$commit:$path"]]
}
}
fconfigure $fd \
@@ -500,6 +500,8 @@ method _load {jump} {
}
method _history_menu {} {
+ global hashlength
+
set m $w.backmenu
if {[winfo exists $m]} {
$m delete 0 end
@@ -513,7 +515,7 @@ method _history_menu {} {
set c [lindex $e 0]
set f [lindex $e 1]
- if {[regexp {^[0-9a-f]{40}$} $c]} {
+ if {[regexp [string map "@@ $hashlength" {^[0-9a-f]{@@}$}] $c]} {
set t [string range $c 0 8]...
} elseif {$c eq {}} {
set t {Working Directory}
@@ -617,7 +619,7 @@ method _exec_blame {cur_w cur_d options cur_s} {
}
lappend options -- $path
- set fd [eval git_read --nice blame $options]
+ set fd [git_read_nice [concat blame $options]]
fconfigure $fd -blocking 0 -translation lf -encoding utf-8
fileevent $fd readable [cb _read_blame $fd $cur_w $cur_d]
set current_fd $fd
@@ -627,6 +629,7 @@ method _exec_blame {cur_w cur_d options cur_s} {
method _read_blame {fd cur_w cur_d} {
upvar #0 $cur_d line_data
variable group_colors
+ global hashlength nullid
if {$fd ne $current_fd} {
catch {close $fd}
@@ -635,7 +638,7 @@ method _read_blame {fd cur_w cur_d} {
$cur_w conf -state normal
while {[gets $fd line] >= 0} {
- if {[regexp {^([a-z0-9]{40}) (\d+) (\d+) (\d+)$} $line line \
+ if {[regexp [string map "@@ $hashlength" {^([a-z0-9]{@@}) (\d+) (\d+) (\d+)$}] $line line \
cmit original_line final_line line_count]} {
set r_commit $cmit
set r_orig_line $original_line
@@ -648,7 +651,7 @@ method _read_blame {fd cur_w cur_d} {
set oln $r_orig_line
set cmit $r_commit
- if {[regexp {^0{40}$} $cmit]} {
+ if {$cmit eq $nullid} {
set commit_abbr work
set commit_type curr_commit
} elseif {$cmit eq $commit} {
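Working-copy lines in the blame output are now recognized by comparing the commit field against the precomputed $nullid instead of a hard-coded ^0{40}$ regexp, so the test also holds for 64-character SHA-256 ids. In isolation:

    set hashlength 40
    set nullid [string repeat 0 $hashlength]
    set cmit   [string repeat 0 $hashlength]   ;# what blame reports for uncommitted lines
    puts [expr {$cmit eq $nullid}]             ;# 1 -> label the line "work"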
@@ -807,9 +810,7 @@ method _read_blame {fd cur_w cur_d} {
# thorough copy search; insert before the threshold
set original_options [linsert $original_options 0 -C]
}
- if {[git-version >= 1.5.3]} {
- lappend original_options -w ; # ignore indentation changes
- }
+ lappend original_options -w ; # ignore indentation changes
_exec_blame $this $w_amov @amov_data \
$original_options \
@@ -857,9 +858,7 @@ method _fullcopyblame {} {
set threshold [get_config gui.copyblamethreshold]
set original_options [list -C -C "-C$threshold"]
- if {[git-version >= 1.5.3]} {
- lappend original_options -w ; # ignore indentation changes
- }
+ lappend original_options -w ; # ignore indentation changes
# Find the line range
set pos @$::cursorX,$::cursorY
@@ -986,7 +985,7 @@ method _showcommit {cur_w lno} {
if {[catch {set msg $header($cmit,message)}]} {
set msg {}
catch {
- set fd [git_read cat-file commit $cmit]
+ set fd [git_read [list cat-file commit $cmit]]
fconfigure $fd -encoding binary -translation lf
# By default commits are assumed to be in utf-8
set enc utf-8
@@ -1134,7 +1133,7 @@ method _blameparent {} {
} else {
set diffcmd [list diff-tree --unified=0 $cparent $cmit -- $new_path]
}
- if {[catch {set fd [eval git_read $diffcmd]} err]} {
+ if {[catch {set fd [git_read $diffcmd]} err]} {
$status_operation stop [mc "Unable to display parent"]
error_popup [strcat [mc "Error loading diff:"] "\n\n$err"]
return
@@ -1298,7 +1297,7 @@ method _open_tooltip {cur_w} {
# On MacOS raising a window causes it to acquire focus.
# Tk 8.5 on MacOS seems to properly support wm transient,
# so we can safely counter the effect there.
- if {$::have_tk85 && [is_MacOSX]} {
+ if {[is_MacOSX]} {
update
if {$w eq {}} {
raise .
diff --git a/git-gui/lib/branch.tcl b/git-gui/lib/branch.tcl
index 8b0c485889..39e0f2dc98 100644
--- a/git-gui/lib/branch.tcl
+++ b/git-gui/lib/branch.tcl
@@ -7,7 +7,7 @@ proc load_all_heads {} {
set rh refs/heads
set rh_len [expr {[string length $rh] + 1}]
set all_heads [list]
- set fd [git_read for-each-ref --format=%(refname) $rh]
+ set fd [git_read [list for-each-ref --format=%(refname) $rh]]
fconfigure $fd -translation binary -encoding utf-8
while {[gets $fd line] > 0} {
if {!$some_heads_tracking || ![is_tracking_branch $line]} {
@@ -21,10 +21,10 @@ proc load_all_heads {} {
proc load_all_tags {} {
set all_tags [list]
- set fd [git_read for-each-ref \
+ set fd [git_read [list for-each-ref \
--sort=-taggerdate \
--format=%(refname) \
- refs/tags]
+ refs/tags]]
fconfigure $fd -translation binary -encoding utf-8
while {[gets $fd line] > 0} {
if {![regsub ^refs/tags/ $line {} name]} continue
diff --git a/git-gui/lib/branch_checkout.tcl b/git-gui/lib/branch_checkout.tcl
index d06037decc..1e6b757b35 100644
--- a/git-gui/lib/branch_checkout.tcl
+++ b/git-gui/lib/branch_checkout.tcl
@@ -10,7 +10,6 @@ field opt_fetch 1; # refetch tracking branch if used?
field opt_detach 0; # force a detached head case?
constructor dialog {} {
- global use_ttk NS
make_dialog top w
wm withdraw $w
wm title $top [mc "%s (%s): Checkout Branch" [appname] [reponame]]
@@ -18,16 +17,16 @@ constructor dialog {} {
wm geometry $top "+[winfo rootx .]+[winfo rooty .]"
}
- ${NS}::label $w.header -text [mc "Checkout Branch"] \
+ ttk::label $w.header -text [mc "Checkout Branch"] \
-font font_uibold -anchor center
pack $w.header -side top -fill x
- ${NS}::frame $w.buttons
- ${NS}::button $w.buttons.create -text [mc Checkout] \
+ ttk::frame $w.buttons
+ ttk::button $w.buttons.create -text [mc Checkout] \
-default active \
-command [cb _checkout]
pack $w.buttons.create -side right
- ${NS}::button $w.buttons.cancel -text [mc Cancel] \
+ ttk::button $w.buttons.cancel -text [mc Cancel] \
-command [list destroy $w]
pack $w.buttons.cancel -side right -padx 5
pack $w.buttons -side bottom -fill x -pady 10 -padx 10
@@ -36,14 +35,14 @@ constructor dialog {} {
$w_rev bind_listbox <Double-Button-1> [cb _checkout]
pack $w.rev -anchor nw -fill both -expand 1 -pady 5 -padx 5
- ${NS}::labelframe $w.options -text [mc Options]
+ ttk::labelframe $w.options -text [mc Options]
- ${NS}::checkbutton $w.options.fetch \
+ ttk::checkbutton $w.options.fetch \
-text [mc "Fetch Tracking Branch"] \
-variable @opt_fetch
pack $w.options.fetch -anchor nw
- ${NS}::checkbutton $w.options.detach \
+ ttk::checkbutton $w.options.detach \
-text [mc "Detach From Local Branch"] \
-variable @opt_detach
pack $w.options.detach -anchor nw
diff --git a/git-gui/lib/branch_create.tcl b/git-gui/lib/branch_create.tcl
index ba367d551d..9fded28b5c 100644
--- a/git-gui/lib/branch_create.tcl
+++ b/git-gui/lib/branch_create.tcl
@@ -16,7 +16,7 @@ field opt_fetch 1; # refetch tracking branch if used?
field reset_ok 0; # did the user agree to reset?
constructor dialog {} {
- global repo_config use_ttk NS
+ global repo_config
make_dialog top w
wm withdraw $w
@@ -25,39 +25,37 @@ constructor dialog {} {
wm geometry $top "+[winfo rootx .]+[winfo rooty .]"
}
- ${NS}::label $w.header -text [mc "Create New Branch"] \
+ ttk::label $w.header -text [mc "Create New Branch"] \
-font font_uibold -anchor center
pack $w.header -side top -fill x
- ${NS}::frame $w.buttons
- ${NS}::button $w.buttons.create -text [mc Create] \
+ ttk::frame $w.buttons
+ ttk::button $w.buttons.create -text [mc Create] \
-default active \
-command [cb _create]
pack $w.buttons.create -side right
- ${NS}::button $w.buttons.cancel -text [mc Cancel] \
+ ttk::button $w.buttons.cancel -text [mc Cancel] \
-command [list destroy $w]
pack $w.buttons.cancel -side right -padx 5
pack $w.buttons -side bottom -fill x -pady 10 -padx 10
- ${NS}::labelframe $w.desc -text [mc "Branch Name"]
- ${NS}::radiobutton $w.desc.name_r \
+ ttk::labelframe $w.desc -text [mc "Branch Name"]
+ ttk::radiobutton $w.desc.name_r \
-text [mc "Name:"] \
-value user \
-variable @name_type
- if {!$use_ttk} {$w.desc.name_r configure -anchor w}
set w_name $w.desc.name_t
- ${NS}::entry $w_name \
+ ttk::entry $w_name \
-width 40 \
-textvariable @name \
-validate key \
-validatecommand [cb _validate %d %S]
grid $w.desc.name_r $w_name -sticky we -padx {0 5}
- ${NS}::radiobutton $w.desc.match_r \
+ ttk::radiobutton $w.desc.match_r \
-text [mc "Match Tracking Branch Name"] \
-value match \
-variable @name_type
- if {!$use_ttk} {$w.desc.match_r configure -anchor w}
grid $w.desc.match_r -sticky we -padx {0 5} -columnspan 2
grid columnconfigure $w.desc 1 -weight 1
@@ -66,34 +64,34 @@ constructor dialog {} {
set w_rev [::choose_rev::new $w.rev [mc "Starting Revision"]]
pack $w.rev -anchor nw -fill both -expand 1 -pady 5 -padx 5
- ${NS}::labelframe $w.options -text [mc Options]
+ ttk::labelframe $w.options -text [mc Options]
- ${NS}::frame $w.options.merge
- ${NS}::label $w.options.merge.l -text [mc "Update Existing Branch:"]
+ ttk::frame $w.options.merge
+ ttk::label $w.options.merge.l -text [mc "Update Existing Branch:"]
pack $w.options.merge.l -side left
- ${NS}::radiobutton $w.options.merge.no \
+ ttk::radiobutton $w.options.merge.no \
-text [mc No] \
-value none \
-variable @opt_merge
pack $w.options.merge.no -side left
- ${NS}::radiobutton $w.options.merge.ff \
+ ttk::radiobutton $w.options.merge.ff \
-text [mc "Fast Forward Only"] \
-value ff \
-variable @opt_merge
pack $w.options.merge.ff -side left
- ${NS}::radiobutton $w.options.merge.reset \
+ ttk::radiobutton $w.options.merge.reset \
-text [mc Reset] \
-value reset \
-variable @opt_merge
pack $w.options.merge.reset -side left
pack $w.options.merge -anchor nw
- ${NS}::checkbutton $w.options.fetch \
+ ttk::checkbutton $w.options.fetch \
-text [mc "Fetch Tracking Branch"] \
-variable @opt_fetch
pack $w.options.fetch -anchor nw
- ${NS}::checkbutton $w.options.checkout \
+ ttk::checkbutton $w.options.checkout \
-text [mc "Checkout After Creation"] \
-variable @opt_checkout
pack $w.options.checkout -anchor nw
diff --git a/git-gui/lib/branch_delete.tcl b/git-gui/lib/branch_delete.tcl
index a5051637bb..deac74a644 100644
--- a/git-gui/lib/branch_delete.tcl
+++ b/git-gui/lib/branch_delete.tcl
@@ -9,7 +9,7 @@ field w_check ; # revision picker for merge test
field w_delete ; # delete button
constructor dialog {} {
- global current_branch use_ttk NS
+ global current_branch
make_dialog top w
wm withdraw $w
@@ -18,25 +18,25 @@ constructor dialog {} {
wm geometry $top "+[winfo rootx .]+[winfo rooty .]"
}
- ${NS}::label $w.header -text [mc "Delete Local Branch"] \
+ ttk::label $w.header -text [mc "Delete Local Branch"] \
-font font_uibold -anchor center
pack $w.header -side top -fill x
- ${NS}::frame $w.buttons
+ ttk::frame $w.buttons
set w_delete $w.buttons.delete
- ${NS}::button $w_delete \
+ ttk::button $w_delete \
-text [mc Delete] \
-default active \
-state disabled \
-command [cb _delete]
pack $w_delete -side right
- ${NS}::button $w.buttons.cancel \
+ ttk::button $w.buttons.cancel \
-text [mc Cancel] \
-command [list destroy $w]
pack $w.buttons.cancel -side right -padx 5
pack $w.buttons -side bottom -fill x -pady 10 -padx 10
- ${NS}::labelframe $w.list -text [mc "Local Branches"]
+ ttk::labelframe $w.list -text [mc "Local Branches"]
set w_heads $w.list.l
slistbox $w_heads \
-height 10 \
diff --git a/git-gui/lib/branch_rename.tcl b/git-gui/lib/branch_rename.tcl
index 3a2d79a9cc..7a3b39d6a3 100644
--- a/git-gui/lib/branch_rename.tcl
+++ b/git-gui/lib/branch_rename.tcl
@@ -8,7 +8,7 @@ field oldname
field newname
constructor dialog {} {
- global current_branch use_ttk NS
+ global current_branch
make_dialog top w
wm withdraw $w
@@ -20,31 +20,27 @@ constructor dialog {} {
set oldname $current_branch
set newname [get_config gui.newbranchtemplate]
- ${NS}::label $w.header -text [mc "Rename Branch"]\
+ ttk::label $w.header -text [mc "Rename Branch"]\
-font font_uibold -anchor center
pack $w.header -side top -fill x
- ${NS}::frame $w.buttons
- ${NS}::button $w.buttons.rename -text [mc Rename] \
+ ttk::frame $w.buttons
+ ttk::button $w.buttons.rename -text [mc Rename] \
-default active \
-command [cb _rename]
pack $w.buttons.rename -side right
- ${NS}::button $w.buttons.cancel -text [mc Cancel] \
+ ttk::button $w.buttons.cancel -text [mc Cancel] \
-command [list destroy $w]
pack $w.buttons.cancel -side right -padx 5
pack $w.buttons -side bottom -fill x -pady 10 -padx 10
- ${NS}::frame $w.rename
- ${NS}::label $w.rename.oldname_l -text [mc "Branch:"]
- if {$use_ttk} {
- ttk::combobox $w.rename.oldname_m -textvariable @oldname \
- -values [load_all_heads] -state readonly
- } else {
- eval tk_optionMenu $w.rename.oldname_m @oldname [load_all_heads]
- }
+ ttk::frame $w.rename
+ ttk::label $w.rename.oldname_l -text [mc "Branch:"]
+ ttk::combobox $w.rename.oldname_m -textvariable @oldname \
+ -values [load_all_heads] -state readonly
- ${NS}::label $w.rename.newname_l -text [mc "New Name:"]
- ${NS}::entry $w.rename.newname_t \
+ ttk::label $w.rename.newname_l -text [mc "New Name:"]
+ ttk::entry $w.rename.newname_t \
-width 40 \
-textvariable @newname \
-validate key \
diff --git a/git-gui/lib/browser.tcl b/git-gui/lib/browser.tcl
index a982983667..f53eb952cf 100644
--- a/git-gui/lib/browser.tcl
+++ b/git-gui/lib/browser.tcl
@@ -21,7 +21,7 @@ field browser_busy 1
field ls_buf {}; # Buffered record output from ls-tree
constructor new {commit {path {}}} {
- global cursor_ptr M1B use_ttk NS
+ global cursor_ptr M1B
make_dialog top w
wm withdraw $top
wm title $top [mc "%s (%s): File Browser" [appname] [reponame]]
@@ -35,15 +35,14 @@ constructor new {commit {path {}}} {
set browser_commit $commit
set browser_path "$browser_commit:[escape_path $path]"
- ${NS}::label $w.path \
+ ttk::label $w.path \
-textvariable @browser_path \
-anchor w \
-justify left \
-font font_uibold
- if {!$use_ttk} { $w.path configure -borderwidth 1 -relief sunken}
pack $w.path -anchor w -side top -fill x
- ${NS}::frame $w.list
+ ttk::frame $w.list
set w_list $w.list.l
text $w_list -background white -foreground black \
-borderwidth 0 \
@@ -55,18 +54,17 @@ constructor new {commit {path {}}} {
-xscrollcommand [list $w.list.sbx set] \
-yscrollcommand [list $w.list.sby set]
rmsel_tag $w_list
- ${NS}::scrollbar $w.list.sbx -orient h -command [list $w_list xview]
- ${NS}::scrollbar $w.list.sby -orient v -command [list $w_list yview]
+ ttk::scrollbar $w.list.sbx -orient h -command [list $w_list xview]
+ ttk::scrollbar $w.list.sby -orient v -command [list $w_list yview]
pack $w.list.sbx -side bottom -fill x
pack $w.list.sby -side right -fill y
pack $w_list -side left -fill both -expand 1
pack $w.list -side top -fill both -expand 1
- ${NS}::label $w.status \
+ ttk::label $w.status \
-textvariable @browser_status \
-anchor w \
-justify left
- if {!$use_ttk} { $w.status configure -borderwidth 1 -relief sunken}
pack $w.status -anchor w -side bottom -fill x
bind $w_list <Button-1> "[cb _click 0 @%x,%y];break"
@@ -196,7 +194,7 @@ method _ls {tree_id {name {}}} {
lappend browser_stack [list $tree_id $name]
$w conf -state disabled
- set fd [git_read ls-tree -z $tree_id]
+ set fd [git_read [list ls-tree -z $tree_id]]
fconfigure $fd -blocking 0 -translation binary -encoding utf-8
fileevent $fd readable [cb _read $fd]
}
@@ -269,7 +267,6 @@ field w ; # widget path
field w_rev ; # mega-widget to pick the initial revision
constructor dialog {} {
- global use_ttk NS
make_dialog top w
wm withdraw $top
wm title $top [mc "%s (%s): Browse Branch Files" [appname] [reponame]]
@@ -278,18 +275,18 @@ constructor dialog {} {
wm transient $top .
}
- ${NS}::label $w.header \
+ ttk::label $w.header \
-text [mc "Browse Branch Files"] \
-font font_uibold \
-anchor center
pack $w.header -side top -fill x
- ${NS}::frame $w.buttons
- ${NS}::button $w.buttons.browse -text [mc Browse] \
+ ttk::frame $w.buttons
+ ttk::button $w.buttons.browse -text [mc Browse] \
-default active \
-command [cb _open]
pack $w.buttons.browse -side right
- ${NS}::button $w.buttons.cancel -text [mc Cancel] \
+ ttk::button $w.buttons.cancel -text [mc Cancel] \
-command [list destroy $w]
pack $w.buttons.cancel -side right -padx 5
pack $w.buttons -side bottom -fill x -pady 10 -padx 10
diff --git a/git-gui/lib/checkout_op.tcl b/git-gui/lib/checkout_op.tcl
index 21ea768d80..987486a4b6 100644
--- a/git-gui/lib/checkout_op.tcl
+++ b/git-gui/lib/checkout_op.tcl
@@ -151,7 +151,7 @@ method _finish_fetch {ok} {
}
method _update_ref {} {
- global null_sha1 current_branch repo_config
+ global nullid current_branch repo_config
set ref $new_ref
set new $new_hash
@@ -177,7 +177,7 @@ method _update_ref {} {
}
set reflog_msg "branch: Created from $new_expr"
- set cur $null_sha1
+ set cur $nullid
if {($repo_config(branch.autosetupmerge) eq {true}
|| $repo_config(branch.autosetupmerge) eq {always})
@@ -304,12 +304,12 @@ The rescan will be automatically started now.
_readtree $this
} else {
ui_status [mc "Refreshing file status..."]
- set fd [git_read update-index \
+ set fd [git_read [list update-index \
-q \
--unmerged \
--ignore-missing \
--refresh \
- ]
+ ]]
fconfigure $fd -blocking 0 -translation binary
fileevent $fd readable [cb _refresh_wait $fd]
}
@@ -345,14 +345,15 @@ method _readtree {} {
[mc "Updating working directory to '%s'..." [_name $this]] \
[mc "files checked out"]]
- set fd [git_read --stderr read-tree \
+ set fd [git_read [list read-tree \
-m \
-u \
-v \
--exclude-per-directory=.gitignore \
$HEAD \
$new_hash \
- ]
+ ] \
+ [list 2>@1]]
fconfigure $fd -blocking 0 -translation binary
fileevent $fd readable [cb _readtree_wait $fd $status_bar_operation]
}
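git_read no longer understands a --stderr pseudo-option; the caller passes the redirection as a second list, here [list 2>@1], which in Tcl's pipeline syntax folds the child's stderr into the channel being read. A stand-alone illustration of that redirection, using a deliberately bogus flag so git writes a diagnostic to stderr:

    set fd [open {|git read-tree --no-such-flag 2>@1} r]
    set out [read $fd]
    catch {close $fd}    ;# the non-zero exit surfaces here
    puts $out            ;# ...but the diagnostic text was captured, not lost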
@@ -510,18 +511,8 @@ method _update_repo_state {} {
delete_this
}
-git-version proc _detach_HEAD {log new} {
- >= 1.5.3 {
- git update-ref --no-deref -m $log HEAD $new
- }
- default {
- set p [gitdir HEAD]
- file delete $p
- set fd [open $p w]
- fconfigure $fd -translation lf -encoding utf-8
- puts $fd $new
- close $fd
- }
+proc _detach_HEAD {log new} {
+ git update-ref --no-deref -m $log HEAD $new
}
method _confirm_reset {cur} {
@@ -582,7 +573,7 @@ method _confirm_reset {cur} {
pack $w.buttons.cancel -side right -padx 5
pack $w.buttons -side bottom -fill x -pady 10 -padx 10
- set fd [git_read rev-list --pretty=oneline $cur ^$new_hash]
+ set fd [git_read [list rev-list --pretty=oneline $cur ^$new_hash]]
while {[gets $fd line] > 0} {
set abbr [string range $line 0 7]
set subj [string range $line 41 end]
diff --git a/git-gui/lib/choose_font.tcl b/git-gui/lib/choose_font.tcl
index ebe50bd7d0..a90908a8ec 100644
--- a/git-gui/lib/choose_font.tcl
+++ b/git-gui/lib/choose_font.tcl
@@ -17,7 +17,6 @@ variable all_families [list] ; # All fonts known to Tk
constructor pick {path title a_family a_size} {
variable all_families
- global use_ttk NS
set v_family $a_family
set v_size $a_size
@@ -33,25 +32,25 @@ constructor pick {path title a_family a_size} {
wm title $top "[appname] ([reponame]): $title"
wm geometry $top "+[winfo rootx $path]+[winfo rooty $path]"
- ${NS}::label $w.header -text $title -font font_uibold -anchor center
+ ttk::label $w.header -text $title -font font_uibold -anchor center
pack $w.header -side top -fill x
- ${NS}::frame $w.buttons
- ${NS}::button $w.buttons.select \
+ ttk::frame $w.buttons
+ ttk::button $w.buttons.select \
-text [mc Select] \
-default active \
-command [cb _select]
- ${NS}::button $w.buttons.cancel \
+ ttk::button $w.buttons.cancel \
-text [mc Cancel] \
-command [list destroy $w]
pack $w.buttons.select -side right
pack $w.buttons.cancel -side right -padx 5
pack $w.buttons -side bottom -fill x -pady 10 -padx 10
- ${NS}::frame $w.inner
+ ttk::frame $w.inner
- ${NS}::frame $w.inner.family
- ${NS}::label $w.inner.family.l \
+ ttk::frame $w.inner.family
+ ttk::label $w.inner.family.l \
-text [mc "Font Family"] \
-anchor w
set w_family $w.inner.family.v
@@ -66,13 +65,13 @@ constructor pick {path title a_family a_size} {
-height 10 \
-yscrollcommand [list $w.inner.family.sby set]
rmsel_tag $w_family
- ${NS}::scrollbar $w.inner.family.sby -command [list $w_family yview]
+ ttk::scrollbar $w.inner.family.sby -command [list $w_family yview]
pack $w.inner.family.l -side top -fill x
pack $w.inner.family.sby -side right -fill y
pack $w_family -fill both -expand 1
- ${NS}::frame $w.inner.size
- ${NS}::label $w.inner.size.l \
+ ttk::frame $w.inner.size
+ ttk::label $w.inner.size.l \
-text [mc "Font Size"] \
-anchor w
tspinbox $w.inner.size.v \
@@ -88,8 +87,8 @@ constructor pick {path title a_family a_size} {
grid columnconfigure $w.inner 0 -weight 1
pack $w.inner -fill both -expand 1 -padx 5 -pady 5
- ${NS}::frame $w.example
- ${NS}::label $w.example.l \
+ ttk::frame $w.example
+ ttk::label $w.example.l \
-text [mc "Font Example"] \
-anchor w
set w_example $w.example.t
diff --git a/git-gui/lib/choose_repository.tcl b/git-gui/lib/choose_repository.tcl
index d23abedcb3..7e1462a20c 100644
--- a/git-gui/lib/choose_repository.tcl
+++ b/git-gui/lib/choose_repository.tcl
@@ -10,22 +10,12 @@ field w_next ; # Next button
field w_quit ; # Quit button
field o_cons ; # Console object (if active)
-# Status mega-widget instance during _do_clone2 (used by _copy_files and
-# _link_files). Widget is destroyed before _do_clone2 calls
-# _do_clone_checkout
-field o_status
-
-# Operation displayed by status mega-widget during _do_clone_checkout =>
-# _readtree_wait => _postcheckout_wait => _do_clone_submodules =>
-# _do_validate_submodule_cloning. The status mega-widget is a different
-# instance than that stored in $o_status in earlier operations.
-field o_status_op
-
field w_types ; # List of type buttons in clone
field w_recentlist ; # Listbox containing recent repositories
field w_localpath ; # Entry widget bound to local_path
field done 0 ; # Finished picking the repository?
+field clone_ok false ; # clone succeeded
field local_path {} ; # Where this repository is locally
field origin_url {} ; # Where we are cloning from
field origin_name origin ; # What we shall call 'origin'
@@ -35,7 +25,7 @@ field readtree_err ; # Error output from read-tree (if any)
field sorted_recent ; # recent repositories (sorted)
constructor pick {} {
- global M1T M1B use_ttk NS
+ global M1T M1B
if {[set maxrecent [get_config gui.maxrecentrepo]] eq {}} {
set maxrecent 10
@@ -88,7 +78,7 @@ constructor pick {} {
set w_body $w.body
set opts $w_body.options
- ${NS}::frame $w_body
+ ttk::frame $w_body
text $opts \
-cursor $::cursor_ptr \
-relief flat \
@@ -158,8 +148,8 @@ constructor pick {} {
set lenrecent $maxrecent
}
- ${NS}::label $w_body.space
- ${NS}::label $w_body.recentlabel \
+ ttk::label $w_body.space
+ ttk::label $w_body.recentlabel \
-anchor w \
-text [mc "Open Recent Repository:"]
set w_recentlist $w_body.recentlist
@@ -199,10 +189,10 @@ constructor pick {} {
}
pack $w_body -fill x -padx 10 -pady 10
- ${NS}::frame $w.buttons
+ ttk::frame $w.buttons
set w_next $w.buttons.next
set w_quit $w.buttons.quit
- ${NS}::button $w_quit \
+ ttk::button $w_quit \
-text [mc "Quit"] \
-command exit
pack $w_quit -side right -padx 5
@@ -303,10 +293,9 @@ method _open_recent_path {p} {
}
method _next {action} {
- global NS
destroy $w_body
if {![winfo exists $w_next]} {
- ${NS}::button $w_next -default active
+ ttk::button $w_next -default active
set pos -before
if {[tk windowingsystem] eq "win32"} { set pos -after }
pack $w_next -side right -padx 5 $pos $w_quit
@@ -323,7 +312,7 @@ method _write_local_path {args} {
}
method _git_init {} {
- if {[catch {file mkdir $local_path} err]} {
+ if {[catch {git init $local_path} err]} {
error_popup [strcat \
[mc "Failed to create repository %s:" $local_path] \
"\n\n$err"]
@@ -337,13 +326,6 @@ method _git_init {} {
return 0
}
- if {[catch {git init} err]} {
- error_popup [strcat \
- [mc "Failed to create repository %s:" $local_path] \
- "\n\n$err"]
- return 0
- }
-
_append_recentrepos [pwd]
set ::_gitdir .git
set ::_prefix {}
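Creating a new repository is now a single "git init <path>"; git creates the directory itself when it does not exist, which is why the separate file mkdir / cd / git init sequence above could be dropped. A scratch illustration (the path is made up):

    set dir [file join [pwd] scratch-repo]          ;# hypothetical location
    exec git init $dir
    puts [file isdirectory [file join $dir .git]]   ;# 1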
@@ -360,44 +342,29 @@ proc _is_git {path {outdir_var ""}} {
return 1
}
-proc _objdir {path} {
- set objdir [file join $path .git objects]
- if {[file isdirectory $objdir]} {
- return $objdir
- }
-
- set objdir [file join $path objects]
- if {[file isdirectory $objdir]} {
- return $objdir
- }
-
- return {}
-}
-
######################################################################
##
## Create New Repository
method _do_new {} {
- global use_ttk NS
$w_next conf \
-state disabled \
-command [cb _do_new2] \
-text [mc "Create"]
- ${NS}::frame $w_body
- ${NS}::label $w_body.h \
+ ttk::frame $w_body
+ ttk::label $w_body.h \
-font font_uibold -anchor center \
-text [mc "Create New Repository"]
pack $w_body.h -side top -fill x -pady 10
pack $w_body -fill x -padx 10
- ${NS}::frame $w_body.where
- ${NS}::label $w_body.where.l -text [mc "Directory:"]
- ${NS}::entry $w_body.where.t \
+ ttk::frame $w_body.where
+ ttk::label $w_body.where.l -text [mc "Directory:"]
+ ttk::entry $w_body.where.t \
-textvariable @local_path \
-width 50
- ${NS}::button $w_body.where.b \
+ ttk::button $w_body.where.b \
-text [mc "Browse"] \
-command [cb _new_local_path]
set w_localpath $w_body.where.t
@@ -463,56 +430,55 @@ proc _new_ok {p} {
## Clone Existing Repository
method _do_clone {} {
- global use_ttk NS
$w_next conf \
-state disabled \
-command [cb _do_clone2] \
-text [mc "Clone"]
- ${NS}::frame $w_body
- ${NS}::label $w_body.h \
+ ttk::frame $w_body
+ ttk::label $w_body.h \
-font font_uibold -anchor center \
-text [mc "Clone Existing Repository"]
pack $w_body.h -side top -fill x -pady 10
pack $w_body -fill x -padx 10
set args $w_body.args
- ${NS}::frame $w_body.args
+ ttk::frame $w_body.args
pack $args -fill both
- ${NS}::label $args.origin_l -text [mc "Source Location:"]
- ${NS}::entry $args.origin_t \
+ ttk::label $args.origin_l -text [mc "Source Location:"]
+ ttk::entry $args.origin_t \
-textvariable @origin_url \
-width 50
- ${NS}::button $args.origin_b \
+ ttk::button $args.origin_b \
-text [mc "Browse"] \
-command [cb _open_origin]
grid $args.origin_l $args.origin_t $args.origin_b -sticky ew
- ${NS}::label $args.where_l -text [mc "Target Directory:"]
- ${NS}::entry $args.where_t \
+ ttk::label $args.where_l -text [mc "Target Directory:"]
+ ttk::entry $args.where_t \
-textvariable @local_path \
-width 50
- ${NS}::button $args.where_b \
+ ttk::button $args.where_b \
-text [mc "Browse"] \
-command [cb _new_local_path]
grid $args.where_l $args.where_t $args.where_b -sticky ew
set w_localpath $args.where_t
- ${NS}::label $args.type_l -text [mc "Clone Type:"]
- ${NS}::frame $args.type_f
+ ttk::label $args.type_l -text [mc "Clone Type:"]
+ ttk::frame $args.type_f
set w_types [list]
- lappend w_types [${NS}::radiobutton $args.type_f.hardlink \
+ lappend w_types [ttk::radiobutton $args.type_f.hardlink \
-state disabled \
-text [mc "Standard (Fast, Semi-Redundant, Hardlinks)"] \
-variable @clone_type \
-value hardlink]
- lappend w_types [${NS}::radiobutton $args.type_f.full \
+ lappend w_types [ttk::radiobutton $args.type_f.full \
-state disabled \
-text [mc "Full Copy (Slower, Redundant Backup)"] \
-variable @clone_type \
-value full]
- lappend w_types [${NS}::radiobutton $args.type_f.shared \
+ lappend w_types [ttk::radiobutton $args.type_f.shared \
-state disabled \
-text [mc "Shared (Fastest, Not Recommended, No Backup)"] \
-variable @clone_type \
@@ -520,7 +486,7 @@ method _do_clone {} {
foreach r $w_types {
pack $r -anchor w
}
- ${NS}::checkbutton $args.type_f.recursive \
+ ttk::checkbutton $args.type_f.recursive \
-text [mc "Recursively clone submodules too"] \
-variable @recursive \
-onvalue true -offvalue false
@@ -588,6 +554,25 @@ method _update_clone {args} {
method _do_clone2 {} {
if {[file isdirectory $origin_url]} {
set origin_url [file normalize $origin_url]
+ if {$clone_type eq {hardlink}} {
+ # cannot use hardlinks if this is a linked worktree (.gitfile or git-new-workdir)
+ if {[git -C $origin_url rev-parse --is-inside-work-tree] == {true}} {
+ set islink 0
+ set dotgit [file join $origin_url .git]
+ if {[file isfile $dotgit]} {
+ set islink 1
+ } else {
+ set objdir [file join $dotgit objects]
+ if {[file exists $objdir] && [file type $objdir] == {link}} {
+ set islink 1
+ }
+ }
+ if {$islink} {
+ info_popup [mc "Hardlinks are unavailable. Falling back to copying."]
+ set clone_type full
+ }
+ }
+ }
}
if {$clone_type eq {hardlink} && ![file isdirectory $origin_url]} {
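The new block above probes whether a local clone source is a linked worktree, either a .git file (a gitfile) or an objects directory that is a symlink as git-new-workdir sets up, and downgrades a requested hardlink clone to a full copy. The same probe as a stand-alone predicate:

    proc is_linked_worktree {worktree} {
        set dotgit [file join $worktree .git]
        if {[file isfile $dotgit]} { return 1 }
        set objdir [file join $dotgit objects]
        expr {[file exists $objdir] && [file type $objdir] eq "link"}
    }
    # e.g. puts [is_linked_worktree /path/to/checkout]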
@@ -599,14 +584,6 @@ method _do_clone2 {} {
return
}
- if {$clone_type eq {hardlink} || $clone_type eq {shared}} {
- set objdir [_objdir $origin_url]
- if {$objdir eq {}} {
- error_popup [mc "Not a Git repository: %s" [file tail $origin_url]]
- return
- }
- }
-
set giturl $origin_url
if {[file exists $local_path]} {
@@ -614,458 +591,86 @@ method _do_clone2 {} {
return
}
- if {![_git_init $this]} return
- set local_path [pwd]
-
- if {[catch {
- git config remote.$origin_name.url $giturl
- git config remote.$origin_name.fetch +refs/heads/*:refs/remotes/$origin_name/*
- } err]} {
- error_popup [strcat [mc "Failed to configure origin"] "\n\n$err"]
- return
+ set clone_options {--progress}
+ if {$recursive} {
+ append clone_options { --recurse-submodules}
}
destroy $w_body $w_next
switch -exact -- $clone_type {
- hardlink {
- set o_status [status_bar::two_line $w_body]
- pack $w_body -fill x -padx 10 -pady 10
-
- set status_op [$o_status start \
- [mc "Counting objects"] \
- [mc "buckets"]]
- update
-
- if {[file exists [file join $objdir info alternates]]} {
- set pwd [pwd]
- if {[catch {
- file mkdir [gitdir objects info]
- set f_in [open [file join $objdir info alternates] r]
- set f_cp [open [gitdir objects info alternates] w]
- fconfigure $f_in -translation binary -encoding binary
- fconfigure $f_cp -translation binary -encoding binary
- cd $objdir
- while {[gets $f_in line] >= 0} {
- puts $f_cp [file normalize $line]
- }
- close $f_in
- close $f_cp
- cd $pwd
- } err]} {
- catch {cd $pwd}
- _clone_failed $this [mc "Unable to copy objects/info/alternates: %s" $err]
- $status_op stop
- return
- }
+ full {
+ append clone_options { --no-hardlinks --no-local}
}
-
- set tolink [list]
- set buckets [glob \
- -tails \
- -nocomplain \
- -directory [file join $objdir] ??]
- set bcnt [expr {[llength $buckets] + 2}]
- set bcur 1
- $status_op update $bcur $bcnt
- update
-
- file mkdir [file join .git objects pack]
- foreach i [glob -tails -nocomplain \
- -directory [file join $objdir pack] *] {
- lappend tolink [file join pack $i]
- }
- $status_op update [incr bcur] $bcnt
- update
-
- foreach i $buckets {
- file mkdir [file join .git objects $i]
- foreach j [glob -tails -nocomplain \
- -directory [file join $objdir $i] *] {
- lappend tolink [file join $i $j]
- }
- $status_op update [incr bcur] $bcnt
- update
- }
- $status_op stop
-
- if {$tolink eq {}} {
- info_popup [strcat \
- [mc "Nothing to clone from %s." $origin_url] \
- "\n" \
- [mc "The 'master' branch has not been initialized."] \
- ]
- destroy $w_body
- set done 1
- return
- }
-
- set i [lindex $tolink 0]
- if {[catch {
- file link -hard \
- [file join .git objects $i] \
- [file join $objdir $i]
- } err]} {
- info_popup [mc "Hardlinks are unavailable. Falling back to copying."]
- set i [_copy_files $this $objdir $tolink]
- } else {
- set i [_link_files $this $objdir [lrange $tolink 1 end]]
+ shared {
+ append clone_options { --shared}
}
- if {!$i} return
-
- destroy $w_body
-
- set o_status {}
}
- full {
+
+ if {[catch {
set o_cons [console::embed \
$w_body \
[mc "Cloning from %s" $origin_url]]
pack $w_body -fill both -expand 1 -padx 10
$o_cons exec \
- [list git fetch --no-tags -k $origin_name] \
- [cb _do_clone_tags]
- }
- shared {
- set fd [open [gitdir objects info alternates] w]
- fconfigure $fd -translation binary
- puts $fd $objdir
- close $fd
- }
- }
-
- if {$clone_type eq {hardlink} || $clone_type eq {shared}} {
- if {![_clone_refs $this]} return
- set pwd [pwd]
- if {[catch {
- cd $origin_url
- set HEAD [git rev-parse --verify HEAD^0]
- } err]} {
- _clone_failed $this [mc "Not a Git repository: %s" [file tail $origin_url]]
- return 0
- }
- cd $pwd
- _do_clone_checkout $this $HEAD
- }
-}
-
-method _copy_files {objdir tocopy} {
- set status_op [$o_status start \
- [mc "Copying objects"] \
- [mc "KiB"]]
- set tot 0
- set cmp 0
- foreach p $tocopy {
- incr tot [file size [file join $objdir $p]]
- }
- foreach p $tocopy {
- if {[catch {
- set f_in [open [file join $objdir $p] r]
- set f_cp [open [file join .git objects $p] w]
- fconfigure $f_in -translation binary -encoding binary
- fconfigure $f_cp -translation binary -encoding binary
-
- while {![eof $f_in]} {
- incr cmp [fcopy $f_in $f_cp -size 16384]
- $status_op update \
- [expr {$cmp / 1024}] \
- [expr {$tot / 1024}]
- update
- }
-
- close $f_in
- close $f_cp
- } err]} {
- _clone_failed $this [mc "Unable to copy object: %s" $err]
- $status_op stop
- return 0
- }
- }
- $status_op stop
- return 1
-}
-
-method _link_files {objdir tolink} {
- set total [llength $tolink]
- set status_op [$o_status start \
- [mc "Linking objects"] \
- [mc "objects"]]
- for {set i 0} {$i < $total} {} {
- set p [lindex $tolink $i]
- if {[catch {
- file link -hard \
- [file join .git objects $p] \
- [file join $objdir $p]
- } err]} {
- _clone_failed $this [mc "Unable to hardlink object: %s" $err]
- $status_op stop
- return 0
- }
-
- incr i
- if {$i % 5 == 0} {
- $status_op update $i $total
- update
- }
- }
- $status_op stop
- return 1
-}
-
-method _clone_refs {} {
- set pwd [pwd]
- if {[catch {cd $origin_url} err]} {
- error_popup [mc "Not a Git repository: %s" [file tail $origin_url]]
- return 0
- }
- set fd_in [git_read for-each-ref \
- --tcl \
- {--format=list %(refname) %(objectname) %(*objectname)}]
- cd $pwd
-
- set fd [open [gitdir packed-refs] w]
- fconfigure $fd -translation binary
- puts $fd "# pack-refs with: peeled"
- while {[gets $fd_in line] >= 0} {
- set line [eval $line]
- set refn [lindex $line 0]
- set robj [lindex $line 1]
- set tobj [lindex $line 2]
-
- if {[regsub ^refs/heads/ $refn \
- "refs/remotes/$origin_name/" refn]} {
- puts $fd "$robj $refn"
- } elseif {[string match refs/tags/* $refn]} {
- puts $fd "$robj $refn"
- if {$tobj ne {}} {
- puts $fd "^$tobj"
- }
- }
- }
- close $fd_in
- close $fd
- return 1
-}
-
-method _do_clone_tags {ok} {
- if {$ok} {
- $o_cons exec \
- [list git fetch --tags -k $origin_name] \
- [cb _do_clone_HEAD]
- } else {
- $o_cons done $ok
- _clone_failed $this [mc "Cannot fetch branches and objects. See console output for details."]
+ [list git clone {*}$clone_options $origin_url $local_path] \
+ [cb _do_clone2_done]
+ } err]} {
+ error_popup [strcat [mc "Clone failed."] "\n" $err]
+ return
}
-}
-method _do_clone_HEAD {ok} {
- if {$ok} {
- $o_cons exec \
- [list git fetch $origin_name HEAD] \
- [cb _do_clone_full_end]
- } else {
- $o_cons done $ok
- _clone_failed $this [mc "Cannot fetch tags. See console output for details."]
+ tkwait variable @done
+ if {!$clone_ok} {
+ error_popup [mc "Clone failed."]
+ return
}
}
-method _do_clone_full_end {ok} {
+method _do_clone2_done {ok} {
$o_cons done $ok
-
if {$ok} {
- destroy $w_body
-
- set HEAD {}
- if {[file exists [gitdir FETCH_HEAD]]} {
- set fd [open [gitdir FETCH_HEAD] r]
- while {[gets $fd line] >= 0} {
- if {[regexp "^(.{40})\t\t" $line line HEAD]} {
- break
- }
- }
- close $fd
- }
-
- catch {git pack-refs}
- _do_clone_checkout $this $HEAD
- } else {
- _clone_failed $this [mc "Cannot determine HEAD. See console output for details."]
- }
-}
-
-method _clone_failed {{why {}}} {
- if {[catch {file delete -force $local_path} err]} {
- set why [strcat \
- $why \
- "\n\n" \
- [mc "Unable to cleanup %s" $local_path] \
- "\n\n" \
- $err]
- }
- if {$why ne {}} {
- update
- error_popup [strcat [mc "Clone failed."] "\n" $why]
- }
-}
-
-method _do_clone_checkout {HEAD} {
- if {$HEAD eq {}} {
- info_popup [strcat \
- [mc "No default branch obtained."] \
- "\n" \
- [mc "The 'master' branch has not been initialized."] \
- ]
- set done 1
- return
- }
- if {[catch {
- git update-ref HEAD $HEAD^0
+ if {[catch {
+ cd $local_path
+ set ::_gitdir .git
+ set ::_prefix {}
+ _append_recentrepos [pwd]
} err]} {
- info_popup [strcat \
- [mc "Cannot resolve %s as a commit." $HEAD^0] \
- "\n $err" \
- "\n" \
- [mc "The 'master' branch has not been initialized."] \
- ]
- set done 1
- return
- }
-
- set status [status_bar::two_line $w_body]
- pack $w_body -fill x -padx 10 -pady 10
-
- # We start the status operation here.
- #
- # This function calls _readtree_wait as a callback.
- #
- # _readtree_wait in turn either calls _do_clone_submodules directly,
- # or calls _postcheckout_wait as a callback which then calls
- # _do_clone_submodules.
- #
- # _do_clone_submodules calls _do_validate_submodule_cloning.
- #
- # _do_validate_submodule_cloning stops the status operation.
- #
- # There are no other calls into this chain from other code.
-
- set o_status_op [$status start \
- [mc "Creating working directory"] \
- [mc "files"]]
-
- set readtree_err {}
- set fd [git_read --stderr read-tree \
- -m \
- -u \
- -v \
- HEAD \
- HEAD \
- ]
- fconfigure $fd -blocking 0 -translation binary
- fileevent $fd readable [cb _readtree_wait $fd]
-}
-
-method _readtree_wait {fd} {
- set buf [read $fd]
- $o_status_op update_meter $buf
- append readtree_err $buf
-
- fconfigure $fd -blocking 1
- if {![eof $fd]} {
- fconfigure $fd -blocking 0
- return
- }
-
- if {[catch {close $fd}]} {
- set err $readtree_err
- regsub {^fatal: } $err {} err
- error_popup [strcat \
- [mc "Initial file checkout failed."] \
- "\n\n$err"]
- return
- }
-
- # -- Run the post-checkout hook.
- #
- set fd_ph [githook_read post-checkout [string repeat 0 40] \
- [git rev-parse HEAD] 1]
- if {$fd_ph ne {}} {
- global pch_error
- set pch_error {}
- fconfigure $fd_ph -blocking 0 -translation binary -eofchar {}
- fileevent $fd_ph readable [cb _postcheckout_wait $fd_ph]
- } else {
- _do_clone_submodules $this
- }
-}
-
-method _postcheckout_wait {fd_ph} {
- global pch_error
-
- append pch_error [read $fd_ph]
- fconfigure $fd_ph -blocking 1
- if {[eof $fd_ph]} {
- if {[catch {close $fd_ph}]} {
- hook_failed_popup post-checkout $pch_error 0
+ set ok 0
}
- unset pch_error
- _do_clone_submodules $this
- return
}
- fconfigure $fd_ph -blocking 0
-}
-
-method _do_clone_submodules {} {
- if {$recursive eq {true}} {
- $o_status_op stop
- set o_status_op {}
-
- destroy $w_body
-
- set o_cons [console::embed \
- $w_body \
- [mc "Cloning submodules"]]
- pack $w_body -fill both -expand 1 -padx 10
- $o_cons exec \
- [list git submodule update --init --recursive] \
- [cb _do_validate_submodule_cloning]
- } else {
- set done 1
+ if {!$ok} {
+ set ::_gitdir {}
+ set ::_prefix {}
}
+ set clone_ok $ok
+ set done 1
}
-method _do_validate_submodule_cloning {ok} {
- if {$ok} {
- $o_cons done $ok
- set done 1
- } else {
- _clone_failed $this [mc "Cannot clone submodules."]
- }
-}
######################################################################
##
## Open Existing Repository
method _do_open {} {
- global NS
$w_next conf \
-state disabled \
-command [cb _do_open2] \
-text [mc "Open"]
- ${NS}::frame $w_body
- ${NS}::label $w_body.h \
+ ttk::frame $w_body
+ ttk::label $w_body.h \
-font font_uibold -anchor center \
-text [mc "Open Existing Repository"]
pack $w_body.h -side top -fill x -pady 10
pack $w_body -fill x -padx 10
- ${NS}::frame $w_body.where
- ${NS}::label $w_body.where.l -text [mc "Repository:"]
- ${NS}::entry $w_body.where.t \
+ ttk::frame $w_body.where
+ ttk::label $w_body.where.l -text [mc "Repository:"]
+ ttk::entry $w_body.where.t \
-textvariable @local_path \
-width 50
- ${NS}::button $w_body.where.b \
+ ttk::button $w_body.where.b \
-text [mc "Browse"] \
-command [cb _open_local_path]
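The several hundred lines of hand-rolled hardlink, copy, shared and fetch-based cloning removed above are replaced by one "git clone" run in the embedded console, with the three UI clone types mapped onto clone's own flags and the constructor parked on "tkwait variable @done" until the console callback fires. How the option list is assembled, using the names from the hunk:

    set clone_type full                    ;# or hardlink / shared, per the radio buttons
    set recursive  true
    set clone_options {--progress}
    if {$recursive} { append clone_options { --recurse-submodules} }
    switch -exact -- $clone_type {
        full   { append clone_options { --no-hardlinks --no-local} }
        shared { append clone_options { --shared} }
    }
    puts "git clone $clone_options <url> <path>"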
diff --git a/git-gui/lib/choose_rev.tcl b/git-gui/lib/choose_rev.tcl
index 6dae7937d5..7a9e3c83bb 100644
--- a/git-gui/lib/choose_rev.tcl
+++ b/git-gui/lib/choose_rev.tcl
@@ -32,7 +32,7 @@ proc new_unmerged {path {title {}}} {
}
constructor _new {path unmerged_only title} {
- global current_branch is_detached use_ttk NS
+ global current_branch is_detached
if {![info exists ::all_remotes]} {
load_all_remotes
@@ -41,65 +41,60 @@ constructor _new {path unmerged_only title} {
set w $path
if {$title ne {}} {
- ${NS}::labelframe $w -text $title
+ ttk::labelframe $w -text $title
} else {
- ${NS}::frame $w
+ ttk::frame $w
}
bind $w <Destroy> [cb _delete %W]
if {$is_detached} {
- ${NS}::radiobutton $w.detachedhead_r \
+ ttk::radiobutton $w.detachedhead_r \
-text [mc "This Detached Checkout"] \
-value HEAD \
-variable @revtype
- if {!$use_ttk} {$w.detachedhead_r configure -anchor w}
grid $w.detachedhead_r -sticky we -padx {0 5} -columnspan 2
}
- ${NS}::radiobutton $w.expr_r \
+ ttk::radiobutton $w.expr_r \
-text [mc "Revision Expression:"] \
-value expr \
-variable @revtype
- ${NS}::entry $w.expr_t \
+ ttk::entry $w.expr_t \
-width 50 \
-textvariable @c_expr \
-validate key \
-validatecommand [cb _validate %d %S]
grid $w.expr_r $w.expr_t -sticky we -padx {0 5}
- ${NS}::frame $w.types
- ${NS}::radiobutton $w.types.head_r \
+ ttk::frame $w.types
+ ttk::radiobutton $w.types.head_r \
-text [mc "Local Branch"] \
-value head \
-variable @revtype
pack $w.types.head_r -side left
- ${NS}::radiobutton $w.types.trck_r \
+ ttk::radiobutton $w.types.trck_r \
-text [mc "Tracking Branch"] \
-value trck \
-variable @revtype
pack $w.types.trck_r -side left
- ${NS}::radiobutton $w.types.tag_r \
+ ttk::radiobutton $w.types.tag_r \
-text [mc "Tag"] \
-value tag \
-variable @revtype
pack $w.types.tag_r -side left
set w_filter $w.types.filter
- ${NS}::entry $w_filter \
+ ttk::entry $w_filter \
-width 12 \
-textvariable @filter \
-validate key \
-validatecommand [cb _filter %P]
pack $w_filter -side right
- pack [${NS}::label $w.types.filter_icon \
+ pack [ttk::label $w.types.filter_icon \
-image ::choose_rev::img_find \
] -side right
grid $w.types -sticky we -padx {0 5} -columnspan 2
- if {$use_ttk} {
- ttk::frame $w.list -style SListbox.TFrame -padding 2
- } else {
- frame $w.list
- }
+ ttk::frame $w.list -style SListbox.TFrame -padding 2
set w_list $w.list.l
listbox $w_list \
-font font_diff \
@@ -109,9 +104,7 @@ constructor _new {path unmerged_only title} {
-exportselection false \
-xscrollcommand [cb _sb_set $w.list.sbx h] \
-yscrollcommand [cb _sb_set $w.list.sby v]
- if {$use_ttk} {
- $w_list configure -relief flat -highlightthickness 0 -borderwidth 0
- }
+ $w_list configure -relief flat -highlightthickness 0 -borderwidth 0
pack $w_list -fill both -expand 1
grid $w.list -sticky nswe -padx {20 5} -columnspan 2
bind $w_list <Any-Motion> [cb _show_tooltip @%x,%y]
@@ -146,14 +139,14 @@ constructor _new {path unmerged_only title} {
append fmt { %(*subject)}
append fmt {]}
set all_refn [list]
- set fr_fd [git_read for-each-ref \
+ set fr_fd [git_read [list for-each-ref \
--tcl \
--sort=-taggerdate \
--format=$fmt \
refs/heads \
refs/remotes \
refs/tags \
- ]
+ ]]
fconfigure $fr_fd -translation lf -encoding utf-8
while {[gets $fr_fd line] > 0} {
set line [eval $line]
@@ -176,7 +169,7 @@ constructor _new {path unmerged_only title} {
close $fr_fd
if {$unmerged_only} {
- set fr_fd [git_read rev-list --all ^$::HEAD]
+ set fr_fd [git_read [list rev-list --all ^$::HEAD]]
while {[gets $fr_fd sha1] > 0} {
if {[catch {set rlst $cmt_refn($sha1)}]} continue
foreach refn $rlst {
@@ -238,12 +231,10 @@ constructor _new {path unmerged_only title} {
}
method none {text} {
- global NS use_ttk
if {![winfo exists $w.none_r]} {
- ${NS}::radiobutton $w.none_r \
+ ttk::radiobutton $w.none_r \
-value none \
-variable @revtype
- if {!$use_ttk} {$w.none_r configure -anchor w}
grid $w.none_r -sticky we -padx {0 5} -columnspan 2
}
$w.none_r configure -text $text
@@ -429,7 +420,6 @@ method _delete {current} {
}
method _sb_set {sb orient first last} {
- global NS
set old_focus [focus -lastfor $w]
if {$first == 0 && $last == 1} {
@@ -445,10 +435,10 @@ method _sb_set {sb orient first last} {
if {![winfo exists $sb]} {
if {$orient eq {h}} {
- ${NS}::scrollbar $sb -orient h -command [list $w_list xview]
+ ttk::scrollbar $sb -orient h -command [list $w_list xview]
pack $sb -fill x -side bottom -before $w_list
} else {
- ${NS}::scrollbar $sb -orient v -command [list $w_list yview]
+ ttk::scrollbar $sb -orient v -command [list $w_list yview]
pack $sb -fill y -side right -before $w_list
}
if {$old_focus ne {}} {
@@ -579,7 +569,7 @@ method _reflog_last {name} {
set last {}
if {[catch {set last [file mtime [gitdir $name]]}]
- && ![catch {set g [open [gitdir logs $name] r]}]} {
+ && ![catch {set g [safe_open_file [gitdir logs $name] r]}]} {
fconfigure $g -translation binary
while {[gets $g line] >= 0} {
if {[regexp {> ([1-9][0-9]*) } $line line when]} {
diff --git a/git-gui/lib/class.tcl b/git-gui/lib/class.tcl
index f08506f383..0b1e67103f 100644
--- a/git-gui/lib/class.tcl
+++ b/git-gui/lib/class.tcl
@@ -136,7 +136,6 @@ proc delete_this {{t {}}} {
proc make_dialog {t w args} {
upvar $t top $w pfx this this
- global use_ttk
uplevel [linsert $args 0 make_toplevel $t $w]
catch {wm attributes $top -type dialog}
pave_toplevel $pfx
diff --git a/git-gui/lib/commit.tcl b/git-gui/lib/commit.tcl
index a570f9cdc6..2fd57a51fb 100644
--- a/git-gui/lib/commit.tcl
+++ b/git-gui/lib/commit.tcl
@@ -27,7 +27,7 @@ You are currently in the middle of a merge that has not been fully completed. Y
if {[catch {
set name ""
set email ""
- set fd [git_read cat-file commit $curHEAD]
+ set fd [git_read [list cat-file commit $curHEAD]]
fconfigure $fd -encoding binary -translation lf
# By default commits are assumed to be in utf-8
set enc utf-8
@@ -214,12 +214,10 @@ You must stage at least 1 file before you can commit.
global comment_string
set cmt_rx [strcat {(^|\n)} [regsub -all {\W} $comment_string {\\&}] {[^\n]*}]
regsub -all $cmt_rx $msg {\1} msg
- # Strip leading empty lines
- regsub {^\n*} $msg {} msg
+ # Strip leading and trailing empty lines (puts adds one \n)
+ set msg [string trim $msg \n]
# Compress consecutive empty lines
regsub -all {\n{3,}} $msg "\n\n" msg
- # Strip trailing empty line
- regsub {\n\n$} $msg "\n" msg
if {$msg eq {}} {
error_popup [mc "Please supply a commit message.
@@ -236,7 +234,7 @@ A good commit message has the following format:
# -- Build the message file.
#
set msg_p [gitdir GITGUI_EDITMSG]
- set msg_wt [open $msg_p w]
+ set msg_wt [safe_open_file $msg_p w]
fconfigure $msg_wt -translation lf
setup_commit_encoding $msg_wt
puts $msg_wt $msg
@@ -336,7 +334,7 @@ proc commit_commitmsg_wait {fd_ph curHEAD msg_p} {
proc commit_writetree {curHEAD msg_p} {
ui_status [mc "Committing changes..."]
- set fd_wt [git_read write-tree]
+ set fd_wt [git_read [list write-tree]]
fileevent $fd_wt readable \
[list commit_committree $fd_wt $curHEAD $msg_p]
}
@@ -348,6 +346,7 @@ proc commit_committree {fd_wt curHEAD msg_p} {
global file_states selected_paths rescan_active
global repo_config
global env
+ global hashlength
gets $fd_wt tree_id
if {[catch {close $fd_wt} err]} {
@@ -361,13 +360,13 @@ proc commit_committree {fd_wt curHEAD msg_p} {
# -- Verify this wasn't an empty change.
#
if {$commit_type eq {normal}} {
- set fd_ot [git_read cat-file commit $PARENT]
+ set fd_ot [git_read [list cat-file commit $PARENT]]
fconfigure $fd_ot -encoding binary -translation lf
set old_tree [gets $fd_ot]
close $fd_ot
if {[string equal -length 5 {tree } $old_tree]
- && [string length $old_tree] == 45} {
+ && [string length $old_tree] == [expr {$hashlength + 5}]} {
set old_tree [string range $old_tree 5 end]
} else {
error [mc "Commit %s appears to be corrupt" $PARENT]
@@ -399,8 +398,8 @@ A rescan will be automatically started now.
foreach p [concat $PARENT $MERGE_HEAD] {
lappend cmd -p $p
}
- lappend cmd <$msg_p
- if {[catch {set cmt_id [eval git $cmd]} err]} {
+ set msgtxt [list <$msg_p]
+ if {[catch {set cmt_id [git_redir $cmd $msgtxt]} err]} {
catch {file delete $msg_p}
error_popup [strcat [mc "commit-tree failed:"] "\n\n$err"]
ui_status [mc "Commit failed."]
@@ -420,7 +419,7 @@ A rescan will be automatically started now.
if {$commit_type ne {normal}} {
append reflogm " ($commit_type)"
}
- set msg_fd [open $msg_p r]
+ set msg_fd [safe_open_file $msg_p r]
setup_commit_encoding $msg_fd 1
gets $msg_fd subject
close $msg_fd
diff --git a/git-gui/lib/console.tcl b/git-gui/lib/console.tcl
index fafafb81f1..267699408c 100644
--- a/git-gui/lib/console.tcl
+++ b/git-gui/lib/console.tcl
@@ -27,20 +27,20 @@ constructor embed {path title} {
}
method _init {} {
- global M1B use_ttk NS
+ global M1B
if {$is_toplevel} {
make_dialog top w -autodelete 0
wm title $top "[appname] ([reponame]): $t_short"
} else {
- ${NS}::frame $w
+ ttk::frame $w
}
set console_cr 1.0
set w_t $w.m.t
- ${NS}::frame $w.m
- ${NS}::label $w.m.l1 \
+ ttk::frame $w.m
+ ttk::label $w.m.l1 \
-textvariable @t_long \
-anchor w \
-justify left \
@@ -78,7 +78,7 @@ method _init {} {
"
if {$is_toplevel} {
- ${NS}::button $w.ok -text [mc "Close"] \
+ ttk::button $w.ok -text [mc "Close"] \
-state disabled \
-command [list destroy $w]
pack $w.ok -side bottom -anchor e -pady 10 -padx 10
@@ -92,10 +92,9 @@ method _init {} {
method exec {cmd {after {}}} {
if {[lindex $cmd 0] eq {git}} {
- set fd_f [eval git_read --stderr [lrange $cmd 1 end]]
+ set fd_f [git_read [lrange $cmd 1 end] [list 2>@1]]
} else {
- lappend cmd 2>@1
- set fd_f [_open_stdout_stderr $cmd]
+ set fd_f [safe_open_command $cmd [list 2>@1]]
}
fconfigure $fd_f -blocking 0 -translation binary -encoding [encoding system]
fileevent $fd_f readable [cb _read $fd_f $after]
@@ -208,14 +207,13 @@ method done {ok} {
}
method _sb_set {sb orient first last} {
- global NS
if {![winfo exists $sb]} {
if {$first == $last || ($first == 0 && $last == 1)} return
if {$orient eq {h}} {
- ${NS}::scrollbar $sb -orient h -command [list $w_t xview]
+ ttk::scrollbar $sb -orient h -command [list $w_t xview]
pack $sb -fill x -side bottom -before $w_t
} else {
- ${NS}::scrollbar $sb -orient v -command [list $w_t yview]
+ ttk::scrollbar $sb -orient v -command [list $w_t yview]
pack $sb -fill y -side right -before $w_t
}
}
diff --git a/git-gui/lib/database.tcl b/git-gui/lib/database.tcl
index 85783081e0..78732d8651 100644
--- a/git-gui/lib/database.tcl
+++ b/git-gui/lib/database.tcl
@@ -2,8 +2,7 @@
# Copyright (C) 2006, 2007 Shawn Pearce
proc do_stats {} {
- global use_ttk NS
- set fd [git_read count-objects -v]
+ set fd [git_read [list count-objects -v]]
while {[gets $fd line] > 0} {
if {[regexp {^([^:]+): (\d+)$} $line _ name value]} {
set stats($name) $value
@@ -26,18 +25,18 @@ proc do_stats {} {
wm withdraw $w
wm geometry $w "+[winfo rootx .]+[winfo rooty .]"
- ${NS}::frame $w.buttons
- ${NS}::button $w.buttons.close -text [mc Close] \
+ ttk::frame $w.buttons
+ ttk::button $w.buttons.close -text [mc Close] \
-default active \
-command [list destroy $w]
- ${NS}::button $w.buttons.gc -text [mc "Compress Database"] \
+ ttk::button $w.buttons.gc -text [mc "Compress Database"] \
-default normal \
-command "destroy $w;do_gc"
pack $w.buttons.close -side right
pack $w.buttons.gc -side left
pack $w.buttons -side bottom -fill x -pady 10 -padx 10
- ${NS}::labelframe $w.stat -text [mc "Database Statistics"]
+ ttk::labelframe $w.stat -text [mc "Database Statistics"]
foreach s {
{count {mc "Number of loose objects"}}
{size {mc "Disk space used by loose objects"} { KiB}}
@@ -54,8 +53,8 @@ proc do_stats {} {
set value "$value[lindex $s 2]"
}
- ${NS}::label $w.stat.l_$name -text [mc "%s:" $label] -anchor w
- ${NS}::label $w.stat.v_$name -text $value -anchor w
+ ttk::label $w.stat.l_$name -text [mc "%s:" $label] -anchor w
+ ttk::label $w.stat.v_$name -text $value -anchor w
grid $w.stat.l_$name $w.stat.v_$name -sticky we -padx {0 5}
}
pack $w.stat -pady 10 -padx 10
diff --git a/git-gui/lib/diff.tcl b/git-gui/lib/diff.tcl
index d657bfec05..1acd37bdb4 100644
--- a/git-gui/lib/diff.tcl
+++ b/git-gui/lib/diff.tcl
@@ -2,15 +2,13 @@
# Copyright (C) 2006, 2007 Shawn Pearce
proc apply_tab_size {{firsttab {}}} {
- global have_tk85 repo_config ui_diff
+ global repo_config ui_diff
set w [font measure font_diff "0"]
- if {$have_tk85 && $firsttab != 0} {
+ if {$firsttab != 0} {
$ui_diff configure -tabs [list [expr {$firsttab * $w}] [expr {($firsttab + $repo_config(gui.tabsize)) * $w}]]
- } elseif {$have_tk85 || $repo_config(gui.tabsize) != 8} {
- $ui_diff configure -tabs [expr {$repo_config(gui.tabsize) * $w}]
} else {
- $ui_diff configure -tabs {}
+ $ui_diff configure -tabs [expr {$repo_config(gui.tabsize) * $w}]
}
}
@@ -191,7 +189,7 @@ proc show_other_diff {path w m cont_info} {
set sz [string length $content]
}
file {
- set fd [open $path r]
+ set fd [safe_open_file $path r]
fconfigure $fd \
-eofchar {} \
-encoding [get_path_encoding $path]
@@ -215,7 +213,7 @@ proc show_other_diff {path w m cont_info} {
$ui_diff insert end \
"* [mc "Git Repository (subproject)"]\n" \
d_info
- } elseif {![catch {set type [exec file $path]}]} {
+ } elseif {![catch {set type [safe_exec [list file $path]]}]} {
set n [string length $path]
if {[string equal -length $n $path $type]} {
set type [string range $type $n end]
@@ -280,9 +278,7 @@ proc start_show_diff {cont_info {add_opts {}}} {
if {$w eq $ui_index} {
lappend cmd diff-index
lappend cmd --cached
- if {[git-version >= "1.7.2"]} {
- lappend cmd --ignore-submodules=dirty
- }
+ lappend cmd --ignore-submodules=dirty
} elseif {$w eq $ui_workdir} {
if {[string first {U} $m] >= 0} {
lappend cmd diff
@@ -290,17 +286,14 @@ proc start_show_diff {cont_info {add_opts {}}} {
lappend cmd diff-files
}
}
- if {![is_config_false gui.textconv] && [git-version >= 1.6.1]} {
+ if {![is_config_false gui.textconv]} {
lappend cmd --textconv
}
if {[string match {160000 *} [lindex $s 2]]
|| [string match {160000 *} [lindex $s 3]]} {
set is_submodule_diff 1
-
- if {[git-version >= "1.6.6"]} {
- lappend cmd --submodule
- }
+ lappend cmd --submodule
}
lappend cmd -p
@@ -319,15 +312,7 @@ proc start_show_diff {cont_info {add_opts {}}} {
lappend cmd $path
}
- if {$is_submodule_diff && [git-version < "1.6.6"]} {
- if {$w eq $ui_index} {
- set cmd [list submodule summary --cached -- $path]
- } else {
- set cmd [list submodule summary --files -- $path]
- }
- }
-
- if {[catch {set fd [eval git_read --nice $cmd]} err]} {
+ if {[catch {set fd [git_read_nice $cmd]} err]} {
set diff_active 0
unlock_index
ui_status [mc "Unable to display %s" [escape_path $path]]
@@ -603,7 +588,7 @@ proc apply_or_revert_hunk {x y revert} {
if {[catch {
set enc [get_path_encoding $current_diff_path]
- set p [eval git_write $apply_cmd]
+ set p [git_write $apply_cmd]
fconfigure $p -translation binary -encoding $enc
puts -nonewline $p $wholepatch
close $p} err]} {
@@ -839,7 +824,7 @@ proc apply_or_revert_range_or_line {x y revert} {
if {[catch {
set enc [get_path_encoding $current_diff_path]
- set p [eval git_write $apply_cmd]
+ set p [git_write $apply_cmd]
fconfigure $p -translation binary -encoding $enc
puts -nonewline $p $current_diff_header
puts -nonewline $p $wholepatch
@@ -876,7 +861,7 @@ proc undo_last_revert {} {
if {[catch {
set enc $last_revert_enc
- set p [eval git_write $apply_cmd]
+ set p [git_write $apply_cmd]
fconfigure $p -translation binary -encoding $enc
puts -nonewline $p $last_revert
close $p} err]} {
diff --git a/git-gui/lib/error.tcl b/git-gui/lib/error.tcl
index 8968a57f33..fc0b5ad5e0 100644
--- a/git-gui/lib/error.tcl
+++ b/git-gui/lib/error.tcl
@@ -71,13 +71,12 @@ proc ask_popup {msg} {
}
proc hook_failed_popup {hook msg {is_fatal 1}} {
- global use_ttk NS
set w .hookfail
Dialog $w
wm withdraw $w
- ${NS}::frame $w.m
- ${NS}::label $w.m.l1 -text [mc "%s hook failed:" $hook] \
+ ttk::frame $w.m
+ ttk::label $w.m.l1 -text [mc "%s hook failed:" $hook] \
-anchor w \
-justify left \
-font font_uibold
@@ -89,10 +88,10 @@ proc hook_failed_popup {hook msg {is_fatal 1}} {
-width 80 -height 10 \
-font font_diff \
-yscrollcommand [list $w.m.sby set]
- ${NS}::scrollbar $w.m.sby -command [list $w.m.t yview]
+ ttk::scrollbar $w.m.sby -command [list $w.m.t yview]
pack $w.m.l1 -side top -fill x
if {$is_fatal} {
- ${NS}::label $w.m.l2 \
+ ttk::label $w.m.l2 \
-text [mc "You must correct the above errors before committing."] \
-anchor w \
-justify left \
@@ -106,7 +105,7 @@ proc hook_failed_popup {hook msg {is_fatal 1}} {
$w.m.t insert 1.0 $msg
$w.m.t conf -state disabled
- ${NS}::button $w.ok -text OK \
+ ttk::button $w.ok -text OK \
-width 15 \
-command "destroy $w"
pack $w.ok -side bottom -anchor e -pady 10 -padx 10
diff --git a/git-gui/lib/index.tcl b/git-gui/lib/index.tcl
index d2ec24bd80..377547034b 100644
--- a/git-gui/lib/index.tcl
+++ b/git-gui/lib/index.tcl
@@ -22,8 +22,6 @@ proc _close_updateindex {fd} {
}
proc rescan_on_error {err {after {}}} {
- global use_ttk NS
-
set w .indexfried
Dialog $w
wm withdraw $w
@@ -35,14 +33,14 @@ proc rescan_on_error {err {after {}}} {
-borderwidth 0 -highlightthickness 0 \
-background [get_bg_color $w]
$w.msg tag configure bold -font font_uibold -justify center
- ${NS}::scrollbar $w.vs -command [list $w.msg yview]
+ ttk::scrollbar $w.vs -command [list $w.msg yview]
$w.msg insert end $s bold \n\n$err {}
$w.msg configure -state disabled
- ${NS}::button $w.continue \
+ ttk::button $w.continue \
-text [mc "Continue"] \
-command [list destroy $w]
- ${NS}::button $w.unlock \
+ ttk::button $w.unlock \
-text [mc "Unlock Index"] \
-command "destroy $w; _delete_indexlock"
grid $w.msg - $w.vs -sticky news
@@ -75,7 +73,7 @@ proc update_indexinfo {msg path_list after} {
if {$batch > 25} {set batch 25}
set status_bar_operation [$::main_status start $msg [mc "files"]]
- set fd [git_write update-index -z --index-info]
+ set fd [git_write [list update-index -z --index-info]]
fconfigure $fd \
-blocking 0 \
-buffering full \
@@ -144,7 +142,7 @@ proc update_index {msg path_list after} {
if {$batch > 25} {set batch 25}
set status_bar_operation [$::main_status start $msg [mc "files"]]
- set fd [git_write update-index --add --remove -z --stdin]
+ set fd [git_write [list update-index --add --remove -z --stdin]]
fconfigure $fd \
-blocking 0 \
-buffering full \
@@ -218,13 +216,13 @@ proc checkout_index {msg path_list after capture_error} {
if {$batch > 25} {set batch 25}
set status_bar_operation [$::main_status start $msg [mc "files"]]
- set fd [git_write checkout-index \
+ set fd [git_write [list checkout-index \
--index \
--quiet \
--force \
-z \
--stdin \
- ]
+ ]]
fconfigure $fd \
-blocking 0 \
-buffering full \
diff --git a/git-gui/lib/line.tcl b/git-gui/lib/line.tcl
index a026de954c..5980ae805c 100644
--- a/git-gui/lib/line.tcl
+++ b/git-gui/lib/line.tcl
@@ -9,18 +9,17 @@ field ctext
field linenum {}
constructor new {i_w i_text args} {
- global use_ttk NS
set w $i_w
set ctext $i_text
- ${NS}::frame $w
- ${NS}::label $w.l -text [mc "Goto Line:"]
+ ttk::frame $w
+ ttk::label $w.l -text [mc "Goto Line:"]
tentry $w.ent \
-textvariable ${__this}::linenum \
-background lightgreen \
-validate key \
-validatecommand [cb _validate %P]
- ${NS}::button $w.bn -text [mc Go] -command [cb _goto]
+ ttk::button $w.bn -text [mc Go] -command [cb _goto]
pack $w.l -side left
pack $w.bn -side right
diff --git a/git-gui/lib/merge.tcl b/git-gui/lib/merge.tcl
index 664803cf3f..3490beddae 100644
--- a/git-gui/lib/merge.tcl
+++ b/git-gui/lib/merge.tcl
@@ -93,7 +93,7 @@ method _start {} {
set spec [$w_rev get_tracking_branch]
set cmit [$w_rev get_commit]
- set fh [open [gitdir FETCH_HEAD] w]
+ set fh [safe_open_file [gitdir FETCH_HEAD] w]
fconfigure $fh -translation lf
if {$spec eq {}} {
set remote .
@@ -112,16 +112,7 @@ method _start {} {
close $fh
set _last_merged_branch $branch
- if {[git-version >= "2.5.0"]} {
- set cmd [list git merge --strategy=recursive FETCH_HEAD]
- } else {
- set cmd [list git]
- lappend cmd merge
- lappend cmd --strategy=recursive
- lappend cmd [git fmt-merge-msg <[gitdir FETCH_HEAD]]
- lappend cmd HEAD
- lappend cmd $name
- }
+ set cmd [list git merge --strategy=recursive FETCH_HEAD]
ui_status [mc "Merging %s and %s..." $current_branch $stitle]
set cons [console::new [mc "Merge"] "merge $stitle"]
@@ -145,7 +136,7 @@ method _finish {cons ok} {
constructor dialog {} {
global current_branch
- global M1B use_ttk NS
+ global M1B
if {![_can_merge $this]} {
delete_this
@@ -160,21 +151,21 @@ constructor dialog {} {
set _start [cb _start]
- ${NS}::label $w.header \
+ ttk::label $w.header \
-text [mc "Merge Into %s" $current_branch] \
-font font_uibold
pack $w.header -side top -fill x
- ${NS}::frame $w.buttons
- ${NS}::button $w.buttons.visualize \
+ ttk::frame $w.buttons
+ ttk::button $w.buttons.visualize \
-text [mc Visualize] \
-command [cb _visualize]
pack $w.buttons.visualize -side left
- ${NS}::button $w.buttons.merge \
+ ttk::button $w.buttons.merge \
-text [mc Merge] \
-command $_start
pack $w.buttons.merge -side right
- ${NS}::button $w.buttons.cancel \
+ ttk::button $w.buttons.cancel \
-text [mc "Cancel"] \
-command [cb _cancel]
pack $w.buttons.cancel -side right -padx 5
@@ -239,7 +230,7 @@ Continue with resetting the current changes?"]
}
if {[ask_popup $op_question] eq {yes}} {
- set fd [git_read --stderr read-tree --reset -u -v HEAD]
+ set fd [git_read [list read-tree --reset -u -v HEAD] [list 2>@1]]
fconfigure $fd -blocking 0 -translation binary
set status_bar_operation [$::main_status \
start \
diff --git a/git-gui/lib/mergetool.tcl b/git-gui/lib/mergetool.tcl
index 8b8c16b1d6..2c9bb3af40 100644
--- a/git-gui/lib/mergetool.tcl
+++ b/git-gui/lib/mergetool.tcl
@@ -88,7 +88,7 @@ proc merge_load_stages {path cont} {
set merge_stages(3) {}
set merge_stages_buf {}
- set merge_stages_fd [eval git_read ls-files -u -z -- {$path}]
+ set merge_stages_fd [git_read [list ls-files -u -z -- $path]]
fconfigure $merge_stages_fd -blocking 0 -translation binary -encoding binary
fileevent $merge_stages_fd readable [list read_merge_stages $merge_stages_fd $cont]
@@ -310,7 +310,7 @@ proc merge_tool_get_stages {target stages} {
foreach fname $stages {
if {$merge_stages($i) eq {}} {
file delete $fname
- catch { close [open $fname w] }
+ catch { close [safe_open_file $fname w] }
} else {
# A hack to support autocrlf properly
git checkout-index -f --stage=$i -- $target
@@ -360,9 +360,9 @@ proc merge_tool_start {cmdline target backup stages} {
# Force redirection to avoid interpreting output on stderr
# as an error, and launch the tool
- lappend cmdline {2>@1}
+ set redir [list {2>@1}]
- if {[catch { set mtool_fd [_open_stdout_stderr $cmdline] } err]} {
+ if {[catch { set mtool_fd [safe_open_command $cmdline $redir] } err]} {
delete_temp_files $mtool_tmpfiles
error_popup [mc "Could not start the merge tool:\n\n%s" $err]
return
diff --git a/git-gui/lib/option.tcl b/git-gui/lib/option.tcl
index e43971bfa3..487d70691d 100644
--- a/git-gui/lib/option.tcl
+++ b/git-gui/lib/option.tcl
@@ -91,7 +91,7 @@ proc save_config {} {
proc do_options {} {
global repo_config global_config font_descs
global repo_config_new global_config_new
- global ui_comm_spell use_ttk NS
+ global ui_comm_spell
array unset repo_config_new
array unset global_config_new
@@ -115,23 +115,23 @@ proc do_options {} {
wm transient $w [winfo parent $w]
wm geometry $w "+[winfo rootx .]+[winfo rooty .]"
- ${NS}::frame $w.buttons
- ${NS}::button $w.buttons.restore -text [mc "Restore Defaults"] \
+ ttk::frame $w.buttons
+ ttk::button $w.buttons.restore -text [mc "Restore Defaults"] \
-default normal \
-command do_restore_defaults
pack $w.buttons.restore -side left
- ${NS}::button $w.buttons.save -text [mc Save] \
+ ttk::button $w.buttons.save -text [mc Save] \
-default active \
-command [list do_save_config $w]
pack $w.buttons.save -side right
- ${NS}::button $w.buttons.cancel -text [mc "Cancel"] \
+ ttk::button $w.buttons.cancel -text [mc "Cancel"] \
-default normal \
-command [list destroy $w]
pack $w.buttons.cancel -side right -padx 5
pack $w.buttons -side bottom -fill x -pady 10 -padx 10
- ${NS}::labelframe $w.repo -text [mc "%s Repository" [reponame]]
- ${NS}::labelframe $w.global -text [mc "Global (All Repositories)"]
+ ttk::labelframe $w.repo -text [mc "%s Repository" [reponame]]
+ ttk::labelframe $w.global -text [mc "Global (All Repositories)"]
pack $w.repo -side left -fill both -expand 1 -pady 5 -padx 5
pack $w.global -side right -fill both -expand 1 -pady 5 -padx 5
@@ -170,7 +170,7 @@ proc do_options {} {
foreach f {repo global} {
switch -glob -- $type {
b {
- ${NS}::checkbutton $w.$f.$optid -text $text \
+ ttk::checkbutton $w.$f.$optid -text $text \
-variable ${f}_config_new($name) \
-onvalue true \
-offvalue false
@@ -178,8 +178,8 @@ proc do_options {} {
}
i-* {
regexp -- {-(\d+)\.\.(\d+)$} $type _junk min max
- ${NS}::frame $w.$f.$optid
- ${NS}::label $w.$f.$optid.l -text [mc "%s:" $text]
+ ttk::frame $w.$f.$optid
+ ttk::label $w.$f.$optid.l -text [mc "%s:" $text]
pack $w.$f.$optid.l -side left -anchor w -fill x
tspinbox $w.$f.$optid.v \
-textvariable ${f}_config_new($name) \
@@ -193,9 +193,9 @@ proc do_options {} {
}
c -
t {
- ${NS}::frame $w.$f.$optid
- ${NS}::label $w.$f.$optid.l -text [mc "%s:" $text]
- ${NS}::entry $w.$f.$optid.v \
+ ttk::frame $w.$f.$optid
+ ttk::label $w.$f.$optid.l -text [mc "%s:" $text]
+ ttk::entry $w.$f.$optid.v \
-width 20 \
-textvariable ${f}_config_new($name)
pack $w.$f.$optid.l -side left -anchor w
@@ -206,7 +206,7 @@ proc do_options {} {
menu $w.$f.$optid.m
build_encoding_menu $w.$f.$optid.m \
[list set ${f}_config_new($name)] 1
- ${NS}::button $w.$f.$optid.b \
+ ttk::button $w.$f.$optid.b \
-text [mc "Change"] \
-command [list popup_btn_menu \
$w.$f.$optid.m $w.$f.$optid.b]
@@ -216,17 +216,11 @@ proc do_options {} {
}
s {
set opts [eval [lindex $option 3]]
- ${NS}::frame $w.$f.$optid
- ${NS}::label $w.$f.$optid.l -text [mc "%s:" $text]
- if {$use_ttk} {
- ttk::combobox $w.$f.$optid.v \
- -textvariable ${f}_config_new($name) \
- -values $opts -state readonly
- } else {
- eval tk_optionMenu $w.$f.$optid.v \
- ${f}_config_new($name) \
- $opts
- }
+ ttk::frame $w.$f.$optid
+ ttk::label $w.$f.$optid.l -text [mc "%s:" $text]
+ ttk::combobox $w.$f.$optid.v \
+ -textvariable ${f}_config_new($name) \
+ -values $opts -state readonly
pack $w.$f.$optid.l -side left -anchor w -fill x
pack $w.$f.$optid.v -side right -anchor e -padx 5
pack $w.$f.$optid -side top -anchor w -fill x
@@ -250,17 +244,11 @@ proc do_options {} {
set ${f}_config_new(gui.spellingdictionary) $value
}
- ${NS}::frame $w.$f.$optid
- ${NS}::label $w.$f.$optid.l -text [mc "Spelling Dictionary:"]
- if {$use_ttk} {
- ttk::combobox $w.$f.$optid.v \
- -textvariable ${f}_config_new(gui.spellingdictionary) \
- -values $all_dicts -state readonly
- } else {
- eval tk_optionMenu $w.$f.$optid.v \
- ${f}_config_new(gui.spellingdictionary) \
- $all_dicts
- }
+ ttk::frame $w.$f.$optid
+ ttk::label $w.$f.$optid.l -text [mc "Spelling Dictionary:"]
+ ttk::combobox $w.$f.$optid.v \
+ -textvariable ${f}_config_new(gui.spellingdictionary) \
+ -values $all_dicts -state readonly
pack $w.$f.$optid.l -side left -anchor w -fill x
pack $w.$f.$optid.v -side right -anchor e -padx 5
pack $w.$f.$optid -side top -anchor w -fill x
@@ -278,9 +266,9 @@ proc do_options {} {
set global_config_new(gui.$font^^size) \
[font configure $font -size]
- ${NS}::frame $w.global.$name
- ${NS}::label $w.global.$name.l -text [mc "%s:" $text]
- ${NS}::button $w.global.$name.b \
+ ttk::frame $w.global.$name
+ ttk::label $w.global.$name.l -text [mc "%s:" $text]
+ ttk::button $w.global.$name.b \
-text [mc "Change Font"] \
-command [list \
tchoosefont \
@@ -289,9 +277,9 @@ proc do_options {} {
global_config_new(gui.$font^^family) \
global_config_new(gui.$font^^size) \
]
- ${NS}::label $w.global.$name.f -textvariable global_config_new(gui.$font^^family)
- ${NS}::label $w.global.$name.s -textvariable global_config_new(gui.$font^^size)
- ${NS}::label $w.global.$name.pt -text [mc "pt."]
+ ttk::label $w.global.$name.f -textvariable global_config_new(gui.$font^^family)
+ ttk::label $w.global.$name.s -textvariable global_config_new(gui.$font^^size)
+ ttk::label $w.global.$name.pt -text [mc "pt."]
pack $w.global.$name.l -side left -anchor w
pack $w.global.$name.b -side right -anchor e
pack $w.global.$name.pt -side right -anchor w
diff --git a/git-gui/lib/remote.tcl b/git-gui/lib/remote.tcl
index ef77ed7399..9b49b6e462 100644
--- a/git-gui/lib/remote.tcl
+++ b/git-gui/lib/remote.tcl
@@ -32,7 +32,7 @@ proc all_tracking_branches {} {
}
if {$pat ne {}} {
- set fd [eval git_read for-each-ref --format=%(refname) $cmd]
+ set fd [git_read [concat for-each-ref --format=%(refname) $cmd]]
while {[gets $fd n] > 0} {
foreach spec $pat {
set dst [string range [lindex $spec 0] 0 end-2]
@@ -75,7 +75,7 @@ proc load_all_remotes {} {
foreach name $all_remotes {
catch {
- set fd [open [file join $rm_dir $name] r]
+ set fd [safe_open_file [file join $rm_dir $name] r]
while {[gets $fd line] >= 0} {
if {[regexp {^URL:[ ]*(.+)$} $line line url]} {
set remote_url($name) $url
@@ -145,7 +145,7 @@ proc add_fetch_entry {r} {
}
} else {
catch {
- set fd [open [gitdir remotes $r] r]
+ set fd [safe_open_file [gitdir remotes $r] r]
while {[gets $fd n] >= 0} {
if {[regexp {^Pull:[ \t]*([^:]+):} $n]} {
set enable 1
@@ -182,7 +182,7 @@ proc add_push_entry {r} {
}
} else {
catch {
- set fd [open [gitdir remotes $r] r]
+ set fd [safe_open_file [gitdir remotes $r] r]
while {[gets $fd n] >= 0} {
if {[regexp {^Push:[ \t]*([^:]+):} $n]} {
set enable 1
@@ -233,8 +233,6 @@ proc make_sure_remote_submenues_exist {remote_m} {
proc update_all_remotes_menu_entry {} {
global all_remotes
- if {[git-version < 1.6.6]} { return }
-
set have_remote 0
foreach r $all_remotes {
incr have_remote
diff --git a/git-gui/lib/remote_add.tcl b/git-gui/lib/remote_add.tcl
index 480a6b30d0..bff1376cb3 100644
--- a/git-gui/lib/remote_add.tcl
+++ b/git-gui/lib/remote_add.tcl
@@ -13,7 +13,7 @@ field location {}; # location of the remote the user has chosen
field opt_action fetch; # action to do after registering the remote locally
constructor dialog {} {
- global repo_config use_ttk NS
+ global repo_config
make_dialog top w
wm withdraw $top
@@ -22,34 +22,34 @@ constructor dialog {} {
wm geometry $top "+[winfo rootx .]+[winfo rooty .]"
}
- ${NS}::label $w.header -text [mc "Add New Remote"] \
+ ttk::label $w.header -text [mc "Add New Remote"] \
-font font_uibold -anchor center
pack $w.header -side top -fill x
- ${NS}::frame $w.buttons
- ${NS}::button $w.buttons.create -text [mc Add] \
+ ttk::frame $w.buttons
+ ttk::button $w.buttons.create -text [mc Add] \
-default active \
-command [cb _add]
pack $w.buttons.create -side right
- ${NS}::button $w.buttons.cancel -text [mc Cancel] \
+ ttk::button $w.buttons.cancel -text [mc Cancel] \
-command [list destroy $w]
pack $w.buttons.cancel -side right -padx 5
pack $w.buttons -side bottom -fill x -pady 10 -padx 10
- ${NS}::labelframe $w.desc -text [mc "Remote Details"]
+ ttk::labelframe $w.desc -text [mc "Remote Details"]
- ${NS}::label $w.desc.name_l -text [mc "Name:"]
+ ttk::label $w.desc.name_l -text [mc "Name:"]
set w_name $w.desc.name_t
- ${NS}::entry $w_name \
+ ttk::entry $w_name \
-width 40 \
-textvariable @name \
-validate key \
-validatecommand [cb _validate_name %d %S]
grid $w.desc.name_l $w_name -sticky we -padx {0 5}
- ${NS}::label $w.desc.loc_l -text [mc "Location:"]
+ ttk::label $w.desc.loc_l -text [mc "Location:"]
set w_loc $w.desc.loc_t
- ${NS}::entry $w_loc \
+ ttk::entry $w_loc \
-width 40 \
-textvariable @location
grid $w.desc.loc_l $w_loc -sticky we -padx {0 5}
@@ -57,21 +57,21 @@ constructor dialog {} {
grid columnconfigure $w.desc 1 -weight 1
pack $w.desc -anchor nw -fill x -pady 5 -padx 5
- ${NS}::labelframe $w.action -text [mc "Further Action"]
+ ttk::labelframe $w.action -text [mc "Further Action"]
- ${NS}::radiobutton $w.action.fetch \
+ ttk::radiobutton $w.action.fetch \
-text [mc "Fetch Immediately"] \
-value fetch \
-variable @opt_action
pack $w.action.fetch -anchor nw
- ${NS}::radiobutton $w.action.push \
+ ttk::radiobutton $w.action.push \
-text [mc "Initialize Remote Repository and Push"] \
-value push \
-variable @opt_action
pack $w.action.push -anchor nw
- ${NS}::radiobutton $w.action.none \
+ ttk::radiobutton $w.action.none \
-text [mc "Do Nothing Else Now"] \
-value none \
-variable @opt_action
diff --git a/git-gui/lib/remote_branch_delete.tcl b/git-gui/lib/remote_branch_delete.tcl
index 5ba9fcadd1..349d31edf3 100644
--- a/git-gui/lib/remote_branch_delete.tcl
+++ b/git-gui/lib/remote_branch_delete.tcl
@@ -23,7 +23,7 @@ field full_cache
field cached
constructor dialog {} {
- global all_remotes M1B use_ttk NS
+ global all_remotes M1B
make_dialog top w
wm title $top [mc "%s (%s): Delete Branch Remotely" [appname] [reponame]]
@@ -31,32 +31,28 @@ constructor dialog {} {
wm geometry $top "+[winfo rootx .]+[winfo rooty .]"
}
- ${NS}::label $w.header -text [mc "Delete Branch Remotely"] \
+ ttk::label $w.header -text [mc "Delete Branch Remotely"] \
-font font_uibold -anchor center
pack $w.header -side top -fill x
- ${NS}::frame $w.buttons
- ${NS}::button $w.buttons.delete -text [mc Delete] \
+ ttk::frame $w.buttons
+ ttk::button $w.buttons.delete -text [mc Delete] \
-default active \
-command [cb _delete]
pack $w.buttons.delete -side right
- ${NS}::button $w.buttons.cancel -text [mc "Cancel"] \
+ ttk::button $w.buttons.cancel -text [mc "Cancel"] \
-command [list destroy $w]
pack $w.buttons.cancel -side right -padx 5
pack $w.buttons -side bottom -fill x -pady 10 -padx 10
- ${NS}::labelframe $w.dest -text [mc "From Repository"]
+ ttk::labelframe $w.dest -text [mc "From Repository"]
if {$all_remotes ne {}} {
- ${NS}::radiobutton $w.dest.remote_r \
+ ttk::radiobutton $w.dest.remote_r \
-text [mc "Remote:"] \
-value remote \
-variable @urltype
- if {$use_ttk} {
- ttk::combobox $w.dest.remote_m -textvariable @remote \
- -values $all_remotes -state readonly
- } else {
- eval tk_optionMenu $w.dest.remote_m @remote $all_remotes
- }
+ ttk::combobox $w.dest.remote_m -textvariable @remote \
+ -values $all_remotes -state readonly
grid $w.dest.remote_r $w.dest.remote_m -sticky w
if {[lsearch -sorted -exact $all_remotes origin] != -1} {
set remote origin
@@ -68,11 +64,11 @@ constructor dialog {} {
} else {
set urltype url
}
- ${NS}::radiobutton $w.dest.url_r \
+ ttk::radiobutton $w.dest.url_r \
-text [mc "Arbitrary Location:"] \
-value url \
-variable @urltype
- ${NS}::entry $w.dest.url_t \
+ ttk::entry $w.dest.url_t \
-width 50 \
-textvariable @url \
-validate key \
@@ -85,19 +81,19 @@ constructor dialog {} {
grid columnconfigure $w.dest 1 -weight 1
pack $w.dest -anchor nw -fill x -pady 5 -padx 5
- ${NS}::labelframe $w.heads -text [mc "Branches"]
+ ttk::labelframe $w.heads -text [mc "Branches"]
slistbox $w.heads.l \
-height 10 \
-width 70 \
-listvariable @head_list \
-selectmode extended
- ${NS}::frame $w.heads.footer
- ${NS}::label $w.heads.footer.status \
+ ttk::frame $w.heads.footer
+ ttk::label $w.heads.footer.status \
-textvariable @status \
-anchor w \
-justify left
- ${NS}::button $w.heads.footer.rescan \
+ ttk::button $w.heads.footer.rescan \
-text [mc "Rescan"] \
-command [cb _rescan]
pack $w.heads.footer.status -side left -fill x
@@ -107,8 +103,8 @@ constructor dialog {} {
pack $w.heads.l -side left -fill both -expand 1
pack $w.heads -fill both -expand 1 -pady 5 -padx 5
- ${NS}::labelframe $w.validate -text [mc "Delete Only If"]
- ${NS}::radiobutton $w.validate.head_r \
+ ttk::labelframe $w.validate -text [mc "Delete Only If"]
+ ttk::radiobutton $w.validate.head_r \
-text [mc "Merged Into:"] \
-value head \
-variable @checktype
@@ -116,7 +112,7 @@ constructor dialog {} {
trace add variable @head_list write [cb _write_head_list]
trace add variable @check_head write [cb _write_check_head]
grid $w.validate.head_r $w.validate.head_m -sticky w
- ${NS}::radiobutton $w.validate.always_r \
+ ttk::radiobutton $w.validate.always_r \
-text [mc "Always (Do not perform merge checks)"] \
-value always \
-variable @checktype
@@ -308,7 +304,7 @@ method _load {cache uri} {
set full_list [list]
set head_cache($cache) [list]
set full_cache($cache) [list]
- set active_ls [git_read ls-remote $uri]
+ set active_ls [git_read [list ls-remote $uri]]
fconfigure $active_ls \
-blocking 0 \
-translation lf \
@@ -323,6 +319,8 @@ method _load {cache uri} {
}
method _read {cache fd} {
+ global hashlength
+
if {$fd ne $active_ls} {
catch {close $fd}
return
@@ -330,7 +328,7 @@ method _read {cache fd} {
while {[gets $fd line] >= 0} {
if {[string match {*^{}} $line]} continue
- if {[regexp {^([0-9a-f]{40}) (.*)$} $line _junk obj ref]} {
+ if {[regexp [string map "@@ $hashlength" {^([0-9a-f]{@@}) (.*)$}] $line _junk obj ref]} {
if {[regsub ^refs/heads/ $ref {} abr]} {
lappend head_list $abr
lappend head_cache($cache) $abr
diff --git a/git-gui/lib/search.tcl b/git-gui/lib/search.tcl
index ef1e55521d..47a0d8c961 100644
--- a/git-gui/lib/search.tcl
+++ b/git-gui/lib/search.tcl
@@ -21,7 +21,6 @@ field smarktop
field smarkbot
constructor new {i_w i_text args} {
- global use_ttk NS
set w $i_w
set ctext $i_text
@@ -44,14 +43,14 @@ constructor new {i_w i_text args} {
set history [list]
- ${NS}::frame $w
- ${NS}::label $w.l -text [mc Find:]
+ ttk::frame $w
+ ttk::label $w.l -text [mc Find:]
tentry $w.ent -textvariable ${__this}::searchstring -background lightgreen
- ${NS}::button $w.bn -text [mc Next] -command [cb find_next]
- ${NS}::button $w.bp -text [mc Prev] -command [cb find_prev]
- ${NS}::checkbutton $w.re -text [mc RegExp] \
+ ttk::button $w.bn -text [mc Next] -command [cb find_next]
+ ttk::button $w.bp -text [mc Prev] -command [cb find_prev]
+ ttk::checkbutton $w.re -text [mc RegExp] \
-variable ${__this}::regexpsearch -command [cb _incrsearch]
- ${NS}::checkbutton $w.cs -text [mc Case] \
+ ttk::checkbutton $w.cs -text [mc Case] \
-variable ${__this}::casesensitive -command [cb _incrsearch]
pack $w.l -side left
pack $w.cs -side right
diff --git a/git-gui/lib/shortcut.tcl b/git-gui/lib/shortcut.tcl
index 674a41f5e0..1d01d9cbfa 100644
--- a/git-gui/lib/shortcut.tcl
+++ b/git-gui/lib/shortcut.tcl
@@ -12,7 +12,7 @@ proc do_windows_shortcut {} {
set fn ${fn}.lnk
}
# Use git-gui.exe if available (ie: git-for-windows)
- set cmdLine [auto_execok git-gui.exe]
+ set cmdLine [list [_which git-gui]]
if {$cmdLine eq {}} {
set cmdLine [list [info nameofexecutable] \
[file normalize $::argv0]]
@@ -30,8 +30,8 @@ proc do_cygwin_shortcut {} {
global argv0 _gitworktree oguilib
if {[catch {
- set desktop [exec cygpath \
- --desktop]
+ set desktop [safe_exec [list cygpath \
+ --desktop]]
}]} {
set desktop .
}
@@ -50,14 +50,14 @@ proc do_cygwin_shortcut {} {
"CHERE_INVOKING=1 \
source /etc/profile; \
git gui"}
- exec /bin/mkshortcut.exe \
+ safe_exec [list /bin/mkshortcut.exe \
--arguments $shargs \
--desc "git-gui on $repodir" \
--icon $oguilib/git-gui.ico \
--name $fn \
--show min \
--workingdir $repodir \
- /bin/sh.exe
+ /bin/sh.exe]
} err]} {
error_popup [strcat [mc "Cannot write shortcut:"] "\n\n$err"]
}
@@ -83,7 +83,7 @@ proc do_macosx_app {} {
file mkdir $MacOS
- set fd [open [file join $Contents Info.plist] w]
+ set fd [safe_open_file [file join $Contents Info.plist] w]
puts $fd {<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
@@ -108,7 +108,7 @@ proc do_macosx_app {} {
</plist>}
close $fd
- set fd [open $exe w]
+ set fd [safe_open_file $exe w]
puts $fd "#!/bin/sh"
foreach name [lsort [array names env]] {
set value $env($name)
diff --git a/git-gui/lib/sshkey.tcl b/git-gui/lib/sshkey.tcl
index 589ff8f78a..7a6526d3db 100644
--- a/git-gui/lib/sshkey.tcl
+++ b/git-gui/lib/sshkey.tcl
@@ -7,7 +7,7 @@ proc find_ssh_key {} {
~/.ssh/id_rsa.pub ~/.ssh/identity.pub
} {
if {[file exists $name]} {
- set fh [open $name r]
+ set fh [safe_open_file $name r]
set cont [read $fh]
close $fh
return [list $name $cont]
@@ -18,7 +18,7 @@ proc find_ssh_key {} {
}
proc do_ssh_key {} {
- global sshkey_title have_tk85 sshkey_fd use_ttk NS
+ global sshkey_title sshkey_fd
set w .sshkey_dialog
if {[winfo exists $w]} {
@@ -38,9 +38,9 @@ proc do_ssh_key {} {
set gen_state disabled
}
- ${NS}::frame $w.header
- ${NS}::label $w.header.lbl -textvariable sshkey_title -anchor w
- ${NS}::button $w.header.gen -text [mc "Generate Key"] \
+ ttk::frame $w.header
+ ttk::label $w.header.lbl -textvariable sshkey_title -anchor w
+ ttk::button $w.header.gen -text [mc "Generate Key"] \
-command [list make_ssh_key $w] -state $gen_state
pack $w.header.lbl -side left -expand 1 -fill x
pack $w.header.gen -side right
@@ -48,17 +48,14 @@ proc do_ssh_key {} {
text $w.contents -width 60 -height 10 -wrap char -relief sunken
pack $w.contents -fill both -expand 1
- if {$have_tk85} {
- set clr darkblue
- if {$use_ttk} { set clr [ttk::style lookup . -selectbackground] }
- $w.contents configure -inactiveselectbackground $clr
- }
+ set clr [ttk::style lookup . -selectbackground]
+ $w.contents configure -inactiveselectbackground $clr
- ${NS}::frame $w.buttons
- ${NS}::button $w.buttons.close -text [mc Close] \
+ ttk::frame $w.buttons
+ ttk::button $w.buttons.close -text [mc Close] \
-default active -command [list destroy $w]
pack $w.buttons.close -side right
- ${NS}::button $w.buttons.copy -text [mc "Copy To Clipboard"] \
+ ttk::button $w.buttons.copy -text [mc "Copy To Clipboard"] \
-command [list tk_textCopy $w.contents]
pack $w.buttons.copy -side left
pack $w.buttons -side bottom -fill x -pady 5 -padx 5
@@ -83,9 +80,10 @@ proc make_ssh_key {w} {
set sshkey_title [mc "Generating..."]
$w.header.gen configure -state disabled
- set cmdline [list sh -c {echo | ssh-keygen -q -t rsa -f ~/.ssh/id_rsa 2>&1}]
+ set cmdline [list [shellpath] -c \
+ {echo | ssh-keygen -q -t rsa -f ~/.ssh/id_rsa 2>&1}]
- if {[catch { set sshkey_fd [_open_stdout_stderr $cmdline] } err]} {
+ if {[catch { set sshkey_fd [safe_open_command $cmdline] } err]} {
error_popup [mc "Could not start ssh-keygen:\n\n%s" $err]
return
}
diff --git a/git-gui/lib/status_bar.tcl b/git-gui/lib/status_bar.tcl
index d32b14142f..f5c0204a2d 100644
--- a/git-gui/lib/status_bar.tcl
+++ b/git-gui/lib/status_bar.tcl
@@ -39,7 +39,6 @@ field operations ; # list of current ongoing operations
field completed_operation_count
constructor new {path} {
- global use_ttk NS
set w $path
set w_l $w.l
set w_c $w.c
@@ -51,11 +50,8 @@ constructor new {path} {
set operations [list]
set completed_operation_count 0
- ${NS}::frame $w
- if {!$use_ttk} {
- $w configure -borderwidth 1 -relief sunken
- }
- ${NS}::label $w_l \
+ ttk::frame $w
+ ttk::label $w_l \
-textvariable @status_bar_text \
-anchor w \
-justify left
@@ -72,7 +68,6 @@ method _oneline_pack {} {
}
constructor two_line {path} {
- global NS
set w $path
set w_l $w.l
set w_c $w.c
@@ -84,8 +79,8 @@ constructor two_line {path} {
set operations [list]
set completed_operation_count 0
- ${NS}::frame $w
- ${NS}::label $w_l \
+ ttk::frame $w
+ ttk::label $w_l \
-textvariable @status_bar_text \
-anchor w \
-justify left
diff --git a/git-gui/lib/themed.tcl b/git-gui/lib/themed.tcl
index f43d84e54f..8c4a0c2ee7 100644
--- a/git-gui/lib/themed.tcl
+++ b/git-gui/lib/themed.tcl
@@ -190,8 +190,7 @@ proc InitEntryFrame {} {
}
proc gold_frame {w args} {
- global use_ttk
- if {$use_ttk && ![is_MacOSX]} {
+ if {![is_MacOSX]} {
eval [linsert $args 0 ttk::frame $w -style Gold.TFrame]
} else {
eval [linsert $args 0 frame $w -background gold]
@@ -199,8 +198,7 @@ proc gold_frame {w args} {
}
proc tlabel {w args} {
- global use_ttk
- if {$use_ttk && ![is_MacOSX]} {
+ if {![is_MacOSX]} {
set cmd [list ttk::label $w -style Color.TLabel]
foreach {k v} $args {
switch -glob -- $k {
@@ -216,17 +214,7 @@ proc tlabel {w args} {
# The padded label gets used in the about class.
proc paddedlabel {w args} {
- global use_ttk
- if {$use_ttk} {
- eval [linsert $args 0 ttk::label $w -style Padded.TLabel]
- } else {
- eval [linsert $args 0 label $w \
- -padx 5 -pady 5 \
- -justify left \
- -anchor w \
- -borderwidth 1 \
- -relief solid]
- }
+ eval [linsert $args 0 ttk::label $w -style Padded.TLabel]
}
# Create a toplevel for use as a dialog.
@@ -242,8 +230,7 @@ proc Dialog {w args} {
# Tk toplevels are not themed - so pave it over with a themed frame to get
# the base color correct per theme.
proc pave_toplevel {w} {
- global use_ttk
- if {$use_ttk && ![winfo exists $w.!paving]} {
+ if {![winfo exists $w.!paving]} {
set paving [ttk::frame $w.!paving]
place $paving -x 0 -y 0 -relwidth 1 -relheight 1
lower $paving
@@ -254,20 +241,11 @@ proc pave_toplevel {w} {
# On many themes the border for a scrolled listbox needs to go around the
# listbox and the scrollbar.
proc slistbox {w args} {
- global use_ttk NS
- if {$use_ttk} {
- set f [ttk::frame $w -style SListbox.TFrame -padding 2]
- } else {
- set f [frame $w -relief flat]
- }
+ set f [ttk::frame $w -style SListbox.TFrame -padding 2]
if {[catch {
- if {$use_ttk} {
- eval [linsert $args 0 listbox $f.list -relief flat \
- -highlightthickness 0 -borderwidth 0]
- } else {
- eval [linsert $args 0 listbox $f.list]
- }
- ${NS}::scrollbar $f.vs -command [list $f.list yview]
+ eval [linsert $args 0 listbox $f.list -relief flat \
+ -highlightthickness 0 -borderwidth 0]
+ ttk::scrollbar $f.vs -command [list $f.list yview]
$f.list configure -yscrollcommand [list $f.vs set]
grid $f.list $f.vs -sticky news
grid rowconfigure $f 0 -weight 1
@@ -285,67 +263,42 @@ proc slistbox {w args} {
# fetch the background color from a widget.
proc get_bg_color {w} {
- global use_ttk
- if {$use_ttk} {
- set bg [ttk::style lookup [winfo class $w] -background]
- } else {
- set bg [$w cget -background]
- }
+ set bg [ttk::style lookup [winfo class $w] -background]
return $bg
}
-# ttk::spinbox didn't get added until 8.6
+# ttk::spinbox
proc tspinbox {w args} {
- global use_ttk
- if {$use_ttk && [llength [info commands ttk::spinbox]] > 0} {
- eval [linsert $args 0 ttk::spinbox $w]
- } else {
- eval [linsert $args 0 spinbox $w]
- }
+ eval [linsert $args 0 ttk::spinbox $w]
}
# Create a text widget with any theme specific properties.
proc ttext {w args} {
- global use_ttk
- if {$use_ttk} {
- switch -- [ttk_get_current_theme] {
- "vista" - "xpnative" {
- lappend args -highlightthickness 0 -borderwidth 0
- }
+ switch -- [ttk_get_current_theme] {
+ "vista" - "xpnative" {
+ lappend args -highlightthickness 0 -borderwidth 0
}
}
set w [eval [linsert $args 0 text $w]]
- if {$use_ttk} {
- if {[winfo class [winfo parent $w]] eq "EntryFrame"} {
- bind $w <FocusIn> {[winfo parent %W] state focus}
- bind $w <FocusOut> {[winfo parent %W] state !focus}
- }
+ if {[winfo class [winfo parent $w]] eq "EntryFrame"} {
+ bind $w <FocusIn> {[winfo parent %W] state focus}
+ bind $w <FocusOut> {[winfo parent %W] state !focus}
}
return $w
}
# themed frame suitable for surrounding a text field.
proc textframe {w args} {
- global use_ttk
- if {$use_ttk} {
- if {[catch {ttk::style layout EntryFrame}]} {
- InitEntryFrame
- }
- eval [linsert $args 0 ttk::frame $w -class EntryFrame -style EntryFrame]
- } else {
- eval [linsert $args 0 frame $w]
+ if {[catch {ttk::style layout EntryFrame}]} {
+ InitEntryFrame
}
+ eval [linsert $args 0 ttk::frame $w -class EntryFrame -style EntryFrame]
return $w
}
proc tentry {w args} {
- global use_ttk
- if {$use_ttk} {
- InitTheme
- ttk::entry $w -style Edged.Entry
- } else {
- entry $w
- }
+ InitTheme
+ ttk::entry $w -style Edged.Entry
rename $w _$w
interp alias {} $w {} tentry_widgetproc $w
@@ -353,25 +306,14 @@ proc tentry {w args} {
return $w
}
proc tentry_widgetproc {w cmd args} {
- global use_ttk
switch -- $cmd {
state {
- if {$use_ttk} {
- return [uplevel 1 [list _$w $cmd] $args]
- } else {
- if {[lsearch -exact $args pressed] != -1} {
- _$w configure -background lightpink
- } else {
- _$w configure -background lightgreen
- }
- }
+ return [uplevel 1 [list _$w $cmd] $args]
}
configure {
- if {$use_ttk} {
- if {[set n [lsearch -exact $args -background]] != -1} {
- set args [lreplace $args $n [incr n]]
- if {[llength $args] == 0} {return}
- }
+ if {[set n [lsearch -exact $args -background]] != -1} {
+ set args [lreplace $args $n [incr n]]
+ if {[llength $args] == 0} {return}
}
return [uplevel 1 [list _$w $cmd] $args]
}
diff --git a/git-gui/lib/tools.tcl b/git-gui/lib/tools.tcl
index 413f1a1700..48fddfd814 100644
--- a/git-gui/lib/tools.tcl
+++ b/git-gui/lib/tools.tcl
@@ -110,14 +110,14 @@ proc tools_exec {fullname} {
set cmdline $repo_config(guitool.$fullname.cmd)
if {[is_config_true "guitool.$fullname.noconsole"]} {
- tools_run_silent [list sh -c $cmdline] \
+ tools_run_silent [list [shellpath] -c $cmdline] \
[list tools_complete $fullname {}]
} else {
regsub {/} $fullname { / } title
set w [console::new \
[mc "Tool: %s" $title] \
[mc "Running: %s" $cmdline]]
- console::exec $w [list sh -c $cmdline] \
+ console::exec $w [list [shellpath] -c $cmdline] \
[list tools_complete $fullname $w]
}
@@ -130,8 +130,7 @@ proc tools_exec {fullname} {
}
proc tools_run_silent {cmd after} {
- lappend cmd 2>@1
- set fd [_open_stdout_stderr $cmd]
+ set fd [safe_open_command $cmd [list 2>@1]]
fconfigure $fd -blocking 0 -translation binary
fileevent $fd readable [list tools_consume_input $fd $after]
diff --git a/git-gui/lib/tools_dlg.tcl b/git-gui/lib/tools_dlg.tcl
index c05413ce43..73236215b5 100644
--- a/git-gui/lib/tools_dlg.tcl
+++ b/git-gui/lib/tools_dlg.tcl
@@ -16,7 +16,7 @@ field ask_branch 0; # ask for a revision
field ask_args 0; # ask for additional args
constructor dialog {} {
- global repo_config use_ttk NS
+ global repo_config
make_dialog top w
wm title $top [mc "%s (%s): Add Tool" [appname] [reponame]]
@@ -25,41 +25,41 @@ constructor dialog {} {
wm transient $top .
}
- ${NS}::label $w.header -text [mc "Add New Tool Command"] \
+ ttk::label $w.header -text [mc "Add New Tool Command"] \
-font font_uibold -anchor center
pack $w.header -side top -fill x
- ${NS}::frame $w.buttons
- ${NS}::checkbutton $w.buttons.global \
+ ttk::frame $w.buttons
+ ttk::checkbutton $w.buttons.global \
-text [mc "Add globally"] \
-variable @add_global
pack $w.buttons.global -side left -padx 5
- ${NS}::button $w.buttons.create -text [mc Add] \
+ ttk::button $w.buttons.create -text [mc Add] \
-default active \
-command [cb _add]
pack $w.buttons.create -side right
- ${NS}::button $w.buttons.cancel -text [mc Cancel] \
+ ttk::button $w.buttons.cancel -text [mc Cancel] \
-command [list destroy $w]
pack $w.buttons.cancel -side right -padx 5
pack $w.buttons -side bottom -fill x -pady 10 -padx 10
- ${NS}::labelframe $w.desc -text [mc "Tool Details"]
+ ttk::labelframe $w.desc -text [mc "Tool Details"]
- ${NS}::label $w.desc.name_cmnt -anchor w\
+ ttk::label $w.desc.name_cmnt -anchor w\
-text [mc "Use '/' separators to create a submenu tree:"]
grid x $w.desc.name_cmnt -sticky we -padx {0 5} -pady {0 2}
- ${NS}::label $w.desc.name_l -text [mc "Name:"]
+ ttk::label $w.desc.name_l -text [mc "Name:"]
set w_name $w.desc.name_t
- ${NS}::entry $w_name \
+ ttk::entry $w_name \
-width 40 \
-textvariable @name \
-validate key \
-validatecommand [cb _validate_name %d %S]
grid $w.desc.name_l $w_name -sticky we -padx {0 5}
- ${NS}::label $w.desc.cmd_l -text [mc "Command:"]
+ ttk::label $w.desc.cmd_l -text [mc "Command:"]
set w_cmd $w.desc.cmd_t
- ${NS}::entry $w_cmd \
+ ttk::entry $w_cmd \
-width 40 \
-textvariable @command
grid $w.desc.cmd_l $w_cmd -sticky we -padx {0 5} -pady {0 3}
@@ -67,30 +67,30 @@ constructor dialog {} {
grid columnconfigure $w.desc 1 -weight 1
pack $w.desc -anchor nw -fill x -pady 5 -padx 5
- ${NS}::checkbutton $w.confirm \
+ ttk::checkbutton $w.confirm \
-text [mc "Show a dialog before running"] \
-variable @confirm -command [cb _check_enable_dlg]
- ${NS}::labelframe $w.dlg -labelwidget $w.confirm
+ ttk::labelframe $w.dlg -labelwidget $w.confirm
- ${NS}::checkbutton $w.dlg.askbranch \
+ ttk::checkbutton $w.dlg.askbranch \
-text [mc "Ask the user to select a revision (sets \$REVISION)"] \
-variable @ask_branch -state disabled
pack $w.dlg.askbranch -anchor w -padx 15
- ${NS}::checkbutton $w.dlg.askargs \
+ ttk::checkbutton $w.dlg.askargs \
-text [mc "Ask the user for additional arguments (sets \$ARGS)"] \
-variable @ask_args -state disabled
pack $w.dlg.askargs -anchor w -padx 15
pack $w.dlg -anchor nw -fill x -pady {0 8} -padx 5
- ${NS}::checkbutton $w.noconsole \
+ ttk::checkbutton $w.noconsole \
-text [mc "Don't show the command output window"] \
-variable @no_console
pack $w.noconsole -anchor w -padx 5
- ${NS}::checkbutton $w.needsfile \
+ ttk::checkbutton $w.needsfile \
-text [mc "Run only if a diff is selected (\$FILENAME not empty)"] \
-variable @needs_file
pack $w.needsfile -anchor w -padx 5
@@ -179,7 +179,7 @@ field w ; # widget path
field w_names ; # name list
constructor dialog {} {
- global repo_config global_config system_config use_ttk NS
+ global repo_config global_config system_config
load_config 1
@@ -190,21 +190,21 @@ constructor dialog {} {
wm transient $top .
}
- ${NS}::label $w.header -text [mc "Remove Tool Commands"] \
+ ttk::label $w.header -text [mc "Remove Tool Commands"] \
-font font_uibold -anchor center
pack $w.header -side top -fill x
- ${NS}::frame $w.buttons
- ${NS}::button $w.buttons.create -text [mc Remove] \
+ ttk::frame $w.buttons
+ ttk::button $w.buttons.create -text [mc Remove] \
-default active \
-command [cb _remove]
pack $w.buttons.create -side right
- ${NS}::button $w.buttons.cancel -text [mc Cancel] \
+ ttk::button $w.buttons.cancel -text [mc Cancel] \
-command [list destroy $w]
pack $w.buttons.cancel -side right -padx 5
pack $w.buttons -side bottom -fill x -pady 10 -padx 10
- ${NS}::frame $w.list
+ ttk::frame $w.list
set w_names $w.list.l
slistbox $w_names \
-height 10 \
@@ -227,7 +227,7 @@ constructor dialog {} {
}
if {$local_cnt > 0} {
- ${NS}::label $w.colorlbl -foreground blue \
+ ttk::label $w.colorlbl -foreground blue \
-text [mc "(Blue denotes repository-local tools)"]
pack $w.colorlbl -fill x -pady 5 -padx 5
}
@@ -272,7 +272,7 @@ field is_ok 0; # ok to start
field argstr {}; # arguments
constructor dialog {fullname} {
- global M1B use_ttk NS
+ global M1B
set title [get_config "guitool.$fullname.title"]
if {$title eq {}} {
@@ -292,7 +292,7 @@ constructor dialog {fullname} {
set prompt [mc "Run Command: %s" $command]
}
- ${NS}::label $w.header -text $prompt -font font_uibold -anchor center
+ ttk::label $w.header -text $prompt -font font_uibold -anchor center
pack $w.header -side top -fill x
set argprompt [get_config "guitool.$fullname.argprompt"]
@@ -306,10 +306,10 @@ constructor dialog {fullname} {
set argprompt [mc "Arguments"]
}
- ${NS}::labelframe $w.arg -text $argprompt
+ ttk::labelframe $w.arg -text $argprompt
set w_args $w.arg.txt
- ${NS}::entry $w_args \
+ ttk::entry $w_args \
-width 40 \
-textvariable @argstr
pack $w_args -padx 5 -pady 5 -fill both
@@ -330,18 +330,18 @@ constructor dialog {fullname} {
pack $w.rev -anchor nw -fill both -expand 1 -pady 5 -padx 5
}
- ${NS}::frame $w.buttons
+ ttk::frame $w.buttons
if {$is_ask_revs} {
- ${NS}::button $w.buttons.visualize \
+ ttk::button $w.buttons.visualize \
-text [mc Visualize] \
-command [cb _visualize]
pack $w.buttons.visualize -side left
}
- ${NS}::button $w.buttons.ok \
+ ttk::button $w.buttons.ok \
-text [mc OK] \
-command [cb _start]
pack $w.buttons.ok -side right
- ${NS}::button $w.buttons.cancel \
+ ttk::button $w.buttons.cancel \
-text [mc "Cancel"] \
-command [cb _cancel]
pack $w.buttons.cancel -side right -padx 5
diff --git a/git-gui/lib/transport.tcl b/git-gui/lib/transport.tcl
index a1a424aab5..020d09e112 100644
--- a/git-gui/lib/transport.tcl
+++ b/git-gui/lib/transport.tcl
@@ -120,7 +120,7 @@ trace add variable push_remote write \
proc do_push_anywhere {} {
global all_remotes current_branch
global push_urltype push_remote push_url push_thin push_tags
- global push_force use_ttk NS
+ global push_force
set w .push_setup
toplevel $w
@@ -129,22 +129,22 @@ proc do_push_anywhere {} {
wm geometry $w "+[winfo rootx .]+[winfo rooty .]"
pave_toplevel $w
- ${NS}::label $w.header -text [mc "Push Branches"] \
+ ttk::label $w.header -text [mc "Push Branches"] \
-font font_uibold -anchor center
pack $w.header -side top -fill x
- ${NS}::frame $w.buttons
- ${NS}::button $w.buttons.create -text [mc Push] \
+ ttk::frame $w.buttons
+ ttk::button $w.buttons.create -text [mc Push] \
-default active \
-command [list start_push_anywhere_action $w]
pack $w.buttons.create -side right
- ${NS}::button $w.buttons.cancel -text [mc "Cancel"] \
+ ttk::button $w.buttons.cancel -text [mc "Cancel"] \
-default normal \
-command [list destroy $w]
pack $w.buttons.cancel -side right -padx 5
pack $w.buttons -side bottom -fill x -pady 10 -padx 10
- ${NS}::labelframe $w.source -text [mc "Source Branches"]
+ ttk::labelframe $w.source -text [mc "Source Branches"]
slistbox $w.source.l \
-height 10 \
-width 70 \
@@ -159,20 +159,16 @@ proc do_push_anywhere {} {
pack $w.source.l -side left -fill both -expand 1
pack $w.source -fill both -expand 1 -pady 5 -padx 5
- ${NS}::labelframe $w.dest -text [mc "Destination Repository"]
+ ttk::labelframe $w.dest -text [mc "Destination Repository"]
if {$all_remotes ne {}} {
- ${NS}::radiobutton $w.dest.remote_r \
+ ttk::radiobutton $w.dest.remote_r \
-text [mc "Remote:"] \
-value remote \
-variable push_urltype
- if {$use_ttk} {
- ttk::combobox $w.dest.remote_m -state readonly \
- -exportselection false \
- -textvariable push_remote \
- -values $all_remotes
- } else {
- eval tk_optionMenu $w.dest.remote_m push_remote $all_remotes
- }
+ ttk::combobox $w.dest.remote_m -state readonly \
+ -exportselection false \
+ -textvariable push_remote \
+ -values $all_remotes
grid $w.dest.remote_r $w.dest.remote_m -sticky w
if {[lsearch -sorted -exact $all_remotes origin] != -1} {
set push_remote origin
@@ -183,11 +179,11 @@ proc do_push_anywhere {} {
} else {
set push_urltype url
}
- ${NS}::radiobutton $w.dest.url_r \
+ ttk::radiobutton $w.dest.url_r \
-text [mc "Arbitrary Location:"] \
-value url \
-variable push_urltype
- ${NS}::entry $w.dest.url_t \
+ ttk::entry $w.dest.url_t \
-width 50 \
-textvariable push_url \
-validate key \
@@ -202,16 +198,16 @@ proc do_push_anywhere {} {
grid columnconfigure $w.dest 1 -weight 1
pack $w.dest -anchor nw -fill x -pady 5 -padx 5
- ${NS}::labelframe $w.options -text [mc "Transfer Options"]
- ${NS}::checkbutton $w.options.force \
+ ttk::labelframe $w.options -text [mc "Transfer Options"]
+ ttk::checkbutton $w.options.force \
-text [mc "Force overwrite existing branch (may discard changes)"] \
-variable push_force
grid $w.options.force -columnspan 2 -sticky w
- ${NS}::checkbutton $w.options.thin \
+ ttk::checkbutton $w.options.thin \
-text [mc "Use thin pack (for slow network connections)"] \
-variable push_thin
grid $w.options.thin -columnspan 2 -sticky w
- ${NS}::checkbutton $w.options.tags \
+ ttk::checkbutton $w.options.tags \
-text [mc "Include tags"] \
-variable push_tags
grid $w.options.tags -columnspan 2 -sticky w
diff --git a/git-gui/lib/win32.tcl b/git-gui/lib/win32.tcl
index db91ab84a5..3aedae2f13 100644
--- a/git-gui/lib/win32.tcl
+++ b/git-gui/lib/win32.tcl
@@ -2,11 +2,11 @@
# Copyright (C) 2007 Shawn Pearce
proc win32_read_lnk {lnk_path} {
- return [exec cscript.exe \
+ return [safe_exec [list cscript.exe \
/E:jscript \
/nologo \
[file join $::oguilib win32_shortcut.js] \
- $lnk_path]
+ $lnk_path]]
}
proc win32_create_lnk {lnk_path lnk_exec lnk_dir} {
@@ -15,12 +15,13 @@ proc win32_create_lnk {lnk_path lnk_exec lnk_dir} {
set lnk_args [lrange $lnk_exec 1 end]
set lnk_exec [lindex $lnk_exec 0]
- eval [list exec wscript.exe \
+ set cmd [list wscript.exe \
/E:jscript \
/nologo \
[file nativename [file join $oguilib win32_shortcut.js]] \
$lnk_path \
[file nativename [file join $oguilib git-gui.ico]] \
$lnk_dir \
- $lnk_exec] $lnk_args
+ $lnk_exec]
+ safe_exec [concat $cmd $lnk_args]
}
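The win32.tcl hunks above stop assembling exec/eval exec commands by hand and instead hand a single argument list to a safe_exec helper, so each list element reaches the child process as exactly one argument rather than being re-parsed by eval. The helper itself is defined elsewhere in git-gui and is not part of this excerpt; the Tcl sketch below is only a guess at its general shape, under that assumption:

    # Hypothetical sketch; the real safe_exec in git-gui may differ.
    # Take one argv-style list and expand it directly into exec, so no
    # element is ever re-split by eval or by string concatenation.
    proc safe_exec {argv} {
        return [exec -- {*}$argv]
    }

    # Usage mirroring the patched call site in win32_read_lnk:
    # safe_exec [list cscript.exe /E:jscript /nologo $script $lnk_path]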
diff --git a/git-gui/po/bg.po b/git-gui/po/bg.po
index 27b05038e4..70ab2b438a 100644
--- a/git-gui/po/bg.po
+++ b/git-gui/po/bg.po
@@ -1,15 +1,15 @@
# Bulgarian translation of git-gui po-file.
-# Copyright (C) 2012, 2013, 2014, 2015, 2016, 2024 Alexander Shopov <ash@kambanaria.org>.
+# Copyright (C) 2012, 2013, 2014, 2015, 2016, 2024, 2025 Alexander Shopov <ash@kambanaria.org>.
# This file is distributed under the same license as the git package.
-# Alexander Shopov <ash@kambanaria.org>, 2012, 2013, 2014, 2015, 2016, 2024.
+# Alexander Shopov <ash@kambanaria.org>, 2012, 2013, 2014, 2015, 2016, 2024, 2025.
#
#
msgid ""
msgstr ""
"Project-Id-Version: git-gui master\n"
"Report-Msgid-Bugs-To: \n"
-"POT-Creation-Date: 2020-02-08 22:54+0100\n"
-"PO-Revision-Date: 2024-12-22 15:44+0100\n"
+"POT-Creation-Date: 2025-04-20 09:27+0200\n"
+"PO-Revision-Date: 2025-05-29 13:37+0200\n"
"Last-Translator: Alexander Shopov <ash@kambanaria.org>\n"
"Language-Team: Bulgarian <dict@fsa-bg.org>\n"
"Language: bg\n"
@@ -18,33 +18,33 @@ msgstr ""
"Content-Transfer-Encoding: 8bit\n"
"Plural-Forms: nplurals=2; plural=(n != 1);\n"
-#: git-gui.sh:847
+#: git-gui.sh:861
#, tcl-format
msgid "Invalid font specified in %s:"
msgstr "Указан е неправилен шрифт в „%s“:"
-#: git-gui.sh:901
+#: git-gui.sh:924
msgid "Main Font"
msgstr "ОÑновен шрифт"
-#: git-gui.sh:902
+#: git-gui.sh:925
msgid "Diff/Console Font"
msgstr "Шрифт за разликите/конзолата"
-#: git-gui.sh:917 git-gui.sh:931 git-gui.sh:944 git-gui.sh:1034 git-gui.sh:1053
-#: git-gui.sh:3212
+#: git-gui.sh:940 git-gui.sh:954 git-gui.sh:967 git-gui.sh:1057 git-gui.sh:1076
+#: git-gui.sh:3217
msgid "git-gui: fatal error"
msgstr "git-gui: фатална грешка"
-#: git-gui.sh:918
+#: git-gui.sh:941
msgid "Cannot find git in PATH."
msgstr "Командата git липÑва в Ð¿ÑŠÑ‚Ñ (PATH)."
-#: git-gui.sh:945
+#: git-gui.sh:968
msgid "Cannot parse Git version string:"
msgstr "Ðизът Ñ Ð²ÐµÑ€ÑиÑта на Git не може да Ñе анализира:"
-#: git-gui.sh:970
+#: git-gui.sh:993
#, tcl-format
msgid ""
"Git version cannot be determined.\n"
@@ -63,23 +63,23 @@ msgstr ""
"\n"
"Да Ñе приеме ли, че „%s“ е верÑÐ¸Ñ â€ž1.5.0“?\n"
-#: git-gui.sh:1267
+#: git-gui.sh:1287
msgid "Git directory not found:"
msgstr "ДиректориÑта на Git не е открита:"
-#: git-gui.sh:1301
+#: git-gui.sh:1317
msgid "Cannot move to top of working directory:"
msgstr "Ðе може да Ñе премине към родителÑката директориÑ."
-#: git-gui.sh:1309
+#: git-gui.sh:1325
msgid "Cannot use bare repository:"
msgstr "Голо хранилище не може да Ñе използва:"
-#: git-gui.sh:1317
+#: git-gui.sh:1333
msgid "No working directory"
msgstr "Работната Ð´Ð¸Ñ€ÐµÐºÑ‚Ð¾Ñ€Ð¸Ñ Ð»Ð¸Ð¿Ñва"
-#: git-gui.sh:1491 lib/checkout_op.tcl:306
+#: git-gui.sh:1507 lib/checkout_op.tcl:306
msgid "Refreshing file status..."
msgstr "ОбновÑване на ÑÑŠÑтоÑнието на файла…"
@@ -87,19 +87,19 @@ msgstr "ОбновÑване на ÑÑŠÑтоÑнието на файла…"
msgid "Scanning for modified files ..."
msgstr "Проверка за променени файлове…"
-#: git-gui.sh:1629
+#: git-gui.sh:1635
msgid "Calling prepare-commit-msg hook..."
msgstr "Куката „prepare-commit-msg“ Ñе изпълнÑва в момента…"
-#: git-gui.sh:1646
+#: git-gui.sh:1652
msgid "Commit declined by prepare-commit-msg hook."
msgstr "Подаването е отхвърлено от куката „prepare-commit-msg“."
-#: git-gui.sh:1804 lib/browser.tcl:252
+#: git-gui.sh:1810 lib/browser.tcl:252
msgid "Ready."
msgstr "Готово."
-#: git-gui.sh:1968
+#: git-gui.sh:1974
#, tcl-format
msgid ""
"Display limit (gui.maxfilesdisplayed = %s) reached, not showing all %s files."
@@ -108,664 +108,811 @@ msgstr ""
"извеждане(gui.maxfilesdisplayed = %s), Ñъответно не Ñа показани вÑички %s "
"файла."
-#: git-gui.sh:2091
+#: git-gui.sh:2097
msgid "Unmodified"
msgstr "Ðепроменен"
-#: git-gui.sh:2093
+#: git-gui.sh:2099
msgid "Modified, not staged"
msgstr "Променен, но не е в индекÑа"
-#: git-gui.sh:2094 git-gui.sh:2106
+#: git-gui.sh:2100 git-gui.sh:2112
msgid "Staged for commit"
msgstr "Ð’ индекÑа за подаване"
-#: git-gui.sh:2095 git-gui.sh:2107
+#: git-gui.sh:2101 git-gui.sh:2113
msgid "Portions staged for commit"
msgstr "ЧаÑти Ñа в индекÑа за подаване"
-#: git-gui.sh:2096 git-gui.sh:2108
+#: git-gui.sh:2102 git-gui.sh:2114
msgid "Staged for commit, missing"
msgstr "Ð’ индекÑа за подаване, но липÑва"
-#: git-gui.sh:2098
+#: git-gui.sh:2104
msgid "File type changed, not staged"
msgstr "Видът на файла е Ñменен, но не е в индекÑа"
-#: git-gui.sh:2099 git-gui.sh:2100
+#: git-gui.sh:2105 git-gui.sh:2106
msgid "File type changed, old type staged for commit"
msgstr "Видът на файла е Ñменен, но новиÑÑ‚ вид не е в индекÑа"
-#: git-gui.sh:2101
+#: git-gui.sh:2107
msgid "File type changed, staged"
msgstr "Видът на файла е Ñменен и е в индекÑа"
-#: git-gui.sh:2102
+#: git-gui.sh:2108
msgid "File type change staged, modification not staged"
msgstr "Видът на файла е Ñменен в индекÑа, но не и Ñъдържанието"
-#: git-gui.sh:2103
+#: git-gui.sh:2109
msgid "File type change staged, file missing"
msgstr "Видът на файла е Ñменен в индекÑа, но файлът липÑва"
-#: git-gui.sh:2105
+#: git-gui.sh:2111
msgid "Untracked, not staged"
msgstr "ÐеÑледен"
-#: git-gui.sh:2110
+#: git-gui.sh:2116
msgid "Missing"
msgstr "ЛипÑващ"
-#: git-gui.sh:2111
+#: git-gui.sh:2117
msgid "Staged for removal"
msgstr "Ð’ индекÑа за изтриване"
-#: git-gui.sh:2112
+#: git-gui.sh:2118
msgid "Staged for removal, still present"
msgstr "Ð’ индекÑа за изтриване, но още го има"
-#: git-gui.sh:2114 git-gui.sh:2115 git-gui.sh:2116 git-gui.sh:2117
-#: git-gui.sh:2118 git-gui.sh:2119
+#: git-gui.sh:2120 git-gui.sh:2121 git-gui.sh:2122 git-gui.sh:2123
+#: git-gui.sh:2124 git-gui.sh:2125
msgid "Requires merge resolution"
msgstr "ИзиÑква коригиране при Ñливане"
-#: git-gui.sh:2164
+#: git-gui.sh:2170
msgid "Couldn't find gitk in PATH"
msgstr "Командата „gitk“ липÑва в пътищата, определени от променливата PATH."
-#: git-gui.sh:2210 git-gui.sh:2245
+#: git-gui.sh:2217 git-gui.sh:2253
#, tcl-format
msgid "Starting %s... please wait..."
msgstr "Стартиране на „%s“…, изчакайте…"
-#: git-gui.sh:2224
+#: git-gui.sh:2232
msgid "Couldn't find git gui in PATH"
msgstr ""
"Командата „git gui“ липÑва в пътищата, определени от променливата PATH."
-#: git-gui.sh:2726 lib/choose_repository.tcl:53
+#: git-gui.sh:2735 lib/choose_repository.tcl:53
msgid "Repository"
msgstr "Хранилище"
-#: git-gui.sh:2727
+#: git-gui.sh:2736
msgid "Edit"
msgstr "Редактиране"
-#: git-gui.sh:2729 lib/choose_rev.tcl:567
+#: git-gui.sh:2738 lib/choose_rev.tcl:567
msgid "Branch"
msgstr "Клон"
-#: git-gui.sh:2732 lib/choose_rev.tcl:554
+#: git-gui.sh:2741 lib/choose_rev.tcl:554
msgid "Commit@@noun"
msgstr "Подаване"
-#: git-gui.sh:2735 lib/merge.tcl:127 lib/merge.tcl:174
+#: git-gui.sh:2744 lib/merge.tcl:127 lib/merge.tcl:174
msgid "Merge"
msgstr "Сливане"
-#: git-gui.sh:2736 lib/choose_rev.tcl:563
+#: git-gui.sh:2745 lib/choose_rev.tcl:563
msgid "Remote"
msgstr "Отдалечено хранилище"
-#: git-gui.sh:2739
+#: git-gui.sh:2748
msgid "Tools"
msgstr "Команди"
-#: git-gui.sh:2748
+#: git-gui.sh:2757
msgid "Explore Working Copy"
msgstr "Разглеждане на работното копие"
-#: git-gui.sh:2763
+#: git-gui.sh:2772
msgid "Git Bash"
msgstr "Bash за Git"
-#: git-gui.sh:2772
+#: git-gui.sh:2781
msgid "Browse Current Branch's Files"
msgstr "Разглеждане на файловете в Ñ‚ÐµÐºÑƒÑ‰Ð¸Ñ ÐºÐ»Ð¾Ð½"
-#: git-gui.sh:2776
+#: git-gui.sh:2785
msgid "Browse Branch Files..."
msgstr "Разглеждане на Ñ‚ÐµÐºÑƒÑ‰Ð¸Ñ ÐºÐ»Ð¾Ð½â€¦"
-#: git-gui.sh:2781
+#: git-gui.sh:2790
msgid "Visualize Current Branch's History"
msgstr "Ð’Ð¸Ð·ÑƒÐ°Ð»Ð¸Ð·Ð°Ñ†Ð¸Ñ Ð½Ð° иÑториÑта на Ñ‚ÐµÐºÑƒÑ‰Ð¸Ñ ÐºÐ»Ð¾Ð½"
-#: git-gui.sh:2785
+#: git-gui.sh:2794
msgid "Visualize All Branch History"
msgstr "Ð’Ð¸Ð·ÑƒÐ°Ð»Ð¸Ð·Ð°Ñ†Ð¸Ñ Ð½Ð° иÑториÑта на вÑички клонове"
-#: git-gui.sh:2792
+#: git-gui.sh:2801
#, tcl-format
msgid "Browse %s's Files"
msgstr "Разглеждане на файловете в „%s“"
-#: git-gui.sh:2794
+#: git-gui.sh:2803
#, tcl-format
msgid "Visualize %s's History"
msgstr "Ð’Ð¸Ð·ÑƒÐ°Ð»Ð¸Ð·Ð°Ñ†Ð¸Ñ Ð½Ð° иÑториÑта на „%s“"
-#: git-gui.sh:2799 lib/database.tcl:40
+#: git-gui.sh:2808 lib/database.tcl:40
msgid "Database Statistics"
msgstr "СтатиÑтика на базата от данни"
-#: git-gui.sh:2802 lib/database.tcl:33
+#: git-gui.sh:2811 lib/database.tcl:33
msgid "Compress Database"
msgstr "КомпреÑиране на базата от данни"
-#: git-gui.sh:2805
+#: git-gui.sh:2814
msgid "Verify Database"
msgstr "Проверка на базата от данни"
-#: git-gui.sh:2812 git-gui.sh:2816 git-gui.sh:2820
+#: git-gui.sh:2821 git-gui.sh:2825 git-gui.sh:2829
msgid "Create Desktop Icon"
msgstr "ДобавÑне на икона на Ñ€Ð°Ð±Ð¾Ñ‚Ð½Ð¸Ñ Ð¿Ð»Ð¾Ñ‚"
-#: git-gui.sh:2828 lib/choose_repository.tcl:209 lib/choose_repository.tcl:217
+#: git-gui.sh:2837 lib/choose_repository.tcl:206 lib/choose_repository.tcl:214
msgid "Quit"
msgstr "Спиране на програмата"
-#: git-gui.sh:2836
+#: git-gui.sh:2845
msgid "Undo"
msgstr "ОтмÑна"
-#: git-gui.sh:2839
+#: git-gui.sh:2848
msgid "Redo"
msgstr "Повторение"
-#: git-gui.sh:2843 git-gui.sh:3461
+#: git-gui.sh:2852 git-gui.sh:3477
msgid "Cut"
msgstr "ОтрÑзване"
-#: git-gui.sh:2846 git-gui.sh:3464 git-gui.sh:3540 git-gui.sh:3633
+#: git-gui.sh:2855 git-gui.sh:3480 git-gui.sh:3556 git-gui.sh:3651
#: lib/console.tcl:69
msgid "Copy"
msgstr "Копиране"
-#: git-gui.sh:2849 git-gui.sh:3467
+#: git-gui.sh:2858 git-gui.sh:3483
msgid "Paste"
msgstr "ПоÑтавÑне"
-#: git-gui.sh:2852 git-gui.sh:3470 lib/remote_branch_delete.tcl:39
-#: lib/branch_delete.tcl:28
+#: git-gui.sh:2861 git-gui.sh:3486 lib/branch_delete.tcl:28
+#: lib/remote_branch_delete.tcl:39
msgid "Delete"
msgstr "Изтриване"
-#: git-gui.sh:2856 git-gui.sh:3474 git-gui.sh:3637 lib/console.tcl:71
+#: git-gui.sh:2865 git-gui.sh:3490 git-gui.sh:3655 lib/console.tcl:71
msgid "Select All"
msgstr "Избиране на вÑичко"
-#: git-gui.sh:2865
+#: git-gui.sh:2874
msgid "Create..."
msgstr "Създаване…"
-#: git-gui.sh:2871
+#: git-gui.sh:2880
msgid "Checkout..."
msgstr "ИзтеглÑне…"
-#: git-gui.sh:2877
+#: git-gui.sh:2886
msgid "Rename..."
msgstr "Преименуване…"
-#: git-gui.sh:2882
+#: git-gui.sh:2891
msgid "Delete..."
msgstr "Изтриване…"
-#: git-gui.sh:2887
+#: git-gui.sh:2896
msgid "Reset..."
msgstr "ОтмÑна на промените…"
-#: git-gui.sh:2897
+#: git-gui.sh:2906
msgid "Done"
msgstr "Готово"
-#: git-gui.sh:2899
+#: git-gui.sh:2908
msgid "Commit@@verb"
msgstr "Подаване"
-#: git-gui.sh:2908 git-gui.sh:3400
+#: git-gui.sh:2917 git-gui.sh:3416
msgid "Amend Last Commit"
msgstr "ПоправÑне на поÑледното подаване"
-#: git-gui.sh:2918 git-gui.sh:3361 lib/remote_branch_delete.tcl:101
+#: git-gui.sh:2927 git-gui.sh:3377 lib/remote_branch_delete.tcl:101
msgid "Rescan"
msgstr "ОбновÑване"
-#: git-gui.sh:2924
+#: git-gui.sh:2933
msgid "Stage To Commit"
msgstr "Към индекÑа за подаване"
-#: git-gui.sh:2930
+#: git-gui.sh:2939
msgid "Stage Changed Files To Commit"
msgstr "Ð’Ñички променени файлове към индекÑа за подаване"
-#: git-gui.sh:2936
+#: git-gui.sh:2945
msgid "Unstage From Commit"
msgstr "Изваждане от индекÑа за подаване"
-#: git-gui.sh:2942 lib/index.tcl:521
+#: git-gui.sh:2951 lib/index.tcl:521
msgid "Revert Changes"
msgstr "Връщане на оригинала"
-#: git-gui.sh:2950 git-gui.sh:3700 git-gui.sh:3731
+#: git-gui.sh:2959 git-gui.sh:3718 git-gui.sh:3749
msgid "Show Less Context"
msgstr "По-малко контекÑÑ‚"
-#: git-gui.sh:2954 git-gui.sh:3704 git-gui.sh:3735
+#: git-gui.sh:2963 git-gui.sh:3722 git-gui.sh:3753
msgid "Show More Context"
msgstr "Повече контекÑÑ‚"
-#: git-gui.sh:2961 git-gui.sh:3374 git-gui.sh:3485
+#: git-gui.sh:2970 git-gui.sh:3390 git-gui.sh:3501
msgid "Sign Off"
msgstr "ПодпиÑване"
-#: git-gui.sh:2977
+#: git-gui.sh:2986
msgid "Local Merge..."
msgstr "Локално Ñливане…"
-#: git-gui.sh:2982
+#: git-gui.sh:2991
msgid "Abort Merge..."
msgstr "ПреуÑтановÑване на Ñливане…"
-#: git-gui.sh:2994 git-gui.sh:3022
+#: git-gui.sh:3003 git-gui.sh:3031
msgid "Add..."
msgstr "ДобавÑне…"
-#: git-gui.sh:2998
+#: git-gui.sh:3007
msgid "Push..."
msgstr "ИзтлаÑкване…"
-#: git-gui.sh:3002
+#: git-gui.sh:3011
msgid "Delete Branch..."
msgstr "Изтриване на клон…"
-#: git-gui.sh:3012 git-gui.sh:3666
+#: git-gui.sh:3021 git-gui.sh:3684
msgid "Options..."
msgstr "Опции…"
-#: git-gui.sh:3023
+#: git-gui.sh:3032
msgid "Remove..."
msgstr "Премахване…"
-#: git-gui.sh:3032 lib/choose_repository.tcl:67
+#: git-gui.sh:3041 lib/choose_repository.tcl:67
msgid "Help"
msgstr "Помощ"
-#: git-gui.sh:3036 git-gui.sh:3040 lib/choose_repository.tcl:61
-#: lib/choose_repository.tcl:70 lib/about.tcl:14
+#: git-gui.sh:3045 git-gui.sh:3049 lib/about.tcl:14
+#: lib/choose_repository.tcl:61 lib/choose_repository.tcl:70
#, tcl-format
msgid "About %s"
msgstr "ОтноÑно „%s“"
-#: git-gui.sh:3064
+#: git-gui.sh:3069
msgid "Online Documentation"
msgstr "Ð”Ð¾ÐºÑƒÐ¼ÐµÐ½Ñ‚Ð°Ñ†Ð¸Ñ Ð² Интернет"
-#: git-gui.sh:3067 lib/choose_repository.tcl:64 lib/choose_repository.tcl:73
+#: git-gui.sh:3072 lib/choose_repository.tcl:64 lib/choose_repository.tcl:73
msgid "Show SSH Key"
msgstr "Показване на ключа за SSH"
-#: git-gui.sh:3097 git-gui.sh:3229
+#: git-gui.sh:3102 git-gui.sh:3234
msgid "usage:"
msgstr "употреба:"
-#: git-gui.sh:3101 git-gui.sh:3233
+#: git-gui.sh:3106 git-gui.sh:3238
msgid "Usage"
msgstr "Употреба"
-#: git-gui.sh:3182 lib/blame.tcl:575
+#: git-gui.sh:3187 lib/blame.tcl:576
msgid "Error"
msgstr "Грешка"
-#: git-gui.sh:3213
+#: git-gui.sh:3218
#, tcl-format
msgid "fatal: cannot stat path %s: No such file or directory"
msgstr "ФÐТÐЛÐРГРЕШКÐ: пътÑÑ‚ „%s“ липÑва: такъв файл или Ð´Ð¸Ñ€ÐµÐºÑ‚Ð¾Ñ€Ð¸Ñ Ð½Ñма"
-#: git-gui.sh:3246
+#: git-gui.sh:3251
msgid "Current Branch:"
msgstr "Текущ клон:"
-#: git-gui.sh:3271
+#: git-gui.sh:3276
msgid "Unstaged Changes"
msgstr "Промени извън индекÑа"
-#: git-gui.sh:3293
+#: git-gui.sh:3298
msgid "Staged Changes (Will Commit)"
msgstr "Промени в индекÑа (за подаване)"
-#: git-gui.sh:3367
+#: git-gui.sh:3383
msgid "Stage Changed"
msgstr "ИндекÑÑŠÑ‚ е променен"
-#: git-gui.sh:3386 lib/transport.tcl:137
+#: git-gui.sh:3402 lib/transport.tcl:137
msgid "Push"
msgstr "ИзтлаÑкване"
-#: git-gui.sh:3413
+#: git-gui.sh:3429
msgid "Initial Commit Message:"
msgstr "Първоначално Ñъобщение при подаване:"
-#: git-gui.sh:3414
+#: git-gui.sh:3430
msgid "Amended Commit Message:"
msgstr "Поправено Ñъобщение при подаване:"
-#: git-gui.sh:3415
+#: git-gui.sh:3431
msgid "Amended Initial Commit Message:"
msgstr "Поправено първоначално Ñъобщение при подаване:"
-#: git-gui.sh:3416
+#: git-gui.sh:3432
msgid "Amended Merge Commit Message:"
msgstr "Поправено Ñъобщение при подаване ÑÑŠÑ Ñливане:"
-#: git-gui.sh:3417
+#: git-gui.sh:3433
msgid "Merge Commit Message:"
msgstr "Съобщение при подаване ÑÑŠÑ Ñливане:"
-#: git-gui.sh:3418
+#: git-gui.sh:3434
msgid "Commit Message:"
msgstr "Съобщение при подаване:"
-#: git-gui.sh:3477 git-gui.sh:3641 lib/console.tcl:73
+#: git-gui.sh:3493 git-gui.sh:3659 lib/console.tcl:73
msgid "Copy All"
msgstr "Копиране на вÑичко"
-#: git-gui.sh:3501 lib/blame.tcl:106
+#: git-gui.sh:3517 lib/blame.tcl:106
msgid "File:"
msgstr "Файл:"
-#: git-gui.sh:3549 lib/choose_repository.tcl:1100
+#: git-gui.sh:3565 lib/choose_repository.tcl:1054
msgid "Open"
msgstr "ОтварÑне"
-#: git-gui.sh:3629
+#: git-gui.sh:3647
msgid "Refresh"
msgstr "ОбновÑване"
-#: git-gui.sh:3650
+#: git-gui.sh:3668
msgid "Decrease Font Size"
msgstr "По-дребен шрифт"
-#: git-gui.sh:3654
+#: git-gui.sh:3672
msgid "Increase Font Size"
msgstr "По-едър шрифт"
-#: git-gui.sh:3662 lib/blame.tcl:296
+#: git-gui.sh:3680 lib/blame.tcl:296
msgid "Encoding"
msgstr "Кодиране"
-#: git-gui.sh:3673
+#: git-gui.sh:3691
msgid "Apply/Reverse Hunk"
msgstr "Прилагане/връщане на парче"
-#: git-gui.sh:3678
+#: git-gui.sh:3696
msgid "Apply/Reverse Line"
msgstr "Прилагане/връщане на ред"
-#: git-gui.sh:3684 git-gui.sh:3794 git-gui.sh:3805
+#: git-gui.sh:3702 git-gui.sh:3812 git-gui.sh:3823
msgid "Revert Hunk"
msgstr "Връщане на парче"
-#: git-gui.sh:3689 git-gui.sh:3801 git-gui.sh:3812
+#: git-gui.sh:3707 git-gui.sh:3819 git-gui.sh:3830
msgid "Revert Line"
msgstr "Връщане на ред"
-#: git-gui.sh:3694 git-gui.sh:3791
+#: git-gui.sh:3712 git-gui.sh:3809
msgid "Undo Last Revert"
msgstr "ОтмÑна на поÑледното връщане"
-#: git-gui.sh:3713
+#: git-gui.sh:3731
msgid "Run Merge Tool"
msgstr "Изпълнение на програмата за Ñливане"
-#: git-gui.sh:3718
+#: git-gui.sh:3736
msgid "Use Remote Version"
msgstr "ВерÑÐ¸Ñ Ð¾Ñ‚ отдалеченото хранилище"
-#: git-gui.sh:3722
+#: git-gui.sh:3740
msgid "Use Local Version"
msgstr "Локална верÑиÑ"
-#: git-gui.sh:3726
+#: git-gui.sh:3744
msgid "Revert To Base"
msgstr "Връщане към родителÑката верÑиÑ"
-#: git-gui.sh:3744
+#: git-gui.sh:3762
msgid "Visualize These Changes In The Submodule"
msgstr "Визуализиране на промените в подмодула"
-#: git-gui.sh:3748
+#: git-gui.sh:3766
msgid "Visualize Current Branch History In The Submodule"
msgstr "Ð’Ð¸Ð·ÑƒÐ°Ð»Ð¸Ð·Ð°Ñ†Ð¸Ñ Ð½Ð° иÑториÑта на Ñ‚ÐµÐºÑƒÑ‰Ð¸Ñ ÐºÐ»Ð¾Ð½ в иÑториÑта за подмодула"
-#: git-gui.sh:3752
+#: git-gui.sh:3770
msgid "Visualize All Branch History In The Submodule"
msgstr "Ð’Ð¸Ð·ÑƒÐ°Ð»Ð¸Ð·Ð°Ñ†Ð¸Ñ Ð½Ð° иÑториÑта на вÑички клони в иÑториÑта за подмодула"
-#: git-gui.sh:3757
+#: git-gui.sh:3775
msgid "Start git gui In The Submodule"
msgstr "Стартиране на „git gui“ за подмодула"
-#: git-gui.sh:3793
+#: git-gui.sh:3811
msgid "Unstage Hunk From Commit"
msgstr "Изваждане на парчето от подаването"
-#: git-gui.sh:3797
+#: git-gui.sh:3815
msgid "Unstage Lines From Commit"
msgstr "Изваждане на редовете от подаването"
-#: git-gui.sh:3798 git-gui.sh:3809
+#: git-gui.sh:3816 git-gui.sh:3827
msgid "Revert Lines"
msgstr "Връщане на редовете"
-#: git-gui.sh:3800
+#: git-gui.sh:3818
msgid "Unstage Line From Commit"
msgstr "Изваждане на реда от подаването"
-#: git-gui.sh:3804
+#: git-gui.sh:3822
msgid "Stage Hunk For Commit"
msgstr "ДобавÑне на парчето за подаване"
-#: git-gui.sh:3808
+#: git-gui.sh:3826
msgid "Stage Lines For Commit"
msgstr "ДобавÑне на редовете за подаване"
-#: git-gui.sh:3811
+#: git-gui.sh:3829
msgid "Stage Line For Commit"
msgstr "ДобавÑне на реда за подаване"
-#: git-gui.sh:3861
+#: git-gui.sh:3879
msgid "Initializing..."
msgstr "Инициализиране…"
-#: git-gui.sh:4017
+#: lib/about.tcl:26
+msgid "git-gui - a graphical user interface for Git."
+msgstr "git-gui — графичен Ð¸Ð½Ñ‚ÐµÑ€Ñ„ÐµÐ¹Ñ Ð·Ð° Git."
+
+#: lib/blame.tcl:74
#, tcl-format
-msgid ""
-"Possible environment issues exist.\n"
-"\n"
-"The following environment variables are probably\n"
-"going to be ignored by any Git subprocess run\n"
-"by %s:\n"
-"\n"
-msgstr ""
-"Възможно е да има проблем ÑÑŠÑ Ñредата.\n"
-"\n"
-"Ðай-вероÑтно Ñледните променливи нÑма да Ñе\n"
-"вземат под внимание от подпроцеÑите на Git\n"
-"от %s:\n"
-"\n"
+msgid "%s (%s): File Viewer"
+msgstr "%s (%s): Преглед на файлове"
-#: git-gui.sh:4046
-msgid ""
-"\n"
-"This is due to a known issue with the\n"
-"Tcl binary distributed by Cygwin."
-msgstr ""
-"\n"
-"Това е познат проблем и Ñе дължи на\n"
-"верÑиÑта на Tcl включена в Cygwin."
+#: lib/blame.tcl:80
+msgid "Commit:"
+msgstr "Подаване:"
-#: git-gui.sh:4051
+#: lib/blame.tcl:282
+msgid "Copy Commit"
+msgstr "Копиране на подаване"
+
+#: lib/blame.tcl:286
+msgid "Find Text..."
+msgstr "ТърÑене на текÑт…"
+
+#: lib/blame.tcl:290
+msgid "Goto Line..."
+msgstr "Към ред…"
+
+#: lib/blame.tcl:299
+msgid "Do Full Copy Detection"
+msgstr "Пълно търÑене на копиране"
+
+#: lib/blame.tcl:303
+msgid "Show History Context"
+msgstr "Показване на контекÑта от иÑториÑта"
+
+#: lib/blame.tcl:306
+msgid "Blame Parent Commit"
+msgstr "Ðнотиране на родителÑкото подаване"
+
+#: lib/blame.tcl:469
#, tcl-format
-msgid ""
-"\n"
-"\n"
-"A good replacement for %s\n"
-"is placing values for the user.name and\n"
-"user.email settings into your personal\n"
-"~/.gitconfig file.\n"
-msgstr ""
-"\n"
-"\n"
-"Добър замеÑтител на „%s“\n"
-"е да поÑтавите наÑтройките „user.name“ и\n"
-"„user.email“ в Ð»Ð¸Ñ‡Ð½Ð¸Ñ Ñи файл „~/.gitconfig“.\n"
+msgid "Reading %s..."
+msgstr "Чете Ñе „%s“…"
-#: lib/spellcheck.tcl:57
-msgid "Unsupported spell checker"
-msgstr "Тази програма за проверка на правопиÑа не Ñе поддържа"
+#: lib/blame.tcl:597
+msgid "Loading copy/move tracking annotations..."
+msgstr "Зареждане на анотациите за проÑледÑване на копирането/премеÑтването…"
-#: lib/spellcheck.tcl:65
-msgid "Spell checking is unavailable"
-msgstr "ЛипÑва програма за проверка на правопиÑа"
+#: lib/blame.tcl:614
+msgid "lines annotated"
+msgstr "реда анотирани"
-#: lib/spellcheck.tcl:68
-msgid "Invalid spell checking configuration"
-msgstr "Ðеправилни наÑтройки на проверката на правопиÑа"
+#: lib/blame.tcl:816
+msgid "Loading original location annotations..."
+msgstr "Зареждане на анотациите за първоначалното меÑтоположение…"
-#: lib/spellcheck.tcl:70
+#: lib/blame.tcl:819
+msgid "Annotation complete."
+msgstr "Ðнотирането завърши."
+
+#: lib/blame.tcl:850
+msgid "Busy"
+msgstr "ОперациÑта не е завършила"
+
+#: lib/blame.tcl:851
+msgid "Annotation process is already running."
+msgstr "Ð’ момента тече Ð¿Ñ€Ð¾Ñ†ÐµÑ Ð½Ð° анотиране."
+
+#: lib/blame.tcl:890
+msgid "Running thorough copy detection..."
+msgstr "ИзпълнÑва Ñе цÑлоÑтен Ð¿Ñ€Ð¾Ñ†ÐµÑ Ð½Ð° откриване на копиране…"
+
+#: lib/blame.tcl:958
+msgid "Loading annotation..."
+msgstr "Зареждане на анотации…"
+
+#: lib/blame.tcl:1011
+msgid "Author:"
+msgstr "Ðвтор:"
+
+#: lib/blame.tcl:1015
+msgid "Committer:"
+msgstr "Подал:"
+
+#: lib/blame.tcl:1020
+msgid "Original File:"
+msgstr "Първоначален файл:"
+
+#: lib/blame.tcl:1068
+msgid "Cannot find HEAD commit:"
+msgstr "Подаването за връх „HEAD“ не може да Ñе открие:"
+
+#: lib/blame.tcl:1123
+msgid "Cannot find parent commit:"
+msgstr "РодителÑкото подаване не може да Ñе открие"
+
+#: lib/blame.tcl:1138
+msgid "Unable to display parent"
+msgstr "РодителÑÑ‚ не може да Ñе покаже"
+
+#: lib/blame.tcl:1139 lib/diff.tcl:334
+msgid "Error loading diff:"
+msgstr "Грешка при зареждане на разлика:"
+
+#: lib/blame.tcl:1280
+msgid "Originally By:"
+msgstr "Първоначално от:"
+
+#: lib/blame.tcl:1286
+msgid "In File:"
+msgstr "Във файл:"
+
+#: lib/blame.tcl:1291
+msgid "Copied Or Moved Here By:"
+msgstr "Копирано или премеÑтено тук от:"
+
+#: lib/branch_checkout.tcl:16
#, tcl-format
-msgid "Reverting dictionary to %s."
-msgstr "Ползване на речник за език „%s“."
+msgid "%s (%s): Checkout Branch"
+msgstr "%s (%s): Клон за изтеглÑне"
-#: lib/spellcheck.tcl:73
-msgid "Spell checker silently failed on startup"
-msgstr "Програмата за Ð¿Ñ€Ð°Ð²Ð¾Ð¿Ð¸Ñ Ð´Ð°Ð¶Ðµ не Ñтартира уÑпешно."
+#: lib/branch_checkout.tcl:21
+msgid "Checkout Branch"
+msgstr "Клон за изтеглÑне"
-#: lib/spellcheck.tcl:80
-msgid "Unrecognized spell checker"
-msgstr "Ðепозната програма за проверка на правопиÑа"
+#: lib/branch_checkout.tcl:26
+msgid "Checkout"
+msgstr "ИзтеглÑне"
-#: lib/spellcheck.tcl:186
-msgid "No Suggestions"
-msgstr "ÐÑма предложениÑ"
+#: lib/branch_checkout.tcl:30 lib/branch_create.tcl:37 lib/branch_delete.tcl:34
+#: lib/branch_rename.tcl:32 lib/browser.tcl:292 lib/checkout_op.tcl:580
+#: lib/choose_font.tcl:45 lib/merge.tcl:178 lib/option.tcl:127
+#: lib/remote_add.tcl:34 lib/remote_branch_delete.tcl:43 lib/tools_dlg.tcl:41
+#: lib/tools_dlg.tcl:202 lib/tools_dlg.tcl:345 lib/transport.tcl:141
+msgid "Cancel"
+msgstr "Отказване"
-#: lib/spellcheck.tcl:388
-msgid "Unexpected EOF from spell checker"
-msgstr "Ðеочакван край на файл от програмата за проверка на правопиÑа"
+#: lib/branch_checkout.tcl:35 lib/browser.tcl:297 lib/tools_dlg.tcl:321
+msgid "Revision"
+msgstr "ВерÑиÑ"
-#: lib/spellcheck.tcl:392
-msgid "Spell Checker Failed"
-msgstr "Грешка в програмата за проверка на правопиÑа"
+#: lib/branch_checkout.tcl:39 lib/branch_create.tcl:69 lib/option.tcl:310
+msgid "Options"
+msgstr "Опции"
+
+#: lib/branch_checkout.tcl:42 lib/branch_create.tcl:92
+msgid "Fetch Tracking Branch"
+msgstr "ИзтеглÑне на промените от ÑÐ»ÐµÐ´ÐµÐ½Ð¸Ñ ÐºÐ»Ð¾Ð½"
+
+#: lib/branch_checkout.tcl:47
+msgid "Detach From Local Branch"
+msgstr "Изтриване от Ð»Ð¾ÐºÐ°Ð»Ð½Ð¸Ñ ÐºÐ»Ð¾Ð½"
-#: lib/transport.tcl:6 lib/remote_add.tcl:132
+#: lib/branch_create.tcl:23
#, tcl-format
-msgid "fetch %s"
-msgstr "доÑтавÑне на „%s“"
+msgid "%s (%s): Create Branch"
+msgstr "%s (%s): Създаване на клон"
-#: lib/transport.tcl:7
+#: lib/branch_create.tcl:28
+msgid "Create New Branch"
+msgstr "Създаване на нов клон"
+
+#: lib/branch_create.tcl:33 lib/choose_repository.tcl:386
+msgid "Create"
+msgstr "Създаване"
+
+#: lib/branch_create.tcl:42
+msgid "Branch Name"
+msgstr "Име на клона"
+
+#: lib/branch_create.tcl:44 lib/remote_add.tcl:41 lib/tools_dlg.tcl:51
+msgid "Name:"
+msgstr "Име:"
+
+#: lib/branch_create.tcl:57
+msgid "Match Tracking Branch Name"
+msgstr "Съвпадане по името на ÑÐ»ÐµÐ´ÐµÐ½Ð¸Ñ ÐºÐ»Ð¾Ð½"
+
+#: lib/branch_create.tcl:66
+msgid "Starting Revision"
+msgstr "Ðачална верÑиÑ"
+
+#: lib/branch_create.tcl:72
+msgid "Update Existing Branch:"
+msgstr "ОбновÑване на ÑъщеÑтвуващ клон:"
+
+#: lib/branch_create.tcl:75
+msgid "No"
+msgstr "Ðе"
+
+#: lib/branch_create.tcl:80
+msgid "Fast Forward Only"
+msgstr "Само тривиално превъртащо Ñливане"
+
+#: lib/branch_create.tcl:85 lib/checkout_op.tcl:572
+msgid "Reset"
+msgstr "Отначало"
+
+#: lib/branch_create.tcl:97
+msgid "Checkout After Creation"
+msgstr "Преминаване към клона Ñлед Ñъздаването му"
+
+#: lib/branch_create.tcl:132
+msgid "Please select a tracking branch."
+msgstr "Изберете клон за Ñледени."
+
+#: lib/branch_create.tcl:141
#, tcl-format
-msgid "Fetching new changes from %s"
-msgstr "ДоÑтавÑне на промените от „%s“"
+msgid "Tracking branch %s is not a branch in the remote repository."
+msgstr "СледÑщиÑÑ‚ клон — „%s“, не ÑъщеÑтвува в отдалеченото хранилище."
-#: lib/transport.tcl:18
+#: lib/branch_create.tcl:154 lib/branch_rename.tcl:92
+msgid "Please supply a branch name."
+msgstr "Дайте име на клона."
+
+#: lib/branch_create.tcl:165 lib/branch_rename.tcl:112
#, tcl-format
-msgid "remote prune %s"
-msgstr "окаÑтрÑне на ÑледÑщите клони към „%s“"
+msgid "'%s' is not an acceptable branch name."
+msgstr "„%s“ не може да Ñе използва за име на клон."
-#: lib/transport.tcl:19
+#: lib/branch_delete.tcl:16
#, tcl-format
-msgid "Pruning tracking branches deleted from %s"
-msgstr "ОкаÑтрÑне на ÑледÑщите клони на изтритите клони от „%s“"
+msgid "%s (%s): Delete Branch"
+msgstr "%s (%s): Изтриване на клон"
-#: lib/transport.tcl:25
-msgid "fetch all remotes"
-msgstr "доÑтавÑне от вÑички отдалечени"
+#: lib/branch_delete.tcl:21
+msgid "Delete Local Branch"
+msgstr "Изтриване на локален клон"
-#: lib/transport.tcl:26
-msgid "Fetching new changes from all remotes"
-msgstr "ДоÑтавÑне на промените от вÑички отдалечени хранилища"
+#: lib/branch_delete.tcl:39
+msgid "Local Branches"
+msgstr "Локални клони"
-#: lib/transport.tcl:40
-msgid "remote prune all remotes"
-msgstr "окаÑтрÑне на ÑледÑщите изтрити"
+#: lib/branch_delete.tcl:51
+msgid "Delete Only If Merged Into"
+msgstr "Изтриване, Ñамо ако промените Ñа Ñлети и другаде"
-#: lib/transport.tcl:41
-msgid "Pruning tracking branches deleted from all remotes"
-msgstr ""
-"ОкаÑтрÑне на ÑледÑщите клони на изтритите клони от вÑички отдалечени "
-"хранилища"
+#: lib/branch_delete.tcl:53 lib/remote_branch_delete.tcl:120
+msgid "Always (Do not perform merge checks)"
+msgstr "Винаги (без проверка за Ñливане)"
-#: lib/transport.tcl:54 lib/transport.tcl:92 lib/transport.tcl:110
-#: lib/remote_add.tcl:162
+#: lib/branch_delete.tcl:103
#, tcl-format
-msgid "push %s"
-msgstr "изтлаÑкване на „%s“"
+msgid "The following branches are not completely merged into %s:"
+msgstr "Ðе вÑички промени в клоните Ñа Ñлети в „%s“:"
-#: lib/transport.tcl:55
+#: lib/branch_delete.tcl:115 lib/remote_branch_delete.tcl:218
+msgid ""
+"Recovering deleted branches is difficult.\n"
+"\n"
+"Delete the selected branches?"
+msgstr ""
+"ВъзÑтановÑването на изтрити клони може да е трудно.\n"
+"\n"
+"Сигурни ли Ñте, че иÑкате да триете?"
+
+#: lib/branch_delete.tcl:131
#, tcl-format
-msgid "Pushing changes to %s"
-msgstr "ИзтлаÑкване на промените към „%s“"
+msgid " - %s:"
+msgstr " — „%s:“"
-#: lib/transport.tcl:93
+#: lib/branch_delete.tcl:141
#, tcl-format
-msgid "Mirroring to %s"
-msgstr "ИзтлаÑкване на вÑичко към „%s“"
+msgid ""
+"Failed to delete branches:\n"
+"%s"
+msgstr ""
+"ÐеуÑпешно триене на клони:\n"
+"%s"
-#: lib/transport.tcl:111
+#: lib/branch_rename.tcl:15
#, tcl-format
-msgid "Pushing %s %s to %s"
-msgstr "ИзтлаÑкване на %s „%s“ към „%s“"
+msgid "%s (%s): Rename Branch"
+msgstr "%s (%s): Преименуване на клон"
-#: lib/transport.tcl:132
-msgid "Push Branches"
-msgstr "Клони за изтлаÑкване"
+#: lib/branch_rename.tcl:23
+msgid "Rename Branch"
+msgstr "Преименуване на клон"
-#: lib/transport.tcl:141 lib/checkout_op.tcl:580 lib/remote_add.tcl:34
-#: lib/browser.tcl:292 lib/branch_checkout.tcl:30 lib/branch_rename.tcl:32
-#: lib/choose_font.tcl:45 lib/option.tcl:127 lib/tools_dlg.tcl:41
-#: lib/tools_dlg.tcl:202 lib/tools_dlg.tcl:345 lib/remote_branch_delete.tcl:43
-#: lib/branch_create.tcl:37 lib/branch_delete.tcl:34 lib/merge.tcl:178
-msgid "Cancel"
-msgstr "Отказване"
+#: lib/branch_rename.tcl:28
+msgid "Rename"
+msgstr "Преименуване"
-#: lib/transport.tcl:147
-msgid "Source Branches"
-msgstr "Клони-източници"
+#: lib/branch_rename.tcl:38
+msgid "Branch:"
+msgstr "Клон:"
-#: lib/transport.tcl:162
-msgid "Destination Repository"
-msgstr "Целево хранилище"
+#: lib/branch_rename.tcl:46
+msgid "New Name:"
+msgstr "Ðово име:"
-#: lib/transport.tcl:165 lib/remote_branch_delete.tcl:51
-msgid "Remote:"
-msgstr "Отдалечено хранилище:"
+#: lib/branch_rename.tcl:81
+msgid "Please select a branch to rename."
+msgstr "Изберете клон за преименуване."
-#: lib/transport.tcl:187 lib/remote_branch_delete.tcl:72
-msgid "Arbitrary Location:"
-msgstr "Произволно меÑтоположение:"
+#: lib/branch_rename.tcl:102 lib/checkout_op.tcl:202
+#, tcl-format
+msgid "Branch '%s' already exists."
+msgstr "Клонът „%s“ вече ÑъщеÑтвува."
-#: lib/transport.tcl:205
-msgid "Transfer Options"
-msgstr "ÐаÑтройки при пренаÑÑнето"
+#: lib/branch_rename.tcl:123
+#, tcl-format
+msgid "Failed to rename '%s'."
+msgstr "ÐеуÑпешно преименуване на „%s“."
-#: lib/transport.tcl:207
-msgid "Force overwrite existing branch (may discard changes)"
-msgstr ""
-"Изрично презапиÑване на ÑъщеÑтвуващ клон (нÑкои промени може да Ñе загубÑÑ‚)"
+#: lib/browser.tcl:17
+msgid "Starting..."
+msgstr "Стартиране…"
-#: lib/transport.tcl:211
-msgid "Use thin pack (for slow network connections)"
-msgstr "МакÑимална компреÑÐ¸Ñ (за бавни мрежови връзки)"
+#: lib/browser.tcl:27
+#, tcl-format
+msgid "%s (%s): File Browser"
+msgstr "%s (%s): Файлов браузър"
-#: lib/transport.tcl:215
-msgid "Include tags"
-msgstr "Включване на етикетите"
+#: lib/browser.tcl:132 lib/browser.tcl:149
+#, tcl-format
+msgid "Loading %s..."
+msgstr "Зареждане на „%s“…"
-#: lib/transport.tcl:229
+#: lib/browser.tcl:193
+msgid "[Up To Parent]"
+msgstr "[Към родителÑ]"
+
+#: lib/browser.tcl:275
#, tcl-format
-msgid "%s (%s): Push"
-msgstr "%s (%s): ИзтлаÑкване"
+msgid "%s (%s): Browse Branch Files"
+msgstr "%s (%s): Разглеждане на файловете в клона"
+
+#: lib/browser.tcl:282
+msgid "Browse Branch Files"
+msgstr "Разглеждане на файловете в клона"
+
+#: lib/browser.tcl:288 lib/choose_repository.tcl:401
+#: lib/choose_repository.tcl:488 lib/choose_repository.tcl:497
+#: lib/choose_repository.tcl:1069
+msgid "Browse"
+msgstr "Разглеждане"
#: lib/checkout_op.tcl:85
#, tcl-format
@@ -777,8 +924,8 @@ msgstr "ДоÑтавÑне на „%s“ от „%s“"
msgid "fatal: Cannot resolve %s"
msgstr "фатална грешка: „%s“ не може да Ñе открие"
-#: lib/checkout_op.tcl:146 lib/sshkey.tcl:58 lib/console.tcl:81
-#: lib/database.tcl:30
+#: lib/checkout_op.tcl:146 lib/console.tcl:81 lib/database.tcl:30
+#: lib/sshkey.tcl:58
msgid "Close"
msgstr "ЗатварÑне"
@@ -792,11 +939,6 @@ msgstr "Клонът „%s“ не ÑъщеÑтвува."
msgid "Failed to configure simplified git-pull for '%s'."
msgstr "ÐеуÑпешно наÑтройване на опроÑтен git-pull за „%s“."
-#: lib/checkout_op.tcl:202 lib/branch_rename.tcl:102
-#, tcl-format
-msgid "Branch '%s' already exists."
-msgstr "Клонът „%s“ вече ÑъщеÑтвува."
-
#: lib/checkout_op.tcl:229
#, tcl-format
msgid ""
@@ -896,14 +1038,10 @@ msgstr "ВъзÑтановÑването на загубените подаваÐ
msgid "Reset '%s'?"
msgstr "ЗанулÑване на „%s“?"
-#: lib/checkout_op.tcl:568 lib/tools_dlg.tcl:336 lib/merge.tcl:170
+#: lib/checkout_op.tcl:568 lib/merge.tcl:170 lib/tools_dlg.tcl:336
msgid "Visualize"
msgstr "ВизуализациÑ"
-#: lib/checkout_op.tcl:572 lib/branch_create.tcl:85
-msgid "Reset"
-msgstr "Отначало"
-
#: lib/checkout_op.tcl:636
#, tcl-format
msgid ""
@@ -922,327 +1060,6 @@ msgstr ""
"Това ÑÑŠÑтоÑние е аварийно и не трÑбва да Ñе Ñлучва. Програмата „%s“ ще "
"преуÑтанови работа."
-#: lib/remote_add.tcl:20
-#, tcl-format
-msgid "%s (%s): Add Remote"
-msgstr "%s (%s): ДобавÑне на отдалечено хранилище"
-
-#: lib/remote_add.tcl:25
-msgid "Add New Remote"
-msgstr "ДобавÑне на отдалечено хранилище"
-
-#: lib/remote_add.tcl:30 lib/tools_dlg.tcl:37
-msgid "Add"
-msgstr "ДобавÑне"
-
-#: lib/remote_add.tcl:39
-msgid "Remote Details"
-msgstr "Данни за отдалеченото хранилище"
-
-#: lib/remote_add.tcl:41 lib/tools_dlg.tcl:51 lib/branch_create.tcl:44
-msgid "Name:"
-msgstr "Име:"
-
-#: lib/remote_add.tcl:50
-msgid "Location:"
-msgstr "МеÑтоположение:"
-
-#: lib/remote_add.tcl:60
-msgid "Further Action"
-msgstr "Следващо дейÑтвие"
-
-#: lib/remote_add.tcl:63
-msgid "Fetch Immediately"
-msgstr "Ðезабавно доÑтавÑне"
-
-#: lib/remote_add.tcl:69
-msgid "Initialize Remote Repository and Push"
-msgstr "Инициализиране на отдалеченото хранилище и изтлаÑкване на промените"
-
-#: lib/remote_add.tcl:75
-msgid "Do Nothing Else Now"
-msgstr "Да не Ñе прави нищо"
-
-#: lib/remote_add.tcl:100
-msgid "Please supply a remote name."
-msgstr "Задайте име за отдалеченото хранилище."
-
-#: lib/remote_add.tcl:113
-#, tcl-format
-msgid "'%s' is not an acceptable remote name."
-msgstr "Отдалечено хранилище не може да Ñе казва „%s“."
-
-#: lib/remote_add.tcl:124
-#, tcl-format
-msgid "Failed to add remote '%s' of location '%s'."
-msgstr "ÐеуÑпешно добавÑне на отдалеченото хранилище „%s“ от Ð°Ð´Ñ€ÐµÑ â€ž%s“."
-
-#: lib/remote_add.tcl:133
-#, tcl-format
-msgid "Fetching the %s"
-msgstr "ДоÑтавÑне на „%s“"
-
-#: lib/remote_add.tcl:156
-#, tcl-format
-msgid "Do not know how to initialize repository at location '%s'."
-msgstr "Хранилището Ñ Ð¼ÐµÑтоположение „%s“ не може да Ñе инициализира."
-
-#: lib/remote_add.tcl:163
-#, tcl-format
-msgid "Setting up the %s (at %s)"
-msgstr "ДобавÑне на хранилище „%s“ (Ñ Ð°Ð´Ñ€ÐµÑ â€ž%s“)"
-
-#: lib/browser.tcl:17
-msgid "Starting..."
-msgstr "Стартиране…"
-
-#: lib/browser.tcl:27
-#, tcl-format
-msgid "%s (%s): File Browser"
-msgstr "%s (%s): Файлов браузър"
-
-#: lib/browser.tcl:132 lib/browser.tcl:149
-#, tcl-format
-msgid "Loading %s..."
-msgstr "Зареждане на „%s“…"
-
-#: lib/browser.tcl:193
-msgid "[Up To Parent]"
-msgstr "[Към родителÑ]"
-
-#: lib/browser.tcl:275
-#, tcl-format
-msgid "%s (%s): Browse Branch Files"
-msgstr "%s (%s): Разглеждане на файловете в клона"
-
-#: lib/browser.tcl:282
-msgid "Browse Branch Files"
-msgstr "Разглеждане на файловете в клона"
-
-#: lib/browser.tcl:288 lib/choose_repository.tcl:437
-#: lib/choose_repository.tcl:524 lib/choose_repository.tcl:533
-#: lib/choose_repository.tcl:1115
-msgid "Browse"
-msgstr "Разглеждане"
-
-#: lib/browser.tcl:297 lib/branch_checkout.tcl:35 lib/tools_dlg.tcl:321
-msgid "Revision"
-msgstr "ВерÑиÑ"
-
-#: lib/index.tcl:6
-msgid "Unable to unlock the index."
-msgstr "ИндекÑÑŠÑ‚ не може да Ñе отключи."
-
-#: lib/index.tcl:30
-msgid "Index Error"
-msgstr "Грешка в индекÑа"
-
-#: lib/index.tcl:32
-msgid ""
-"Updating the Git index failed. A rescan will be automatically started to "
-"resynchronize git-gui."
-msgstr ""
-"ÐеуÑпешно обновÑване на индекÑа на Git. Ðвтоматично ще започне нова проверка "
-"за Ñинхронизирането на git-gui."
-
-#: lib/index.tcl:43
-msgid "Continue"
-msgstr "Продължаване"
-
-#: lib/index.tcl:46
-msgid "Unlock Index"
-msgstr "Отключване на индекÑа"
-
-#: lib/index.tcl:77 lib/index.tcl:146 lib/index.tcl:220 lib/index.tcl:587
-#: lib/choose_repository.tcl:999
-msgid "files"
-msgstr "файлове"
-
-#: lib/index.tcl:326
-msgid "Unstaging selected files from commit"
-msgstr "Изваждане на избраните файлове от подаването"
-
-#: lib/index.tcl:330
-#, tcl-format
-msgid "Unstaging %s from commit"
-msgstr "Изваждане на „%s“ от подаването"
-
-#: lib/index.tcl:369
-msgid "Ready to commit."
-msgstr "ГотовноÑÑ‚ за подаване."
-
-#: lib/index.tcl:378
-msgid "Adding selected files"
-msgstr "ДобавÑне на избраните файлове"
-
-#: lib/index.tcl:382
-#, tcl-format
-msgid "Adding %s"
-msgstr "ДобавÑне на „%s“"
-
-#: lib/index.tcl:412
-#, tcl-format
-msgid "Stage %d untracked files?"
-msgstr "Да Ñе добавÑÑ‚ ли %d неÑледени файла към индекÑа?"
-
-#: lib/index.tcl:420
-msgid "Adding all changed files"
-msgstr "ДобавÑне на вÑички променени файлове"
-
-#: lib/index.tcl:503
-#, tcl-format
-msgid "Revert changes in file %s?"
-msgstr "Да Ñе махнат ли промените във файла „%s“?"
-
-#: lib/index.tcl:508
-#, tcl-format
-msgid "Revert changes in these %i files?"
-msgstr "Да Ñе махнат ли промените в тези %i файла?"
-
-#: lib/index.tcl:517
-msgid "Any unstaged changes will be permanently lost by the revert."
-msgstr ""
-"Ð’Ñички промени, които не Ñа били добавени в индекÑа, ще Ñе загубÑÑ‚ "
-"безвъзвратно."
-
-#: lib/index.tcl:520 lib/index.tcl:563
-msgid "Do Nothing"
-msgstr "Ðищо да не Ñе прави"
-
-#: lib/index.tcl:545
-#, tcl-format
-msgid "Delete untracked file %s?"
-msgstr "Да Ñе изтрие ли неÑледениÑÑ‚ файл „%s“?"
-
-#: lib/index.tcl:550
-#, tcl-format
-msgid "Delete these %i untracked files?"
-msgstr "Да Ñе изтриÑÑ‚ ли тези %d неÑледени файла?"
-
-#: lib/index.tcl:560
-msgid "Files will be permanently deleted."
-msgstr "Файловете ще Ñе изтриÑÑ‚ окончателно."
-
-#: lib/index.tcl:564
-msgid "Delete Files"
-msgstr "Изтриване на файлове"
-
-#: lib/index.tcl:586
-msgid "Deleting"
-msgstr "Изтриване"
-
-#: lib/index.tcl:665
-msgid "Encountered errors deleting files:\n"
-msgstr "Грешки при изтриване на файловете:\n"
-
-#: lib/index.tcl:674
-#, tcl-format
-msgid "None of the %d selected files could be deleted."
-msgstr "Ðикой от избраните %d файла не бе изтрит."
-
-#: lib/index.tcl:679
-#, tcl-format
-msgid "%d of the %d selected files could not be deleted."
-msgstr "%d от избраните %d файла не бÑха изтрити."
-
-#: lib/index.tcl:726
-msgid "Reverting selected files"
-msgstr "Махане на промените в избраните файлове"
-
-#: lib/index.tcl:730
-#, tcl-format
-msgid "Reverting %s"
-msgstr "Махане на промените в „%s“"
-
-#: lib/branch_checkout.tcl:16
-#, tcl-format
-msgid "%s (%s): Checkout Branch"
-msgstr "%s (%s): Клон за изтеглÑне"
-
-#: lib/branch_checkout.tcl:21
-msgid "Checkout Branch"
-msgstr "Клон за изтеглÑне"
-
-#: lib/branch_checkout.tcl:26
-msgid "Checkout"
-msgstr "ИзтеглÑне"
-
-#: lib/branch_checkout.tcl:39 lib/option.tcl:310 lib/branch_create.tcl:69
-msgid "Options"
-msgstr "Опции"
-
-#: lib/branch_checkout.tcl:42 lib/branch_create.tcl:92
-msgid "Fetch Tracking Branch"
-msgstr "ИзтеглÑне на промените от ÑÐ»ÐµÐ´ÐµÐ½Ð¸Ñ ÐºÐ»Ð¾Ð½"
-
-#: lib/branch_checkout.tcl:47
-msgid "Detach From Local Branch"
-msgstr "Изтриване от Ð»Ð¾ÐºÐ°Ð»Ð½Ð¸Ñ ÐºÐ»Ð¾Ð½"
-
-#: lib/status_bar.tcl:263
-#, tcl-format
-msgid "%s ... %*i of %*i %s (%3i%%)"
-msgstr "%s… %*i от общо %*i %s (%3i%%)"
-
-#: lib/remote.tcl:200
-msgid "Push to"
-msgstr "ИзтлаÑкване към"
-
-#: lib/remote.tcl:218
-msgid "Remove Remote"
-msgstr "Премахване на отдалечено хранилище"
-
-#: lib/remote.tcl:223
-msgid "Prune from"
-msgstr "ОкаÑтрÑне от"
-
-#: lib/remote.tcl:228
-msgid "Fetch from"
-msgstr "ДоÑтавÑне от"
-
-#: lib/remote.tcl:249 lib/remote.tcl:253 lib/remote.tcl:258 lib/remote.tcl:264
-msgid "All"
-msgstr "Ð’Ñички"
-
-#: lib/branch_rename.tcl:15
-#, tcl-format
-msgid "%s (%s): Rename Branch"
-msgstr "%s (%s): Преименуване на клон"
-
-#: lib/branch_rename.tcl:23
-msgid "Rename Branch"
-msgstr "Преименуване на клон"
-
-#: lib/branch_rename.tcl:28
-msgid "Rename"
-msgstr "Преименуване"
-
-#: lib/branch_rename.tcl:38
-msgid "Branch:"
-msgstr "Клон:"
-
-#: lib/branch_rename.tcl:46
-msgid "New Name:"
-msgstr "Ðово име:"
-
-#: lib/branch_rename.tcl:81
-msgid "Please select a branch to rename."
-msgstr "Изберете клон за преименуване."
-
-#: lib/branch_rename.tcl:92 lib/branch_create.tcl:154
-msgid "Please supply a branch name."
-msgstr "Дайте име на клона."
-
-#: lib/branch_rename.tcl:112 lib/branch_create.tcl:165
-#, tcl-format
-msgid "'%s' is not an acceptable branch name."
-msgstr "„%s“ не може да Ñе използва за име на клон."
-
-#: lib/branch_rename.tcl:123
-#, tcl-format
-msgid "Failed to rename '%s'."
-msgstr "ÐеуÑпешно преименуване на „%s“."
-
#: lib/choose_font.tcl:41
msgid "Select"
msgstr "Избор"
@@ -1267,518 +1084,11 @@ msgstr ""
"Това е примерен текÑÑ‚.\n"
"Ðко ви хареÑва как изглежда, изберете шрифта."
-#: lib/option.tcl:11
-#, tcl-format
-msgid "Invalid global encoding '%s'"
-msgstr "Ðеправилно глобално кодиране „%s“"
-
-#: lib/option.tcl:19
-#, tcl-format
-msgid "Invalid repo encoding '%s'"
-msgstr "Ðеправилно кодиране „%s“ на хранилището"
-
-#: lib/option.tcl:119
-msgid "Restore Defaults"
-msgstr "Стандартни наÑтройки"
-
-#: lib/option.tcl:123
-msgid "Save"
-msgstr "Запазване"
-
-#: lib/option.tcl:133
-#, tcl-format
-msgid "%s Repository"
-msgstr "Хранилище „%s“"
-
-#: lib/option.tcl:134
-msgid "Global (All Repositories)"
-msgstr "Глобално (за вÑички хранилища)"
-
-#: lib/option.tcl:140
-msgid "User Name"
-msgstr "ПотребителÑко име"
-
-#: lib/option.tcl:141
-msgid "Email Address"
-msgstr "ÐÐ´Ñ€ÐµÑ Ð½Ð° е-поща"
-
-#: lib/option.tcl:143
-msgid "Summarize Merge Commits"
-msgstr "Обобщаване на подаваниÑта при Ñливане"
-
-#: lib/option.tcl:144
-msgid "Merge Verbosity"
-msgstr "ПодробноÑти при ÑливаниÑта"
-
-#: lib/option.tcl:145
-msgid "Show Diffstat After Merge"
-msgstr "Извеждане на ÑтатиÑтика Ñлед ÑливаниÑта"
-
-#: lib/option.tcl:146
-msgid "Use Merge Tool"
-msgstr "Използване на програма за Ñливане"
-
-#: lib/option.tcl:148
-msgid "Trust File Modification Timestamps"
-msgstr "Доверие във времето на промÑна на файловете"
-
-#: lib/option.tcl:149
-msgid "Prune Tracking Branches During Fetch"
-msgstr "ОкаÑтрÑне на ÑледÑщите клонове при доÑтавÑне"
-
-#: lib/option.tcl:150
-msgid "Match Tracking Branches"
-msgstr "ÐапаÑване на ÑледÑщите клонове"
-
-#: lib/option.tcl:151
-msgid "Use Textconv For Diffs and Blames"
-msgstr "Използване на „textconv“ за разликите и анотирането"
-
-#: lib/option.tcl:152
-msgid "Blame Copy Only On Changed Files"
-msgstr "Ðнотиране на копието Ñамо по променените файлове"
-
-#: lib/option.tcl:153
-msgid "Maximum Length of Recent Repositories List"
-msgstr "МакÑимален брой на ÑпиÑъка „Скоро ползвани“ хранилища"
-
-#: lib/option.tcl:154
-msgid "Minimum Letters To Blame Copy On"
-msgstr "Минимален брой знаци за анотиране на копието"
-
-#: lib/option.tcl:155
-msgid "Blame History Context Radius (days)"
-msgstr "ИÑторичеÑки обхват за анотиране в дни"
-
-#: lib/option.tcl:156
-msgid "Number of Diff Context Lines"
-msgstr "Брой редове за контекÑта на разликите"
-
-#: lib/option.tcl:157
-msgid "Additional Diff Parameters"
-msgstr "Ðргументи към командата за разликите"
-
-#: lib/option.tcl:158
-msgid "Commit Message Text Width"
-msgstr "Широчина на текÑта на Ñъобщението при подаване"
-
-#: lib/option.tcl:159
-msgid "New Branch Name Template"
-msgstr "Шаблон за името на новите клони"
-
-#: lib/option.tcl:160
-msgid "Default File Contents Encoding"
-msgstr "Кодиране на файловете"
-
-#: lib/option.tcl:161
-msgid "Warn before committing to a detached head"
-msgstr "Предупреждаване при подаване към неÑвързан указател"
-
-#: lib/option.tcl:162
-msgid "Staging of untracked files"
-msgstr "ДобавÑне на неÑледените файлове към индекÑа"
-
-#: lib/option.tcl:163
-msgid "Show untracked files"
-msgstr "Показване на неÑледените файлове"
-
-#: lib/option.tcl:164
-msgid "Tab spacing"
-msgstr "Ширина на табулациÑта"
-
-#: lib/option.tcl:182 lib/option.tcl:197 lib/option.tcl:220 lib/option.tcl:282
-#: lib/database.tcl:57
-#, tcl-format
-msgid "%s:"
-msgstr "%s:"
-
-#: lib/option.tcl:210
-msgid "Change"
-msgstr "СмÑна"
-
-#: lib/option.tcl:254
-msgid "Spelling Dictionary:"
-msgstr "ПравопиÑен речник:"
-
-#: lib/option.tcl:284
-msgid "Change Font"
-msgstr "СмÑна на шрифта"
-
-#: lib/option.tcl:288
-#, tcl-format
-msgid "Choose %s"
-msgstr "Избор на „%s“"
-
-#: lib/option.tcl:294
-msgid "pt."
-msgstr "тчк."
-
-#: lib/option.tcl:308
-msgid "Preferences"
-msgstr "ÐаÑтройки"
-
-#: lib/option.tcl:345
-msgid "Failed to completely save options:"
-msgstr "ÐеуÑпешно запазване на наÑтройките:"
-
-#: lib/encoding.tcl:443
-msgid "Default"
-msgstr "Стандартното"
-
-#: lib/encoding.tcl:448
-#, tcl-format
-msgid "System (%s)"
-msgstr "СиÑтемното (%s)"
-
-#: lib/encoding.tcl:459 lib/encoding.tcl:465
-msgid "Other"
-msgstr "Друго"
-
-#: lib/tools.tcl:76
-#, tcl-format
-msgid "Running %s requires a selected file."
-msgstr "За изпълнението на „%s“ трÑбва да изберете файл."
-
-#: lib/tools.tcl:92
-#, tcl-format
-msgid "Are you sure you want to run %1$s on file \"%2$s\"?"
-msgstr "Сигурни ли Ñте, че иÑкате да изпълните „%1$s“ върху файла „%2$s“?"
-
-#: lib/tools.tcl:96
-#, tcl-format
-msgid "Are you sure you want to run %s?"
-msgstr "Сигурни ли Ñте, че иÑкате да изпълните „%s“?"
-
-#: lib/tools.tcl:118
-#, tcl-format
-msgid "Tool: %s"
-msgstr "Команда: %s"
-
-#: lib/tools.tcl:119
-#, tcl-format
-msgid "Running: %s"
-msgstr "Изпълнение: %s"
-
-#: lib/tools.tcl:158
-#, tcl-format
-msgid "Tool completed successfully: %s"
-msgstr "Командата завърши уÑпешно: %s"
-
-#: lib/tools.tcl:160
-#, tcl-format
-msgid "Tool failed: %s"
-msgstr "Командата върна грешка: %s"
-
-#: lib/mergetool.tcl:8
-msgid "Force resolution to the base version?"
-msgstr "Да Ñе използва базовата верÑиÑ"
-
-#: lib/mergetool.tcl:9
-msgid "Force resolution to this branch?"
-msgstr "Да Ñе използва верÑиÑта от този клон"
-
-#: lib/mergetool.tcl:10
-msgid "Force resolution to the other branch?"
-msgstr "Да Ñе използва верÑиÑта от Ð´Ñ€ÑƒÐ³Ð¸Ñ ÐºÐ»Ð¾Ð½"
-
-#: lib/mergetool.tcl:14
-#, tcl-format
-msgid ""
-"Note that the diff shows only conflicting changes.\n"
-"\n"
-"%s will be overwritten.\n"
-"\n"
-"This operation can be undone only by restarting the merge."
-msgstr ""
-"Разликата показва Ñамо разликите Ñ ÐºÐ¾Ð½Ñ„Ð»Ð¸ÐºÑ‚.\n"
-"\n"
-"Файлът „%s“ ще Ñе презапише.\n"
-"\n"
-"Тази Ð¾Ð¿ÐµÑ€Ð°Ñ†Ð¸Ñ Ð¼Ð¾Ð¶Ðµ да Ñе отмени Ñамо чрез започване на Ñливането наново."
-
-#: lib/mergetool.tcl:45
-#, tcl-format
-msgid "File %s seems to have unresolved conflicts, still stage?"
-msgstr ""
-"Изглежда, че вÑе още има некоригирани конфликти във файла „%s“. Да Ñе добави "
-"ли файлът към индекÑа?"
-
-#: lib/mergetool.tcl:60
-#, tcl-format
-msgid "Adding resolution for %s"
-msgstr "ДобавÑне на ÐºÐ¾Ñ€ÐµÐºÑ†Ð¸Ñ Ð½Ð° конфликтите в „%s“"
-
-#: lib/mergetool.tcl:141
-msgid "Cannot resolve deletion or link conflicts using a tool"
-msgstr ""
-"Конфликтите при Ñимволни връзки или изтриване не може да Ñе коригират Ñ "
-"външна програма."
-
-#: lib/mergetool.tcl:146
-msgid "Conflict file does not exist"
-msgstr "Файлът, в който е конфликтът, не ÑъщеÑтвува"
-
-#: lib/mergetool.tcl:246
-#, tcl-format
-msgid "Not a GUI merge tool: '%s'"
-msgstr "Това не е графична програма за Ñливане: „%s“"
-
-#: lib/mergetool.tcl:275
-#, tcl-format
-msgid "Unsupported merge tool '%s'"
-msgstr "Ðеподдържана програма за Ñливане: „%s“"
-
-#: lib/mergetool.tcl:310
-msgid "Merge tool is already running, terminate it?"
-msgstr "Програмата за Ñливане вече е Ñтартирана. Да Ñе изключи ли?"
-
-#: lib/mergetool.tcl:330
-#, tcl-format
-msgid ""
-"Error retrieving versions:\n"
-"%s"
-msgstr ""
-"Грешка при изтеглÑнето на верÑии:\n"
-"%s"
-
-#: lib/mergetool.tcl:350
-#, tcl-format
-msgid ""
-"Could not start the merge tool:\n"
-"\n"
-"%s"
-msgstr ""
-"Програмата за Ñливане не може да Ñе Ñтартира:\n"
-"\n"
-"%s"
-
-#: lib/mergetool.tcl:354
-msgid "Running merge tool..."
-msgstr "Стартиране на програмата за Ñливане…"
-
-#: lib/mergetool.tcl:382 lib/mergetool.tcl:390
-msgid "Merge tool failed."
-msgstr "Грешка в програмата за Ñливане."
-
-#: lib/tools_dlg.tcl:22
-#, tcl-format
-msgid "%s (%s): Add Tool"
-msgstr "%s (%s): ДобавÑне на команда"
-
-#: lib/tools_dlg.tcl:28
-msgid "Add New Tool Command"
-msgstr "ДобавÑне на команда"
-
-#: lib/tools_dlg.tcl:34
-msgid "Add globally"
-msgstr "Глобално добавÑне"
-
-#: lib/tools_dlg.tcl:46
-msgid "Tool Details"
-msgstr "ПодробноÑти за командата"
-
-#: lib/tools_dlg.tcl:49
-msgid "Use '/' separators to create a submenu tree:"
-msgstr "За Ñъздаване на подменюта използвайте знака „/“ за разделител:"
-
-#: lib/tools_dlg.tcl:60
-msgid "Command:"
-msgstr "Команда:"
-
-#: lib/tools_dlg.tcl:71
-msgid "Show a dialog before running"
-msgstr "Преди изпълнение да Ñе извежда диалогов прозорец"
-
-#: lib/tools_dlg.tcl:77
-msgid "Ask the user to select a revision (sets $REVISION)"
-msgstr "ПотребителÑÑ‚ да укаже верÑÐ¸Ñ (задаване на променливата $REVISION)"
-
-#: lib/tools_dlg.tcl:82
-msgid "Ask the user for additional arguments (sets $ARGS)"
-msgstr ""
-"ПотребителÑÑ‚ да укаже допълнителни аргументи (задаване на променливата $ARGS)"
-
-#: lib/tools_dlg.tcl:89
-msgid "Don't show the command output window"
-msgstr "Без показване на прозорец Ñ Ð¸Ð·Ñ…Ð¾Ð´Ð° от командата"
-
-#: lib/tools_dlg.tcl:94
-msgid "Run only if a diff is selected ($FILENAME not empty)"
-msgstr ""
-"Стартиране Ñамо Ñлед избор на разлика (променливата $FILENAME не е празна)"
-
-#: lib/tools_dlg.tcl:118
-msgid "Please supply a name for the tool."
-msgstr "Задайте име за командата."
-
-#: lib/tools_dlg.tcl:126
-#, tcl-format
-msgid "Tool '%s' already exists."
-msgstr "Командата „%s“ вече ÑъщеÑтвува."
-
-#: lib/tools_dlg.tcl:148
-#, tcl-format
-msgid ""
-"Could not add tool:\n"
-"%s"
-msgstr ""
-"Командата не може да Ñе добави:\n"
-"%s"
-
-#: lib/tools_dlg.tcl:187
-#, tcl-format
-msgid "%s (%s): Remove Tool"
-msgstr "%s (%s): Премахване на команда"
-
-#: lib/tools_dlg.tcl:193
-msgid "Remove Tool Commands"
-msgstr "Премахване на команди"
-
-#: lib/tools_dlg.tcl:198
-msgid "Remove"
-msgstr "Премахване"
-
-#: lib/tools_dlg.tcl:231
-msgid "(Blue denotes repository-local tools)"
-msgstr "(командите към локалното хранилище Ñа обозначени в Ñиньо)"
-
-#: lib/tools_dlg.tcl:283
-#, tcl-format
-msgid "%s (%s):"
-msgstr "%s (%s):"
-
-#: lib/tools_dlg.tcl:292
-#, tcl-format
-msgid "Run Command: %s"
-msgstr "Изпълнение на командата „%s“"
-
-#: lib/tools_dlg.tcl:306
-msgid "Arguments"
-msgstr "Ðргументи"
-
-#: lib/tools_dlg.tcl:341
-msgid "OK"
-msgstr "Добре"
-
-#: lib/search.tcl:48
-msgid "Find:"
-msgstr "ТърÑене:"
-
-#: lib/search.tcl:50
-msgid "Next"
-msgstr "Следваща поÑва"
-
-#: lib/search.tcl:51
-msgid "Prev"
-msgstr "Предишна поÑва"
-
-#: lib/search.tcl:52
-msgid "RegExp"
-msgstr "РегИзр"
-
-#: lib/search.tcl:54
-msgid "Case"
-msgstr "Главни/Малки"
-
-#: lib/shortcut.tcl:8 lib/shortcut.tcl:43 lib/shortcut.tcl:75
-#, tcl-format
-msgid "%s (%s): Create Desktop Icon"
-msgstr "%s (%s): ДобавÑне на икона на Ñ€Ð°Ð±Ð¾Ñ‚Ð½Ð¸Ñ Ð¿Ð»Ð¾Ñ‚"
-
-#: lib/shortcut.tcl:24 lib/shortcut.tcl:65
-msgid "Cannot write shortcut:"
-msgstr "Клавишната ÐºÐ¾Ð¼Ð±Ð¸Ð½Ð°Ñ†Ð¸Ñ Ð½Ðµ може да Ñе запази:"
-
-#: lib/shortcut.tcl:140
-msgid "Cannot write icon:"
-msgstr "Иконата не може да Ñе запази:"
-
-#: lib/remote_branch_delete.tcl:29
-#, tcl-format
-msgid "%s (%s): Delete Branch Remotely"
-msgstr "%s (%s): Изтриване на Ð¾Ñ‚Ð´Ð°Ð»ÐµÑ‡ÐµÐ½Ð¸Ñ ÐºÐ»Ð¾Ð½"
-
-#: lib/remote_branch_delete.tcl:34
-msgid "Delete Branch Remotely"
-msgstr "Изтриване на Ð¾Ñ‚Ð´Ð°Ð»ÐµÑ‡ÐµÐ½Ð¸Ñ ÐºÐ»Ð¾Ð½"
-
-#: lib/remote_branch_delete.tcl:48
-msgid "From Repository"
-msgstr "От хранилище"
-
-#: lib/remote_branch_delete.tcl:88
-msgid "Branches"
-msgstr "Клони"
-
-#: lib/remote_branch_delete.tcl:110
-msgid "Delete Only If"
-msgstr "Изтриване, Ñамо ако"
-
-#: lib/remote_branch_delete.tcl:112
-msgid "Merged Into:"
-msgstr "СлÑÑ‚ в:"
-
-#: lib/remote_branch_delete.tcl:120 lib/branch_delete.tcl:53
-msgid "Always (Do not perform merge checks)"
-msgstr "Винаги (без проверка за Ñливане)"
-
-#: lib/remote_branch_delete.tcl:153
-msgid "A branch is required for 'Merged Into'."
-msgstr "За данните „СлÑÑ‚ в“ е необходимо да зададете клон."
-
-#: lib/remote_branch_delete.tcl:185
-#, tcl-format
-msgid ""
-"The following branches are not completely merged into %s:\n"
-"\n"
-" - %s"
-msgstr ""
-"Следните клони не Ñа Ñлети напълно в „%s“:\n"
-"\n"
-" â— %s"
-
-#: lib/remote_branch_delete.tcl:190
-#, tcl-format
-msgid ""
-"One or more of the merge tests failed because you have not fetched the "
-"necessary commits. Try fetching from %s first."
-msgstr ""
-"Поне една от пробите за Ñливане е неуÑпешна, защото не Ñте доÑтавили вÑички "
-"необходими подаваниÑ. Пробвайте първо да доÑтавите подаваниÑта от „%s“."
-
-#: lib/remote_branch_delete.tcl:208
-msgid "Please select one or more branches to delete."
-msgstr "Изберете поне един клон за изтриване."
-
-#: lib/remote_branch_delete.tcl:218 lib/branch_delete.tcl:115
-msgid ""
-"Recovering deleted branches is difficult.\n"
-"\n"
-"Delete the selected branches?"
-msgstr ""
-"ВъзÑтановÑването на изтрити клони може да е трудно.\n"
-"\n"
-"Сигурни ли Ñте, че иÑкате да триете?"
-
-#: lib/remote_branch_delete.tcl:227
-#, tcl-format
-msgid "Deleting branches from %s"
-msgstr "Изтриване на клони от „%s“"
-
-#: lib/remote_branch_delete.tcl:300
-msgid "No repository selected."
-msgstr "Ðе е избрано хранилище."
-
-#: lib/remote_branch_delete.tcl:305
-#, tcl-format
-msgid "Scanning %s..."
-msgstr "ПретърÑване на „%s“…"
-
#: lib/choose_repository.tcl:45
msgid "Git Gui"
msgstr "ГПИ на Git"
-#: lib/choose_repository.tcl:104 lib/choose_repository.tcl:427
+#: lib/choose_repository.tcl:104 lib/choose_repository.tcl:391
msgid "Create New Repository"
msgstr "Създаване на ново хранилище"
@@ -1786,7 +1096,7 @@ msgstr "Създаване на ново хранилище"
msgid "New..."
msgstr "Ðово…"
-#: lib/choose_repository.tcl:117 lib/choose_repository.tcl:511
+#: lib/choose_repository.tcl:117 lib/choose_repository.tcl:475
msgid "Clone Existing Repository"
msgstr "Клониране на ÑъщеÑтвуващо хранилище"
@@ -1794,7 +1104,7 @@ msgstr "Клониране на ÑъщеÑтвуващо хранилище"
msgid "Clone..."
msgstr "Клониране…"
-#: lib/choose_repository.tcl:135 lib/choose_repository.tcl:1105
+#: lib/choose_repository.tcl:135 lib/choose_repository.tcl:1059
msgid "Open Existing Repository"
msgstr "ОтварÑне на ÑъщеÑтвуващо хранилище"
@@ -1810,556 +1120,211 @@ msgstr "Скоро ползвани"
msgid "Open Recent Repository:"
msgstr "ОтварÑне на хранилище ползвано наÑкоро:"
-#: lib/choose_repository.tcl:331 lib/choose_repository.tcl:338
-#: lib/choose_repository.tcl:345
+#: lib/choose_repository.tcl:328 lib/choose_repository.tcl:335
+#: lib/choose_repository.tcl:342
#, tcl-format
msgid "Failed to create repository %s:"
msgstr "ÐеуÑпешно Ñъздаване на хранилището „%s“:"
-#: lib/choose_repository.tcl:422 lib/branch_create.tcl:33
-msgid "Create"
-msgstr "Създаване"
-
-#: lib/choose_repository.tcl:432
+#: lib/choose_repository.tcl:396
msgid "Directory:"
msgstr "ДиректориÑ:"
-#: lib/choose_repository.tcl:462 lib/choose_repository.tcl:588
-#: lib/choose_repository.tcl:1139
+#: lib/choose_repository.tcl:426 lib/choose_repository.tcl:552
+#: lib/choose_repository.tcl:1093
msgid "Git Repository"
msgstr "Хранилище на Git"
-#: lib/choose_repository.tcl:487
+#: lib/choose_repository.tcl:451
#, tcl-format
msgid "Directory %s already exists."
msgstr "Вече ÑъщеÑтвува Ð´Ð¸Ñ€ÐµÐºÑ‚Ð¾Ñ€Ð¸Ñ â€ž%s“."
-#: lib/choose_repository.tcl:491
+#: lib/choose_repository.tcl:455
#, tcl-format
msgid "File %s already exists."
msgstr "Вече ÑъщеÑтвува файл „%s“."
-#: lib/choose_repository.tcl:506
+#: lib/choose_repository.tcl:470
msgid "Clone"
msgstr "Клониране"
-#: lib/choose_repository.tcl:519
+#: lib/choose_repository.tcl:483
msgid "Source Location:"
msgstr "ÐÐ´Ñ€ÐµÑ Ð½Ð° източника:"
-#: lib/choose_repository.tcl:528
+#: lib/choose_repository.tcl:492
msgid "Target Directory:"
msgstr "Целева директориÑ:"
-#: lib/choose_repository.tcl:538
+#: lib/choose_repository.tcl:502
msgid "Clone Type:"
msgstr "Вид клониране:"
-#: lib/choose_repository.tcl:543
+#: lib/choose_repository.tcl:507
msgid "Standard (Fast, Semi-Redundant, Hardlinks)"
msgstr "Стандартно (бързо, чаÑтично ÑподелÑне на файлове, твърди връзки)"
-#: lib/choose_repository.tcl:548
+#: lib/choose_repository.tcl:512
msgid "Full Copy (Slower, Redundant Backup)"
msgstr "Пълно (бавно, пълноценно резервно копие)"
-#: lib/choose_repository.tcl:553
+#: lib/choose_repository.tcl:517
msgid "Shared (Fastest, Not Recommended, No Backup)"
msgstr "Споделено (най-бързо, не Ñе препоръчва, не прави резервно копие)"
-#: lib/choose_repository.tcl:560
+#: lib/choose_repository.tcl:524
msgid "Recursively clone submodules too"
msgstr "РекурÑивно клониране и на подмодулите"
-#: lib/choose_repository.tcl:594 lib/choose_repository.tcl:641
-#: lib/choose_repository.tcl:790 lib/choose_repository.tcl:864
-#: lib/choose_repository.tcl:1145 lib/choose_repository.tcl:1153
+#: lib/choose_repository.tcl:558 lib/choose_repository.tcl:605
+#: lib/choose_repository.tcl:744 lib/choose_repository.tcl:818
+#: lib/choose_repository.tcl:1099 lib/choose_repository.tcl:1107
#, tcl-format
msgid "Not a Git repository: %s"
msgstr "Това не е хранилище на Git: %s"
-#: lib/choose_repository.tcl:630
+#: lib/choose_repository.tcl:594
msgid "Standard only available for local repository."
msgstr "Само локални хранилища може да Ñе клонират Ñтандартно"
-#: lib/choose_repository.tcl:634
+#: lib/choose_repository.tcl:598
msgid "Shared only available for local repository."
msgstr "Само локални хранилища може да Ñе клонират Ñподелено"
-#: lib/choose_repository.tcl:655
+#: lib/choose_repository.tcl:613
#, tcl-format
msgid "Location %s already exists."
msgstr "МеÑтоположението „%s“ вече ÑъщеÑтвува."
-#: lib/choose_repository.tcl:666
+#: lib/choose_repository.tcl:624
msgid "Failed to configure origin"
msgstr "ÐеуÑпешно наÑтройване на хранилището-източник"
-#: lib/choose_repository.tcl:678
+#: lib/choose_repository.tcl:636
msgid "Counting objects"
msgstr "ПреброÑване на обекти"
-#: lib/choose_repository.tcl:679
+#: lib/choose_repository.tcl:637
msgid "buckets"
msgstr "клетки"
-#: lib/choose_repository.tcl:703
+#: lib/choose_repository.tcl:657
#, tcl-format
msgid "Unable to copy objects/info/alternates: %s"
msgstr "Обектите/ИнформациÑта/Синонимите не може да Ñе копират: %s"
-#: lib/choose_repository.tcl:740
+#: lib/choose_repository.tcl:694
#, tcl-format
msgid "Nothing to clone from %s."
msgstr "ÐÑма какво да Ñе клонира от „%s“."
-#: lib/choose_repository.tcl:742 lib/choose_repository.tcl:962
-#: lib/choose_repository.tcl:974
+#: lib/choose_repository.tcl:696 lib/choose_repository.tcl:916
+#: lib/choose_repository.tcl:928
msgid "The 'master' branch has not been initialized."
msgstr "ОÑновниÑÑ‚ клон — „master“ не е инициализиран."
-#: lib/choose_repository.tcl:755
+#: lib/choose_repository.tcl:709
msgid "Hardlinks are unavailable. Falling back to copying."
msgstr "Ðе Ñе поддържат твърди връзки. Преминава Ñе към копиране."
-#: lib/choose_repository.tcl:769
+#: lib/choose_repository.tcl:723
#, tcl-format
msgid "Cloning from %s"
msgstr "Клониране на „%s“"
-#: lib/choose_repository.tcl:800
+#: lib/choose_repository.tcl:754
msgid "Copying objects"
msgstr "Копиране на обекти"
-#: lib/choose_repository.tcl:801
+#: lib/choose_repository.tcl:755
msgid "KiB"
msgstr "KiB"
-#: lib/choose_repository.tcl:825
+#: lib/choose_repository.tcl:779
#, tcl-format
msgid "Unable to copy object: %s"
msgstr "ÐеуÑпешно копиране на обект: %s"
-#: lib/choose_repository.tcl:837
+#: lib/choose_repository.tcl:791
msgid "Linking objects"
msgstr "Създаване на връзки към обектите"
-#: lib/choose_repository.tcl:838
+#: lib/choose_repository.tcl:792
msgid "objects"
msgstr "обекти"
-#: lib/choose_repository.tcl:846
+#: lib/choose_repository.tcl:800
#, tcl-format
msgid "Unable to hardlink object: %s"
msgstr "ÐеуÑпешно Ñъздаване на твърда връзка към обект: %s"
-#: lib/choose_repository.tcl:903
+#: lib/choose_repository.tcl:857
msgid "Cannot fetch branches and objects. See console output for details."
msgstr ""
"Клоните и обектите не може да Ñе изтеглÑÑ‚. За повече Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¿Ð¾Ð³Ð»ÐµÐ´Ð½ÐµÑ‚Ðµ "
"изхода на конзолата."
-#: lib/choose_repository.tcl:914
+#: lib/choose_repository.tcl:868
msgid "Cannot fetch tags. See console output for details."
msgstr ""
"Етикетите не може да Ñе изтеглÑÑ‚. За повече Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¿Ð¾Ð³Ð»ÐµÐ´Ð½ÐµÑ‚Ðµ изхода на "
"конзолата."
-#: lib/choose_repository.tcl:938
+#: lib/choose_repository.tcl:892
msgid "Cannot determine HEAD. See console output for details."
msgstr ""
"Върхът „HEAD“ не може да Ñе определи. За повече Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¿Ð¾Ð³Ð»ÐµÐ´Ð½ÐµÑ‚Ðµ изхода "
"на конзолата."
-#: lib/choose_repository.tcl:947
+#: lib/choose_repository.tcl:901
#, tcl-format
msgid "Unable to cleanup %s"
msgstr "„%s“ не може да Ñе изчиÑти"
-#: lib/choose_repository.tcl:953
+#: lib/choose_repository.tcl:907
msgid "Clone failed."
msgstr "ÐеуÑпешно клониране."
-#: lib/choose_repository.tcl:960
+#: lib/choose_repository.tcl:914
msgid "No default branch obtained."
msgstr "Ðе е получен клон по подразбиране."
-#: lib/choose_repository.tcl:971
+#: lib/choose_repository.tcl:925
#, tcl-format
msgid "Cannot resolve %s as a commit."
msgstr "ÐÑма подаване отговарÑщо на „%s“."
-#: lib/choose_repository.tcl:998
+#: lib/choose_repository.tcl:952
msgid "Creating working directory"
msgstr "Създаване на работната директориÑ"
-#: lib/choose_repository.tcl:1028
+#: lib/choose_repository.tcl:953 lib/index.tcl:77 lib/index.tcl:146
+#: lib/index.tcl:220 lib/index.tcl:589
+msgid "files"
+msgstr "файлове"
+
+#: lib/choose_repository.tcl:982
msgid "Initial file checkout failed."
msgstr "ÐеуÑпешно първоначално изтеглÑне."
-#: lib/choose_repository.tcl:1072
+#: lib/choose_repository.tcl:1026
msgid "Cloning submodules"
msgstr "Клониране на подмодули"
-#: lib/choose_repository.tcl:1087
+#: lib/choose_repository.tcl:1041
msgid "Cannot clone submodules."
msgstr "Подмодулите не може да Ñе клонират."
-#: lib/choose_repository.tcl:1110
+#: lib/choose_repository.tcl:1064
msgid "Repository:"
msgstr "Хранилище:"
-#: lib/choose_repository.tcl:1159
+#: lib/choose_repository.tcl:1113
#, tcl-format
msgid "Failed to open repository %s:"
msgstr "ÐеуÑпешно отварÑне на хранилището „%s“:"
-#: lib/about.tcl:26
-msgid "git-gui - a graphical user interface for Git."
-msgstr "git-gui — графичен Ð¸Ð½Ñ‚ÐµÑ€Ñ„ÐµÐ¹Ñ Ð·Ð° Git."
-
-#: lib/blame.tcl:74
-#, tcl-format
-msgid "%s (%s): File Viewer"
-msgstr "%s (%s): Преглед на файлове"
-
-#: lib/blame.tcl:80
-msgid "Commit:"
-msgstr "Подаване:"
-
-#: lib/blame.tcl:282
-msgid "Copy Commit"
-msgstr "Копиране на подаване"
-
-#: lib/blame.tcl:286
-msgid "Find Text..."
-msgstr "ТърÑене на текÑт…"
-
-#: lib/blame.tcl:290
-msgid "Goto Line..."
-msgstr "Към ред…"
-
-#: lib/blame.tcl:299
-msgid "Do Full Copy Detection"
-msgstr "Пълно търÑене на копиране"
-
-#: lib/blame.tcl:303
-msgid "Show History Context"
-msgstr "Показване на контекÑта от иÑториÑта"
-
-#: lib/blame.tcl:306
-msgid "Blame Parent Commit"
-msgstr "Ðнотиране на родителÑкото подаване"
-
-#: lib/blame.tcl:468
-#, tcl-format
-msgid "Reading %s..."
-msgstr "Чете Ñе „%s“…"
-
-#: lib/blame.tcl:596
-msgid "Loading copy/move tracking annotations..."
-msgstr "Зареждане на анотациите за проÑледÑване на копирането/премеÑтването…"
-
-#: lib/blame.tcl:613
-msgid "lines annotated"
-msgstr "реда анотирани"
-
-#: lib/blame.tcl:815
-msgid "Loading original location annotations..."
-msgstr "Зареждане на анотациите за първоначалното меÑтоположение…"
-
-#: lib/blame.tcl:818
-msgid "Annotation complete."
-msgstr "Ðнотирането завърши."
-
-#: lib/blame.tcl:849
-msgid "Busy"
-msgstr "ОперациÑта не е завършила"
-
-#: lib/blame.tcl:850
-msgid "Annotation process is already running."
-msgstr "Ð’ момента тече Ð¿Ñ€Ð¾Ñ†ÐµÑ Ð½Ð° анотиране."
-
-#: lib/blame.tcl:889
-msgid "Running thorough copy detection..."
-msgstr "ИзпълнÑва Ñе цÑлоÑтен Ð¿Ñ€Ð¾Ñ†ÐµÑ Ð½Ð° откриване на копиране…"
-
-#: lib/blame.tcl:957
-msgid "Loading annotation..."
-msgstr "Зареждане на анотации…"
-
-#: lib/blame.tcl:1010
-msgid "Author:"
-msgstr "Ðвтор:"
-
-#: lib/blame.tcl:1014
-msgid "Committer:"
-msgstr "Подал:"
-
-#: lib/blame.tcl:1019
-msgid "Original File:"
-msgstr "Първоначален файл:"
-
-#: lib/blame.tcl:1067
-msgid "Cannot find HEAD commit:"
-msgstr "Подаването за връх „HEAD“ не може да Ñе открие:"
-
-#: lib/blame.tcl:1122
-msgid "Cannot find parent commit:"
-msgstr "РодителÑкото подаване не може да Ñе открие"
-
-#: lib/blame.tcl:1137
-msgid "Unable to display parent"
-msgstr "РодителÑÑ‚ не може да Ñе покаже"
-
-#: lib/blame.tcl:1138 lib/diff.tcl:345
-msgid "Error loading diff:"
-msgstr "Грешка при зареждане на разлика:"
-
-#: lib/blame.tcl:1279
-msgid "Originally By:"
-msgstr "Първоначално от:"
-
-#: lib/blame.tcl:1285
-msgid "In File:"
-msgstr "Във файл:"
-
-#: lib/blame.tcl:1290
-msgid "Copied Or Moved Here By:"
-msgstr "Копирано или премеÑтено тук от:"
-
-#: lib/diff.tcl:77
-#, tcl-format
-msgid ""
-"No differences detected.\n"
-"\n"
-"%s has no changes.\n"
-"\n"
-"The modification date of this file was updated by another application, but "
-"the content within the file was not changed.\n"
-"\n"
-"A rescan will be automatically started to find other files which may have "
-"the same state."
-msgstr ""
-"Ðе Ñа открити разлики.\n"
-"\n"
-"ÐÑма промени в „%s“.\n"
-"\n"
-"Времето на промÑна на файла е бил зададен от друга програма, но Ñъдържанието "
-"му не е променено.\n"
-"\n"
-"Ðвтоматично ще започне нова проверка дали нÑма други файлове в това "
-"ÑÑŠÑтоÑние."
-
-#: lib/diff.tcl:117
-#, tcl-format
-msgid "Loading diff of %s..."
-msgstr "Зареждане на разликите в „%s“…"
-
-#: lib/diff.tcl:143
-msgid ""
-"LOCAL: deleted\n"
-"REMOTE:\n"
-msgstr ""
-"ЛОКÐЛÐО: изтрит\n"
-"ОТДÐЛЕЧЕÐО:\n"
-
-#: lib/diff.tcl:148
-msgid ""
-"REMOTE: deleted\n"
-"LOCAL:\n"
-msgstr ""
-"ОТДÐЛЕЧЕÐО: изтрит\n"
-"ЛОКÐЛÐО:\n"
-
-#: lib/diff.tcl:155
-msgid "LOCAL:\n"
-msgstr "ЛОКÐЛÐО:\n"
-
-#: lib/diff.tcl:158
-msgid "REMOTE:\n"
-msgstr "ОТДÐЛЕЧЕÐО:\n"
-
-#: lib/diff.tcl:220 lib/diff.tcl:344
-#, tcl-format
-msgid "Unable to display %s"
-msgstr "Файлът „%s“ не може да Ñе покаже"
-
-#: lib/diff.tcl:221
-msgid "Error loading file:"
-msgstr "Грешка при зареждане на файл:"
-
-#: lib/diff.tcl:227
-msgid "Git Repository (subproject)"
-msgstr "Хранилище на Git (подмодул)"
-
-#: lib/diff.tcl:239
-msgid "* Binary file (not showing content)."
-msgstr "◠Двоичен файл (Ñъдържанието не Ñе показва)."
-
-#: lib/diff.tcl:244
-#, tcl-format
-msgid ""
-"* Untracked file is %d bytes.\n"
-"* Showing only first %d bytes.\n"
-msgstr ""
-"â— ÐеÑледениÑÑ‚ файл е %d байта.\n"
-"◠Показват Ñе Ñамо първите %d байта.\n"
-
-#: lib/diff.tcl:250
-#, tcl-format
-msgid ""
-"\n"
-"* Untracked file clipped here by %s.\n"
-"* To see the entire file, use an external editor.\n"
-msgstr ""
-"\n"
-"â— ÐеÑледениÑÑ‚ файл е отрÑзан дотук от програмата „%s“.\n"
-"◠Използвайте външен редактор, за да видите Ñ†ÐµÐ»Ð¸Ñ Ñ„Ð°Ð¹Ð».\n"
-
-#: lib/diff.tcl:583
-msgid "Failed to unstage selected hunk."
-msgstr "Избраното парче не може да Ñе извади от индекÑа."
-
-#: lib/diff.tcl:591
-msgid "Failed to revert selected hunk."
-msgstr "Избраното парче не може да Ñе върне."
-
-#: lib/diff.tcl:594
-msgid "Failed to stage selected hunk."
-msgstr "Избраното парче не може да Ñе добави към индекÑа."
-
-#: lib/diff.tcl:687
-msgid "Failed to unstage selected line."
-msgstr "ИзбраниÑÑ‚ ред не може да Ñе извади от индекÑа."
-
-#: lib/diff.tcl:696
-msgid "Failed to revert selected line."
-msgstr "ИзбраниÑÑ‚ ред не може да Ñе върне."
-
-#: lib/diff.tcl:700
-msgid "Failed to stage selected line."
-msgstr "ИзбраниÑÑ‚ ред не може да Ñе добави към индекÑа."
-
-#: lib/diff.tcl:889
-msgid "Failed to undo last revert."
-msgstr "ÐеуÑпешна отмÑна на поÑледното връщане."
-
-#: lib/sshkey.tcl:34
-msgid "No keys found."
-msgstr "Ðе Ñа открити ключове."
-
-#: lib/sshkey.tcl:37
-#, tcl-format
-msgid "Found a public key in: %s"
-msgstr "Открит е публичен ключ в „%s“"
-
-#: lib/sshkey.tcl:43
-msgid "Generate Key"
-msgstr "Генериране на ключ"
-
-#: lib/sshkey.tcl:61
-msgid "Copy To Clipboard"
-msgstr "Копиране към ÑиÑÑ‚ÐµÐ¼Ð½Ð¸Ñ Ð±ÑƒÑ„ÐµÑ€"
-
-#: lib/sshkey.tcl:75
-msgid "Your OpenSSH Public Key"
-msgstr "ПубличниÑÑ‚ ви ключ за OpenSSH"
-
-#: lib/sshkey.tcl:83
-msgid "Generating..."
-msgstr "Генериране…"
-
-#: lib/sshkey.tcl:89
-#, tcl-format
-msgid ""
-"Could not start ssh-keygen:\n"
-"\n"
-"%s"
-msgstr ""
-"Програмата „ssh-keygen“ не може да Ñе Ñтартира:\n"
-"\n"
-"%s"
-
-#: lib/sshkey.tcl:116
-msgid "Generation failed."
-msgstr "ÐеуÑпешно генериране."
-
-#: lib/sshkey.tcl:123
-msgid "Generation succeeded, but no keys found."
-msgstr "Генерирането завърши уÑпешно, а не Ñа намерени ключове."
-
-#: lib/sshkey.tcl:126
-#, tcl-format
-msgid "Your key is in: %s"
-msgstr "Ключът ви е в „%s“"
-
-#: lib/branch_create.tcl:23
-#, tcl-format
-msgid "%s (%s): Create Branch"
-msgstr "%s (%s): Създаване на клон"
-
-#: lib/branch_create.tcl:28
-msgid "Create New Branch"
-msgstr "Създаване на нов клон"
-
-#: lib/branch_create.tcl:42
-msgid "Branch Name"
-msgstr "Име на клона"
-
-#: lib/branch_create.tcl:57
-msgid "Match Tracking Branch Name"
-msgstr "Съвпадане по името на ÑÐ»ÐµÐ´ÐµÐ½Ð¸Ñ ÐºÐ»Ð¾Ð½"
-
-#: lib/branch_create.tcl:66
-msgid "Starting Revision"
-msgstr "Ðачална верÑиÑ"
-
-#: lib/branch_create.tcl:72
-msgid "Update Existing Branch:"
-msgstr "ОбновÑване на ÑъщеÑтвуващ клон:"
-
-#: lib/branch_create.tcl:75
-msgid "No"
-msgstr "Ðе"
-
-#: lib/branch_create.tcl:80
-msgid "Fast Forward Only"
-msgstr "Само тривиално превъртащо Ñливане"
-
-#: lib/branch_create.tcl:97
-msgid "Checkout After Creation"
-msgstr "Преминаване към клона Ñлед Ñъздаването му"
-
-#: lib/branch_create.tcl:132
-msgid "Please select a tracking branch."
-msgstr "Изберете клон за Ñледени."
-
-#: lib/branch_create.tcl:141
-#, tcl-format
-msgid "Tracking branch %s is not a branch in the remote repository."
-msgstr "СледÑщиÑÑ‚ клон — „%s“, не ÑъщеÑтвува в отдалеченото хранилище."
-
-#: lib/console.tcl:59
-msgid "Working... please wait..."
-msgstr "Ð’ момента Ñе извършва дейÑтвие, изчакайте…"
-
-#: lib/console.tcl:186
-msgid "Success"
-msgstr "УÑпех"
-
-#: lib/console.tcl:200
-msgid "Error: Command Failed"
-msgstr "Грешка: неуÑпешно изпълнение на команда"
-
-#: lib/line.tcl:17
-msgid "Goto Line:"
-msgstr "Към ред:"
-
-#: lib/line.tcl:23
-msgid "Go"
-msgstr "Към"
-
#: lib/choose_rev.tcl:52
msgid "This Detached Checkout"
msgstr "Това неÑвързано изтеглÑне"
@@ -2494,7 +1459,7 @@ msgstr ""
"\n"
"ТрÑбва да добавите поне един файл към индекÑа, за да подадете.\n"
-#: lib/commit.tcl:213
+#: lib/commit.tcl:224
msgid ""
"Please supply a commit message.\n"
"\n"
@@ -2512,15 +1477,15 @@ msgstr ""
"◠Втори ред: празен.\n"
"◠ОÑтаналите редове: опишете защо Ñе налага тази промÑна.\n"
-#: lib/commit.tcl:244
+#: lib/commit.tcl:255
msgid "Calling pre-commit hook..."
msgstr "ИзпълнÑване на куката преди подаване…"
-#: lib/commit.tcl:259
+#: lib/commit.tcl:270
msgid "Commit declined by pre-commit hook."
msgstr "Подаването е отхвърлено от куката преди подаване."
-#: lib/commit.tcl:278
+#: lib/commit.tcl:289
msgid ""
"You are about to commit on a detached head. This is a potentially dangerous "
"thing to do because if you switch to another branch you will lose your "
@@ -2536,32 +1501,32 @@ msgstr ""
" \n"
"Сигурни ли Ñте, че иÑкате да извършите текущото подаване?"
-#: lib/commit.tcl:299
+#: lib/commit.tcl:310
msgid "Calling commit-msg hook..."
msgstr "ИзпълнÑване на куката за Ñъобщението при подаване…"
-#: lib/commit.tcl:314
+#: lib/commit.tcl:325
msgid "Commit declined by commit-msg hook."
msgstr "Подаването е отхвърлено от куката за Ñъобщението при подаване."
-#: lib/commit.tcl:327
+#: lib/commit.tcl:338
msgid "Committing changes..."
msgstr "Подаване на промените…"
-#: lib/commit.tcl:344
+#: lib/commit.tcl:355
msgid "write-tree failed:"
msgstr "неуÑпешно запазване на дървото (write-tree):"
-#: lib/commit.tcl:345 lib/commit.tcl:395 lib/commit.tcl:422
+#: lib/commit.tcl:356 lib/commit.tcl:406 lib/commit.tcl:433
msgid "Commit failed."
msgstr "ÐеуÑпешно подаване."
-#: lib/commit.tcl:362
+#: lib/commit.tcl:373
#, tcl-format
msgid "Commit %s appears to be corrupt"
msgstr "Подаването „%s“ изглежда повредено"
-#: lib/commit.tcl:367
+#: lib/commit.tcl:378
msgid ""
"No changes to commit.\n"
"\n"
@@ -2576,63 +1541,34 @@ msgstr ""
"\n"
"Ðвтоматично ще започне нова проверка.\n"
-#: lib/commit.tcl:374
+#: lib/commit.tcl:385
msgid "No changes to commit."
msgstr "ÐÑма промени за подаване."
-#: lib/commit.tcl:394
+#: lib/commit.tcl:405
msgid "commit-tree failed:"
msgstr "неуÑпешно подаване на дървото (commit-tree):"
-#: lib/commit.tcl:421
+#: lib/commit.tcl:432
msgid "update-ref failed:"
msgstr "неуÑпешно обновÑване на указателите (update-ref):"
-#: lib/commit.tcl:514
+#: lib/commit.tcl:526
#, tcl-format
msgid "Created commit %s: %s"
msgstr "УÑпешно подаване %s: %s"
-#: lib/branch_delete.tcl:16
-#, tcl-format
-msgid "%s (%s): Delete Branch"
-msgstr "%s (%s): Изтриване на клон"
-
-#: lib/branch_delete.tcl:21
-msgid "Delete Local Branch"
-msgstr "Изтриване на локален клон"
-
-#: lib/branch_delete.tcl:39
-msgid "Local Branches"
-msgstr "Локални клони"
-
-#: lib/branch_delete.tcl:51
-msgid "Delete Only If Merged Into"
-msgstr "Изтриване, Ñамо ако промените Ñа Ñлети и другаде"
-
-#: lib/branch_delete.tcl:103
-#, tcl-format
-msgid "The following branches are not completely merged into %s:"
-msgstr "Ðе вÑички промени в клоните Ñа Ñлети в „%s“:"
-
-#: lib/branch_delete.tcl:131
-#, tcl-format
-msgid " - %s:"
-msgstr " — „%s:“"
+#: lib/console.tcl:59
+msgid "Working... please wait..."
+msgstr "Ð’ момента Ñе извършва дейÑтвие, изчакайте…"
-#: lib/branch_delete.tcl:141
-#, tcl-format
-msgid ""
-"Failed to delete branches:\n"
-"%s"
-msgstr ""
-"ÐеуÑпешно триене на клони:\n"
-"%s"
+#: lib/console.tcl:186
+msgid "Success"
+msgstr "УÑпех"
-#: lib/date.tcl:25
-#, tcl-format
-msgid "Invalid date from Git: %s"
-msgstr "Ðеправилни данни от Git: %s"
+#: lib/console.tcl:200
+msgid "Error: Command Failed"
+msgstr "Грешка: неуÑпешно изпълнение на команда"
#: lib/database.tcl:42
msgid "Number of loose objects"
@@ -2662,6 +1598,12 @@ msgstr "Пакетирани обекти за окаÑтрÑне"
msgid "Garbage files"
msgstr "Файлове за боклука"
+#: lib/database.tcl:57 lib/option.tcl:182 lib/option.tcl:197 lib/option.tcl:220
+#: lib/option.tcl:282
+#, tcl-format
+msgid "%s:"
+msgstr "%s:"
+
#: lib/database.tcl:66
#, tcl-format
msgid "%s (%s): Database Statistics"
@@ -2692,6 +1634,130 @@ msgstr ""
"\n"
"Да Ñе започне ли компреÑирането?"
+#: lib/date.tcl:25
+#, tcl-format
+msgid "Invalid date from Git: %s"
+msgstr "Ðеправилни данни от Git: %s"
+
+#: lib/diff.tcl:74
+msgid ""
+"* No differences detected; stage the file to de-list it from Unstaged "
+"Changes.\n"
+msgstr ""
+"â— ÐÑма разлики. Добавете файла към индекÑа, за да Ñе извади от промените "
+"извън индекÑа.\n"
+
+#: lib/diff.tcl:75
+msgid "* Click to find other files that may have the same state.\n"
+msgstr "â— ÐатиÑнете, за да потърÑите други файлове в това ÑÑŠÑтоÑние.\n"
+
+#: lib/diff.tcl:106
+#, tcl-format
+msgid "Loading diff of %s..."
+msgstr "Зареждане на разликите в „%s“…"
+
+#: lib/diff.tcl:132
+msgid ""
+"LOCAL: deleted\n"
+"REMOTE:\n"
+msgstr ""
+"ЛОКÐЛÐО: изтрит\n"
+"ОТДÐЛЕЧЕÐО:\n"
+
+#: lib/diff.tcl:137
+msgid ""
+"REMOTE: deleted\n"
+"LOCAL:\n"
+msgstr ""
+"ОТДÐЛЕЧЕÐО: изтрит\n"
+"ЛОКÐЛÐО:\n"
+
+#: lib/diff.tcl:144
+msgid "LOCAL:\n"
+msgstr "ЛОКÐЛÐО:\n"
+
+#: lib/diff.tcl:147
+msgid "REMOTE:\n"
+msgstr "ОТДÐЛЕЧЕÐО:\n"
+
+#: lib/diff.tcl:209 lib/diff.tcl:333
+#, tcl-format
+msgid "Unable to display %s"
+msgstr "Файлът „%s“ не може да Ñе покаже"
+
+#: lib/diff.tcl:210
+msgid "Error loading file:"
+msgstr "Грешка при зареждане на файл:"
+
+#: lib/diff.tcl:216
+msgid "Git Repository (subproject)"
+msgstr "Хранилище на Git (подмодул)"
+
+#: lib/diff.tcl:228
+msgid "* Binary file (not showing content)."
+msgstr "◠Двоичен файл (Ñъдържанието не Ñе показва)."
+
+#: lib/diff.tcl:233
+#, tcl-format
+msgid ""
+"* Untracked file is %d bytes.\n"
+"* Showing only first %d bytes.\n"
+msgstr ""
+"â— ÐеÑледениÑÑ‚ файл е %d байта.\n"
+"◠Показват Ñе Ñамо първите %d байта.\n"
+
+#: lib/diff.tcl:239
+#, tcl-format
+msgid ""
+"\n"
+"* Untracked file clipped here by %s.\n"
+"* To see the entire file, use an external editor.\n"
+msgstr ""
+"\n"
+"â— ÐеÑледениÑÑ‚ файл е отрÑзан дотук от програмата „%s“.\n"
+"◠Използвайте външен редактор, за да видите Ñ†ÐµÐ»Ð¸Ñ Ñ„Ð°Ð¹Ð».\n"
+
+#: lib/diff.tcl:569
+msgid "Failed to unstage selected hunk."
+msgstr "Избраното парче не може да Ñе извади от индекÑа."
+
+#: lib/diff.tcl:577
+msgid "Failed to revert selected hunk."
+msgstr "Избраното парче не може да Ñе върне."
+
+#: lib/diff.tcl:580
+msgid "Failed to stage selected hunk."
+msgstr "Избраното парче не може да Ñе добави към индекÑа."
+
+#: lib/diff.tcl:673
+msgid "Failed to unstage selected line."
+msgstr "ИзбраниÑÑ‚ ред не може да Ñе извади от индекÑа."
+
+#: lib/diff.tcl:682
+msgid "Failed to revert selected line."
+msgstr "ИзбраниÑÑ‚ ред не може да Ñе върне."
+
+#: lib/diff.tcl:686
+msgid "Failed to stage selected line."
+msgstr "ИзбраниÑÑ‚ ред не може да Ñе добави към индекÑа."
+
+#: lib/diff.tcl:875
+msgid "Failed to undo last revert."
+msgstr "ÐеуÑпешна отмÑна на поÑледното връщане."
+
+#: lib/encoding.tcl:443
+msgid "Default"
+msgstr "Стандартното"
+
+#: lib/encoding.tcl:448
+#, tcl-format
+msgid "System (%s)"
+msgstr "СиÑтемното (%s)"
+
+#: lib/encoding.tcl:459 lib/encoding.tcl:465
+msgid "Other"
+msgstr "Друго"
+
#: lib/error.tcl:20
#, tcl-format
msgid "%s: error"
@@ -2716,6 +1782,134 @@ msgstr "Преди да можете да подадете, коригирайт
msgid "%s (%s): error"
msgstr "%s (%s): грешка"
+#: lib/index.tcl:6
+msgid "Unable to unlock the index."
+msgstr "ИндекÑÑŠÑ‚ не може да Ñе отключи."
+
+#: lib/index.tcl:30
+msgid "Index Error"
+msgstr "Грешка в индекÑа"
+
+#: lib/index.tcl:32
+msgid ""
+"Updating the Git index failed. A rescan will be automatically started to "
+"resynchronize git-gui."
+msgstr ""
+"ÐеуÑпешно обновÑване на индекÑа на Git. Ðвтоматично ще започне нова проверка "
+"за Ñинхронизирането на git-gui."
+
+#: lib/index.tcl:43
+msgid "Continue"
+msgstr "Продължаване"
+
+#: lib/index.tcl:46
+msgid "Unlock Index"
+msgstr "Отключване на индекÑа"
+
+#: lib/index.tcl:326
+msgid "Unstaging selected files from commit"
+msgstr "Изваждане на избраните файлове от подаването"
+
+#: lib/index.tcl:330
+#, tcl-format
+msgid "Unstaging %s from commit"
+msgstr "Изваждане на „%s“ от подаването"
+
+#: lib/index.tcl:369
+msgid "Ready to commit."
+msgstr "ГотовноÑÑ‚ за подаване."
+
+#: lib/index.tcl:378
+msgid "Adding selected files"
+msgstr "ДобавÑне на избраните файлове"
+
+#: lib/index.tcl:382
+#, tcl-format
+msgid "Adding %s"
+msgstr "ДобавÑне на „%s“"
+
+#: lib/index.tcl:412
+#, tcl-format
+msgid "Stage %d untracked files?"
+msgstr "Да Ñе добавÑÑ‚ ли %d неÑледени файла към индекÑа?"
+
+#: lib/index.tcl:420
+msgid "Adding all changed files"
+msgstr "ДобавÑне на вÑички променени файлове"
+
+#: lib/index.tcl:503
+#, tcl-format
+msgid "Revert changes in file %s?"
+msgstr "Да Ñе махнат ли промените във файла „%s“?"
+
+#: lib/index.tcl:508
+#, tcl-format
+msgid "Revert changes in these %i files?"
+msgstr "Да Ñе махнат ли промените в тези %i файла?"
+
+#: lib/index.tcl:517
+msgid "Any unstaged changes will be permanently lost by the revert."
+msgstr ""
+"Ð’Ñички промени, които не Ñа били добавени в индекÑа, ще Ñе загубÑÑ‚ "
+"безвъзвратно."
+
+#: lib/index.tcl:520 lib/index.tcl:564
+msgid "Do Nothing"
+msgstr "Ðищо да не Ñе прави"
+
+#: lib/index.tcl:546
+#, tcl-format
+msgid "Delete untracked file %s?"
+msgstr "Да Ñе изтрие ли неÑледениÑÑ‚ файл „%s“?"
+
+#: lib/index.tcl:551
+#, tcl-format
+msgid "Delete these %i untracked files?"
+msgstr "Да Ñе изтриÑÑ‚ ли тези %d неÑледени файла?"
+
+#: lib/index.tcl:561
+msgid "Files will be permanently deleted."
+msgstr "Файловете ще Ñе изтриÑÑ‚ окончателно."
+
+#: lib/index.tcl:565
+msgid "Delete Files"
+msgstr "Изтриване на файлове"
+
+#: lib/index.tcl:588
+msgid "Deleting"
+msgstr "Изтриване"
+
+#: lib/index.tcl:667
+msgid "Encountered errors deleting files:\n"
+msgstr "Грешки при изтриване на файловете:\n"
+
+#: lib/index.tcl:676
+#, tcl-format
+msgid "None of the %d selected files could be deleted."
+msgstr "Ðикой от избраните %d файла не бе изтрит."
+
+#: lib/index.tcl:681
+#, tcl-format
+msgid "%d of the %d selected files could not be deleted."
+msgstr "%d от избраните %d файла не бÑха изтрити."
+
+#: lib/index.tcl:728
+msgid "Reverting selected files"
+msgstr "Махане на промените в избраните файлове"
+
+#: lib/index.tcl:732
+#, tcl-format
+msgid "Reverting %s"
+msgstr "Махане на промените в „%s“"
+
+#: lib/line.tcl:17
+msgid "Goto Line:"
+msgstr "Към ред:"
+
+#: lib/line.tcl:23
+msgid "Go"
+msgstr "Към"
+
#: lib/merge.tcl:13
msgid ""
"Cannot merge while amending.\n"
@@ -2864,3 +2058,775 @@ msgstr "ÐеуÑпешно преуÑтановÑване."
#: lib/merge.tcl:279
msgid "Abort completed. Ready."
msgstr "УÑпешно преуÑтановÑване. ГотовноÑÑ‚ за Ñледващо дейÑтвие."
+
+#: lib/mergetool.tcl:8
+msgid "Force resolution to the base version?"
+msgstr "Да Ñе използва базовата верÑиÑ"
+
+#: lib/mergetool.tcl:9
+msgid "Force resolution to this branch?"
+msgstr "Да Ñе използва верÑиÑта от този клон"
+
+#: lib/mergetool.tcl:10
+msgid "Force resolution to the other branch?"
+msgstr "Да Ñе използва верÑиÑта от Ð´Ñ€ÑƒÐ³Ð¸Ñ ÐºÐ»Ð¾Ð½"
+
+#: lib/mergetool.tcl:14
+#, tcl-format
+msgid ""
+"Note that the diff shows only conflicting changes.\n"
+"\n"
+"%s will be overwritten.\n"
+"\n"
+"This operation can be undone only by restarting the merge."
+msgstr ""
+"Разликата показва Ñамо разликите Ñ ÐºÐ¾Ð½Ñ„Ð»Ð¸ÐºÑ‚.\n"
+"\n"
+"Файлът „%s“ ще Ñе презапише.\n"
+"\n"
+"Тази Ð¾Ð¿ÐµÑ€Ð°Ñ†Ð¸Ñ Ð¼Ð¾Ð¶Ðµ да Ñе отмени Ñамо чрез започване на Ñливането наново."
+
+#: lib/mergetool.tcl:45
+#, tcl-format
+msgid "File %s seems to have unresolved conflicts, still stage?"
+msgstr ""
+"Изглежда, че вÑе още има некоригирани конфликти във файла „%s“. Да Ñе добави "
+"ли файлът към индекÑа?"
+
+#: lib/mergetool.tcl:60
+#, tcl-format
+msgid "Adding resolution for %s"
+msgstr "ДобавÑне на ÐºÐ¾Ñ€ÐµÐºÑ†Ð¸Ñ Ð½Ð° конфликтите в „%s“"
+
+#: lib/mergetool.tcl:141
+msgid "Cannot resolve deletion or link conflicts using a tool"
+msgstr ""
+"Конфликтите при Ñимволни връзки или изтриване не може да Ñе коригират Ñ "
+"външна програма."
+
+#: lib/mergetool.tcl:146
+msgid "Conflict file does not exist"
+msgstr "Файлът, в който е конфликтът, не ÑъщеÑтвува"
+
+#: lib/mergetool.tcl:246
+#, tcl-format
+msgid "Not a GUI merge tool: '%s'"
+msgstr "Това не е графична програма за Ñливане: „%s“"
+
+#: lib/mergetool.tcl:278
+#, tcl-format
+msgid ""
+"Unable to process square brackets in \"mergetool.%s.cmd\" configuration "
+"option.\n"
+"\n"
+"Please remove the square brackets."
+msgstr ""
+"Квадратните Ñкоби в наÑтройката „mergetool.%s.cmd“ не може да Ñе обработÑÑ‚.\n"
+"\n"
+"Махнете ги."
+
+#: lib/mergetool.tcl:289
+#, tcl-format
+msgid ""
+"Unsupported merge tool '%s'.\n"
+"\n"
+"To use this tool, configure \"mergetool.%s.cmd\" as shown in the git-config "
+"manual page."
+msgstr ""
+"Ðеподдържана програма за Ñливане: „%s“.\n"
+"\n"
+"За да Ñ Ð¸Ð·Ð¿Ð¾Ð»Ð·Ð²Ð°Ñ‚Ðµ, наÑтройте „mergetool.%s.cmd“ както както е обÑÑнено в "
+"Ñтраницата на ръководÑтвото за „git-config“."
+
+#: lib/mergetool.tcl:327
+msgid "Merge tool is already running, terminate it?"
+msgstr "Програмата за Ñливане вече е Ñтартирана. Да Ñе изключи ли?"
+
+#: lib/mergetool.tcl:347
+#, tcl-format
+msgid ""
+"Error retrieving versions:\n"
+"%s"
+msgstr ""
+"Грешка при изтеглÑнето на верÑии:\n"
+"%s"
+
+#: lib/mergetool.tcl:367
+#, tcl-format
+msgid ""
+"Could not start the merge tool:\n"
+"\n"
+"%s"
+msgstr ""
+"Програмата за Ñливане не може да Ñе Ñтартира:\n"
+"\n"
+"%s"
+
+#: lib/mergetool.tcl:371
+msgid "Running merge tool..."
+msgstr "Стартиране на програмата за Ñливане…"
+
+#: lib/mergetool.tcl:399 lib/mergetool.tcl:407
+msgid "Merge tool failed."
+msgstr "Грешка в програмата за Ñливане."
+
+#: lib/option.tcl:11
+#, tcl-format
+msgid "Invalid global encoding '%s'"
+msgstr "Ðеправилно глобално кодиране „%s“"
+
+#: lib/option.tcl:19
+#, tcl-format
+msgid "Invalid repo encoding '%s'"
+msgstr "Ðеправилно кодиране „%s“ на хранилището"
+
+#: lib/option.tcl:119
+msgid "Restore Defaults"
+msgstr "Стандартни наÑтройки"
+
+#: lib/option.tcl:123
+msgid "Save"
+msgstr "Запазване"
+
+#: lib/option.tcl:133
+#, tcl-format
+msgid "%s Repository"
+msgstr "Хранилище „%s“"
+
+#: lib/option.tcl:134
+msgid "Global (All Repositories)"
+msgstr "Глобално (за вÑички хранилища)"
+
+#: lib/option.tcl:140
+msgid "User Name"
+msgstr "ПотребителÑко име"
+
+#: lib/option.tcl:141
+msgid "Email Address"
+msgstr "ÐÐ´Ñ€ÐµÑ Ð½Ð° е-поща"
+
+#: lib/option.tcl:143
+msgid "Summarize Merge Commits"
+msgstr "Обобщаване на подаваниÑта при Ñливане"
+
+#: lib/option.tcl:144
+msgid "Merge Verbosity"
+msgstr "ПодробноÑти при ÑливаниÑта"
+
+#: lib/option.tcl:145
+msgid "Show Diffstat After Merge"
+msgstr "Извеждане на ÑтатиÑтика Ñлед ÑливаниÑта"
+
+#: lib/option.tcl:146
+msgid "Use Merge Tool"
+msgstr "Използване на програма за Ñливане"
+
+#: lib/option.tcl:148
+msgid "Trust File Modification Timestamps"
+msgstr "Доверие във времето на промÑна на файловете"
+
+#: lib/option.tcl:149
+msgid "Prune Tracking Branches During Fetch"
+msgstr "ОкаÑтрÑне на ÑледÑщите клонове при доÑтавÑне"
+
+#: lib/option.tcl:150
+msgid "Match Tracking Branches"
+msgstr "ÐапаÑване на ÑледÑщите клонове"
+
+#: lib/option.tcl:151
+msgid "Use Textconv For Diffs and Blames"
+msgstr "Използване на „textconv“ за разликите и анотирането"
+
+#: lib/option.tcl:152
+msgid "Blame Copy Only On Changed Files"
+msgstr "Ðнотиране на копието Ñамо по променените файлове"
+
+#: lib/option.tcl:153
+msgid "Maximum Length of Recent Repositories List"
+msgstr "МакÑимален брой на ÑпиÑъка „Скоро ползвани“ хранилища"
+
+#: lib/option.tcl:154
+msgid "Minimum Letters To Blame Copy On"
+msgstr "Минимален брой знаци за анотиране на копието"
+
+#: lib/option.tcl:155
+msgid "Blame History Context Radius (days)"
+msgstr "ИÑторичеÑки обхват за анотиране в дни"
+
+#: lib/option.tcl:156
+msgid "Number of Diff Context Lines"
+msgstr "Брой редове за контекÑта на разликите"
+
+#: lib/option.tcl:157
+msgid "Additional Diff Parameters"
+msgstr "Ðргументи към командата за разликите"
+
+#: lib/option.tcl:158
+msgid "Commit Message Text Width"
+msgstr "Широчина на текÑта на Ñъобщението при подаване"
+
+#: lib/option.tcl:159
+msgid "New Branch Name Template"
+msgstr "Шаблон за името на новите клони"
+
+#: lib/option.tcl:160
+msgid "Default File Contents Encoding"
+msgstr "Кодиране на файловете"
+
+#: lib/option.tcl:161
+msgid "Warn before committing to a detached head"
+msgstr "Предупреждаване при подаване към неÑвързан указател"
+
+#: lib/option.tcl:162
+msgid "Staging of untracked files"
+msgstr "ДобавÑне на неÑледените файлове към индекÑа"
+
+#: lib/option.tcl:163
+msgid "Show untracked files"
+msgstr "Показване на неÑледените файлове"
+
+#: lib/option.tcl:164
+msgid "Tab spacing"
+msgstr "Ширина на табулациÑта"
+
+#: lib/option.tcl:210
+msgid "Change"
+msgstr "СмÑна"
+
+#: lib/option.tcl:254
+msgid "Spelling Dictionary:"
+msgstr "ПравопиÑен речник:"
+
+#: lib/option.tcl:284
+msgid "Change Font"
+msgstr "СмÑна на шрифта"
+
+#: lib/option.tcl:288
+#, tcl-format
+msgid "Choose %s"
+msgstr "Избор на „%s“"
+
+#: lib/option.tcl:294
+msgid "pt."
+msgstr "тчк."
+
+#: lib/option.tcl:308
+msgid "Preferences"
+msgstr "ÐаÑтройки"
+
+#: lib/option.tcl:345
+msgid "Failed to completely save options:"
+msgstr "ÐеуÑпешно запазване на наÑтройките:"
+
+#: lib/remote_add.tcl:20
+#, tcl-format
+msgid "%s (%s): Add Remote"
+msgstr "%s (%s): ДобавÑне на отдалечено хранилище"
+
+#: lib/remote_add.tcl:25
+msgid "Add New Remote"
+msgstr "ДобавÑне на отдалечено хранилище"
+
+#: lib/remote_add.tcl:30 lib/tools_dlg.tcl:37
+msgid "Add"
+msgstr "ДобавÑне"
+
+#: lib/remote_add.tcl:39
+msgid "Remote Details"
+msgstr "Данни за отдалеченото хранилище"
+
+#: lib/remote_add.tcl:50
+msgid "Location:"
+msgstr "МеÑтоположение:"
+
+#: lib/remote_add.tcl:60
+msgid "Further Action"
+msgstr "Следващо дейÑтвие"
+
+#: lib/remote_add.tcl:63
+msgid "Fetch Immediately"
+msgstr "Ðезабавно доÑтавÑне"
+
+#: lib/remote_add.tcl:69
+msgid "Initialize Remote Repository and Push"
+msgstr "Инициализиране на отдалеченото хранилище и изтлаÑкване на промените"
+
+#: lib/remote_add.tcl:75
+msgid "Do Nothing Else Now"
+msgstr "Да не Ñе прави нищо"
+
+#: lib/remote_add.tcl:100
+msgid "Please supply a remote name."
+msgstr "Задайте име за отдалеченото хранилище."
+
+#: lib/remote_add.tcl:113
+#, tcl-format
+msgid "'%s' is not an acceptable remote name."
+msgstr "Отдалечено хранилище не може да Ñе казва „%s“."
+
+#: lib/remote_add.tcl:124
+#, tcl-format
+msgid "Failed to add remote '%s' of location '%s'."
+msgstr "ÐеуÑпешно добавÑне на отдалеченото хранилище „%s“ от Ð°Ð´Ñ€ÐµÑ â€ž%s“."
+
+#: lib/remote_add.tcl:132 lib/transport.tcl:6
+#, tcl-format
+msgid "fetch %s"
+msgstr "доÑтавÑне на „%s“"
+
+#: lib/remote_add.tcl:133
+#, tcl-format
+msgid "Fetching the %s"
+msgstr "ДоÑтавÑне на „%s“"
+
+#: lib/remote_add.tcl:156
+#, tcl-format
+msgid "Do not know how to initialize repository at location '%s'."
+msgstr "Хранилището Ñ Ð¼ÐµÑтоположение „%s“ не може да Ñе инициализира."
+
+#: lib/remote_add.tcl:162 lib/transport.tcl:54 lib/transport.tcl:92
+#: lib/transport.tcl:110
+#, tcl-format
+msgid "push %s"
+msgstr "изтлаÑкване на „%s“"
+
+#: lib/remote_add.tcl:163
+#, tcl-format
+msgid "Setting up the %s (at %s)"
+msgstr "ДобавÑне на хранилище „%s“ (Ñ Ð°Ð´Ñ€ÐµÑ â€ž%s“)"
+
+#: lib/remote_branch_delete.tcl:29
+#, tcl-format
+msgid "%s (%s): Delete Branch Remotely"
+msgstr "%s (%s): Изтриване на Ð¾Ñ‚Ð´Ð°Ð»ÐµÑ‡ÐµÐ½Ð¸Ñ ÐºÐ»Ð¾Ð½"
+
+#: lib/remote_branch_delete.tcl:34
+msgid "Delete Branch Remotely"
+msgstr "Изтриване на Ð¾Ñ‚Ð´Ð°Ð»ÐµÑ‡ÐµÐ½Ð¸Ñ ÐºÐ»Ð¾Ð½"
+
+#: lib/remote_branch_delete.tcl:48
+msgid "From Repository"
+msgstr "От хранилище"
+
+#: lib/remote_branch_delete.tcl:51 lib/transport.tcl:165
+msgid "Remote:"
+msgstr "Отдалечено хранилище:"
+
+#: lib/remote_branch_delete.tcl:72 lib/transport.tcl:187
+msgid "Arbitrary Location:"
+msgstr "Произволно меÑтоположение:"
+
+#: lib/remote_branch_delete.tcl:88
+msgid "Branches"
+msgstr "Клони"
+
+#: lib/remote_branch_delete.tcl:110
+msgid "Delete Only If"
+msgstr "Изтриване, Ñамо ако"
+
+#: lib/remote_branch_delete.tcl:112
+msgid "Merged Into:"
+msgstr "СлÑÑ‚ в:"
+
+#: lib/remote_branch_delete.tcl:153
+msgid "A branch is required for 'Merged Into'."
+msgstr "За данните „СлÑÑ‚ в“ е необходимо да зададете клон."
+
+#: lib/remote_branch_delete.tcl:185
+#, tcl-format
+msgid ""
+"The following branches are not completely merged into %s:\n"
+"\n"
+" - %s"
+msgstr ""
+"Следните клони не Ñа Ñлети напълно в „%s“:\n"
+"\n"
+" â— %s"
+
+#: lib/remote_branch_delete.tcl:190
+#, tcl-format
+msgid ""
+"One or more of the merge tests failed because you have not fetched the "
+"necessary commits. Try fetching from %s first."
+msgstr ""
+"Поне една от пробите за Ñливане е неуÑпешна, защото не Ñте доÑтавили вÑички "
+"необходими подаваниÑ. Пробвайте първо да доÑтавите подаваниÑта от „%s“."
+
+#: lib/remote_branch_delete.tcl:208
+msgid "Please select one or more branches to delete."
+msgstr "Изберете поне един клон за изтриване."
+
+#: lib/remote_branch_delete.tcl:227
+#, tcl-format
+msgid "Deleting branches from %s"
+msgstr "Изтриване на клони от „%s“"
+
+#: lib/remote_branch_delete.tcl:300
+msgid "No repository selected."
+msgstr "Ðе е избрано хранилище."
+
+#: lib/remote_branch_delete.tcl:305
+#, tcl-format
+msgid "Scanning %s..."
+msgstr "ПретърÑване на „%s“…"
+
+#: lib/remote.tcl:200
+msgid "Push to"
+msgstr "ИзтлаÑкване към"
+
+#: lib/remote.tcl:218
+msgid "Remove Remote"
+msgstr "Премахване на отдалечено хранилище"
+
+#: lib/remote.tcl:223
+msgid "Prune from"
+msgstr "ОкаÑтрÑне от"
+
+#: lib/remote.tcl:228
+msgid "Fetch from"
+msgstr "ДоÑтавÑне от"
+
+#: lib/remote.tcl:249 lib/remote.tcl:253 lib/remote.tcl:258 lib/remote.tcl:264
+msgid "All"
+msgstr "Ð’Ñички"
+
+#: lib/search.tcl:48
+msgid "Find:"
+msgstr "ТърÑене:"
+
+#: lib/search.tcl:50
+msgid "Next"
+msgstr "Следваща поÑва"
+
+#: lib/search.tcl:51
+msgid "Prev"
+msgstr "Предишна поÑва"
+
+#: lib/search.tcl:52
+msgid "RegExp"
+msgstr "РегИзр"
+
+#: lib/search.tcl:54
+msgid "Case"
+msgstr "Главни/Малки"
+
+#: lib/shortcut.tcl:8 lib/shortcut.tcl:40 lib/shortcut.tcl:72
+#, tcl-format
+msgid "%s (%s): Create Desktop Icon"
+msgstr "%s (%s): ДобавÑне на икона на Ñ€Ð°Ð±Ð¾Ñ‚Ð½Ð¸Ñ Ð¿Ð»Ð¾Ñ‚"
+
+#: lib/shortcut.tcl:24 lib/shortcut.tcl:62
+msgid "Cannot write shortcut:"
+msgstr "Клавишната ÐºÐ¾Ð¼Ð±Ð¸Ð½Ð°Ñ†Ð¸Ñ Ð½Ðµ може да Ñе запази:"
+
+#: lib/shortcut.tcl:137
+msgid "Cannot write icon:"
+msgstr "Иконата не може да Ñе запази:"
+
+#: lib/spellcheck.tcl:57
+msgid "Unsupported spell checker"
+msgstr "Тази програма за проверка на правопиÑа не Ñе поддържа"
+
+#: lib/spellcheck.tcl:65
+msgid "Spell checking is unavailable"
+msgstr "ЛипÑва програма за проверка на правопиÑа"
+
+#: lib/spellcheck.tcl:68
+msgid "Invalid spell checking configuration"
+msgstr "Ðеправилни наÑтройки на проверката на правопиÑа"
+
+#: lib/spellcheck.tcl:70
+#, tcl-format
+msgid "Reverting dictionary to %s."
+msgstr "Ползване на речник за език „%s“."
+
+#: lib/spellcheck.tcl:73
+msgid "Spell checker silently failed on startup"
+msgstr "Програмата за Ð¿Ñ€Ð°Ð²Ð¾Ð¿Ð¸Ñ Ð´Ð°Ð¶Ðµ не Ñтартира уÑпешно."
+
+#: lib/spellcheck.tcl:80
+msgid "Unrecognized spell checker"
+msgstr "Ðепозната програма за проверка на правопиÑа"
+
+#: lib/spellcheck.tcl:186
+msgid "No Suggestions"
+msgstr "ÐÑма предложениÑ"
+
+#: lib/spellcheck.tcl:388
+msgid "Unexpected EOF from spell checker"
+msgstr "Ðеочакван край на файл от програмата за проверка на правопиÑа"
+
+#: lib/spellcheck.tcl:392
+msgid "Spell Checker Failed"
+msgstr "Грешка в програмата за проверка на правопиÑа"
+
+#: lib/sshkey.tcl:34
+msgid "No keys found."
+msgstr "Ðе Ñа открити ключове."
+
+#: lib/sshkey.tcl:37
+#, tcl-format
+msgid "Found a public key in: %s"
+msgstr "Открит е публичен ключ в „%s“"
+
+#: lib/sshkey.tcl:43
+msgid "Generate Key"
+msgstr "Генериране на ключ"
+
+#: lib/sshkey.tcl:61
+msgid "Copy To Clipboard"
+msgstr "Копиране към ÑиÑÑ‚ÐµÐ¼Ð½Ð¸Ñ Ð±ÑƒÑ„ÐµÑ€"
+
+#: lib/sshkey.tcl:75
+msgid "Your OpenSSH Public Key"
+msgstr "ПубличниÑÑ‚ ви ключ за OpenSSH"
+
+#: lib/sshkey.tcl:83
+msgid "Generating..."
+msgstr "Генериране…"
+
+#: lib/sshkey.tcl:89
+#, tcl-format
+msgid ""
+"Could not start ssh-keygen:\n"
+"\n"
+"%s"
+msgstr ""
+"Програмата „ssh-keygen“ не може да Ñе Ñтартира:\n"
+"\n"
+"%s"
+
+#: lib/sshkey.tcl:116
+msgid "Generation failed."
+msgstr "ÐеуÑпешно генериране."
+
+#: lib/sshkey.tcl:123
+msgid "Generation succeeded, but no keys found."
+msgstr "Генерирането завърши уÑпешно, а не Ñа намерени ключове."
+
+#: lib/sshkey.tcl:126
+#, tcl-format
+msgid "Your key is in: %s"
+msgstr "Ключът ви е в „%s“"
+
+#: lib/status_bar.tcl:263
+#, tcl-format
+msgid "%s ... %*i of %*i %s (%3i%%)"
+msgstr "%s… %*i от общо %*i %s (%3i%%)"
+
+#: lib/tools_dlg.tcl:22
+#, tcl-format
+msgid "%s (%s): Add Tool"
+msgstr "%s (%s): ДобавÑне на команда"
+
+#: lib/tools_dlg.tcl:28
+msgid "Add New Tool Command"
+msgstr "ДобавÑне на команда"
+
+#: lib/tools_dlg.tcl:34
+msgid "Add globally"
+msgstr "Глобално добавÑне"
+
+#: lib/tools_dlg.tcl:46
+msgid "Tool Details"
+msgstr "ПодробноÑти за командата"
+
+#: lib/tools_dlg.tcl:49
+msgid "Use '/' separators to create a submenu tree:"
+msgstr "За Ñъздаване на подменюта използвайте знака „/“ за разделител:"
+
+#: lib/tools_dlg.tcl:60
+msgid "Command:"
+msgstr "Команда:"
+
+#: lib/tools_dlg.tcl:71
+msgid "Show a dialog before running"
+msgstr "Преди изпълнение да Ñе извежда диалогов прозорец"
+
+#: lib/tools_dlg.tcl:77
+msgid "Ask the user to select a revision (sets $REVISION)"
+msgstr "ПотребителÑÑ‚ да укаже верÑÐ¸Ñ (задаване на променливата $REVISION)"
+
+#: lib/tools_dlg.tcl:82
+msgid "Ask the user for additional arguments (sets $ARGS)"
+msgstr ""
+"ПотребителÑÑ‚ да укаже допълнителни аргументи (задаване на променливата $ARGS)"
+
+#: lib/tools_dlg.tcl:89
+msgid "Don't show the command output window"
+msgstr "Без показване на прозорец Ñ Ð¸Ð·Ñ…Ð¾Ð´Ð° от командата"
+
+#: lib/tools_dlg.tcl:94
+msgid "Run only if a diff is selected ($FILENAME not empty)"
+msgstr ""
+"Стартиране Ñамо Ñлед избор на разлика (променливата $FILENAME не е празна)"
+
+#: lib/tools_dlg.tcl:118
+msgid "Please supply a name for the tool."
+msgstr "Задайте име за командата."
+
+#: lib/tools_dlg.tcl:126
+#, tcl-format
+msgid "Tool '%s' already exists."
+msgstr "Командата „%s“ вече ÑъщеÑтвува."
+
+#: lib/tools_dlg.tcl:148
+#, tcl-format
+msgid ""
+"Could not add tool:\n"
+"%s"
+msgstr ""
+"Командата не може да Ñе добави:\n"
+"%s"
+
+#: lib/tools_dlg.tcl:187
+#, tcl-format
+msgid "%s (%s): Remove Tool"
+msgstr "%s (%s): Премахване на команда"
+
+#: lib/tools_dlg.tcl:193
+msgid "Remove Tool Commands"
+msgstr "Премахване на команди"
+
+#: lib/tools_dlg.tcl:198
+msgid "Remove"
+msgstr "Премахване"
+
+#: lib/tools_dlg.tcl:231
+msgid "(Blue denotes repository-local tools)"
+msgstr "(командите към локалното хранилище Ñа обозначени в Ñиньо)"
+
+#: lib/tools_dlg.tcl:283
+#, tcl-format
+msgid "%s (%s):"
+msgstr "%s (%s):"
+
+#: lib/tools_dlg.tcl:292
+#, tcl-format
+msgid "Run Command: %s"
+msgstr "Изпълнение на командата „%s“"
+
+#: lib/tools_dlg.tcl:306
+msgid "Arguments"
+msgstr "Ðргументи"
+
+#: lib/tools_dlg.tcl:341
+msgid "OK"
+msgstr "Добре"
+
+#: lib/tools.tcl:76
+#, tcl-format
+msgid "Running %s requires a selected file."
+msgstr "За изпълнението на „%s“ трÑбва да изберете файл."
+
+#: lib/tools.tcl:92
+#, tcl-format
+msgid "Are you sure you want to run %1$s on file \"%2$s\"?"
+msgstr "Сигурни ли Ñте, че иÑкате да изпълните „%1$s“ върху файла „%2$s“?"
+
+#: lib/tools.tcl:96
+#, tcl-format
+msgid "Are you sure you want to run %s?"
+msgstr "Сигурни ли Ñте, че иÑкате да изпълните „%s“?"
+
+#: lib/tools.tcl:118
+#, tcl-format
+msgid "Tool: %s"
+msgstr "Команда: %s"
+
+#: lib/tools.tcl:119
+#, tcl-format
+msgid "Running: %s"
+msgstr "Изпълнение: %s"
+
+#: lib/tools.tcl:158
+#, tcl-format
+msgid "Tool completed successfully: %s"
+msgstr "Командата завърши уÑпешно: %s"
+
+#: lib/tools.tcl:160
+#, tcl-format
+msgid "Tool failed: %s"
+msgstr "Командата върна грешка: %s"
+
+#: lib/transport.tcl:7
+#, tcl-format
+msgid "Fetching new changes from %s"
+msgstr "ДоÑтавÑне на промените от „%s“"
+
+#: lib/transport.tcl:18
+#, tcl-format
+msgid "remote prune %s"
+msgstr "окаÑтрÑне на ÑледÑщите клони към „%s“"
+
+#: lib/transport.tcl:19
+#, tcl-format
+msgid "Pruning tracking branches deleted from %s"
+msgstr "ОкаÑтрÑне на ÑледÑщите клони на изтритите клони от „%s“"
+
+#: lib/transport.tcl:25
+msgid "fetch all remotes"
+msgstr "доÑтавÑне от вÑички отдалечени"
+
+#: lib/transport.tcl:26
+msgid "Fetching new changes from all remotes"
+msgstr "ДоÑтавÑне на промените от вÑички отдалечени хранилища"
+
+#: lib/transport.tcl:40
+msgid "remote prune all remotes"
+msgstr "окаÑтрÑне на ÑледÑщите изтрити"
+
+#: lib/transport.tcl:41
+msgid "Pruning tracking branches deleted from all remotes"
+msgstr ""
+"ОкаÑтрÑне на ÑледÑщите клони на изтритите клони от вÑички отдалечени "
+"хранилища"
+
+#: lib/transport.tcl:55
+#, tcl-format
+msgid "Pushing changes to %s"
+msgstr "ИзтлаÑкване на промените към „%s“"
+
+#: lib/transport.tcl:93
+#, tcl-format
+msgid "Mirroring to %s"
+msgstr "ИзтлаÑкване на вÑичко към „%s“"
+
+#: lib/transport.tcl:111
+#, tcl-format
+msgid "Pushing %s %s to %s"
+msgstr "ИзтлаÑкване на %s „%s“ към „%s“"
+
+#: lib/transport.tcl:132
+msgid "Push Branches"
+msgstr "Клони за изтлаÑкване"
+
+#: lib/transport.tcl:147
+msgid "Source Branches"
+msgstr "Клони-източници"
+
+#: lib/transport.tcl:162
+msgid "Destination Repository"
+msgstr "Целево хранилище"
+
+#: lib/transport.tcl:205
+msgid "Transfer Options"
+msgstr "ÐаÑтройки при пренаÑÑнето"
+
+#: lib/transport.tcl:207
+msgid "Force overwrite existing branch (may discard changes)"
+msgstr ""
+"Изрично презапиÑване на ÑъщеÑтвуващ клон (нÑкои промени може да Ñе загубÑÑ‚)"
+
+#: lib/transport.tcl:211
+msgid "Use thin pack (for slow network connections)"
+msgstr "МакÑимална компреÑÐ¸Ñ (за бавни мрежови връзки)"
+
+#: lib/transport.tcl:215
+msgid "Include tags"
+msgstr "Включване на етикетите"
+
+#: lib/transport.tcl:229
+#, tcl-format
+msgid "%s (%s): Push"
+msgstr "%s (%s): ИзтлаÑкване"
diff --git a/git-send-email.perl b/git-send-email.perl
index 659e6c588b..437f8ac46a 100755
--- a/git-send-email.perl
+++ b/git-send-email.perl
@@ -1653,8 +1653,18 @@ EOF
default => $ask_default);
die __("Send this email reply required") unless defined $_;
if (/^n/i) {
+ # If we are skipping a message, we should make sure that
+ # the next message is treated as the successor to the
+ # previously sent message, and not the skipped message.
+ $message_num--;
return 0;
} elsif (/^e/i) {
+ # Since the same message will be sent again, we need to
+ # decrement the message number to the previous message.
+ # Otherwise, the edited message will be treated as a
+ # different message sent after the original non-edited
+ # message.
+ $message_num--;
return -1;
} elsif (/^q/i) {
cleanup_compose_files();
@@ -1778,7 +1788,8 @@ EOF
if (is_outlook($smtp_server)) {
if ($smtp->message =~ /<([^>]+)>/) {
$message_id = "<$1>";
- printf __("Outlook reassigned Message-ID to: %s\n"), $message_id;
+ $header =~ s/^(Message-ID:\s*).*\n/${1}$message_id\n/m;
+ printf __("Outlook reassigned Message-ID to: %s\n"), $message_id if $smtp->debug;
} else {
warn __("Warning: Could not retrieve Message-ID from server response.\n");
}
@@ -2101,6 +2112,17 @@ if ($validate) {
}
}
+ # Validate the SMTP server port, if provided.
+ if (defined $smtp_server_port) {
+ my $port = Git::port_num($smtp_server_port);
+ if ($port) {
+ $smtp_server_port = $port;
+ } else {
+ die sprintf(__("error: invalid SMTP port '%s'\n"),
+ $smtp_server_port);
+ }
+ }
+
# Run the loop once again to avoid gaps in the counter due to FIFO
# arguments provided by the user.
my $num = 1;
diff --git a/git.c b/git.c
index 77c4359522..07a5fe39fb 100644
--- a/git.c
+++ b/git.c
@@ -462,12 +462,12 @@ static int run_builtin(struct cmd_struct *p, int argc, const char **argv, struct
precompose_argv_prefix(argc, argv, NULL);
if (use_pager == -1 && run_setup &&
!(p->option & DELAY_PAGER_CONFIG))
- use_pager = check_pager_config(the_repository, p->cmd);
+ use_pager = check_pager_config(repo, p->cmd);
if (use_pager == -1 && p->option & USE_PAGER)
use_pager = 1;
if (run_setup && startup_info->have_repository)
/* get_git_dir() may set up repo, avoid that */
- trace_repo_setup(the_repository);
+ trace_repo_setup(repo);
commit_pager_choice();
if (!help && p->option & NEED_WORK_TREE)
@@ -646,7 +646,9 @@ static struct cmd_struct commands[] = {
{ "verify-pack", cmd_verify_pack },
{ "verify-tag", cmd_verify_tag, RUN_SETUP },
{ "version", cmd_version },
+#ifndef WITH_BREAKING_CHANGES
{ "whatchanged", cmd_whatchanged, RUN_SETUP },
+#endif
{ "worktree", cmd_worktree, RUN_SETUP },
{ "write-tree", cmd_write_tree, RUN_SETUP },
};
diff --git a/gitk-git/gitk b/gitk-git/gitk
index 19689765cd..427a8a96c9 100755
--- a/gitk-git/gitk
+++ b/gitk-git/gitk
@@ -7,7 +7,31 @@ exec wish "$0" -- "$@"
# and distributed under the terms of the GNU General Public Licence,
# either version 2, or (at your option) any later version.
-package require Tk
+if {[catch {package require Tcl 8.6-8.8} err]} {
+ catch {wm withdraw .}
+ tk_messageBox \
+ -icon error \
+ -type ok \
+ -title "gitk: fatal error" \
+ -message $err
+ exit 1
+}
+
+set MIN_GIT_VERSION 2.20
+regexp {^git version ([\d.]*\d)} [exec git version] _ git_version
+if {[package vcompare $git_version $MIN_GIT_VERSION] < 0} {
+ set message "The git executable found is too old.
+The minimum required version is $MIN_GIT_VERSION.0.
+The version of git found is $git_version."
+
+ catch {wm withdraw .}
+ tk_messageBox \
+ -icon error \
+ -type ok \
+ -title "gitk: fatal error" \
+ -message $message
+ exit 1
+}
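
An illustrative aside (not part of the patch): the version guard above relies on Tcl's component-wise version comparison, so a two-digit minor version such as 2.20 sorts after 2.3, which a plain string compare would get wrong.

# Hypothetical snippet showing why [package vcompare] is used for the check:
puts [package vcompare 2.20 2.3]     ;# prints 1  (2.20 is newer than 2.3)
puts [package vcompare 2.19 2.20]    ;# prints -1 (2.19 is older than 2.20)
puts [string compare 2.20 2.3]       ;# prints -1 (lexical compare gets it backwards)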
######################################################################
##
@@ -113,6 +137,91 @@ if {[is_Windows]} {
# End of safe PATH lookup stuff
+# Wrap exec/open to sanitize arguments
+
+# unsafe arguments begin with redirections or the pipe or background operators
+proc is_arg_unsafe {arg} {
+ regexp {^([<|>&]|2>)} $arg
+}
+
+proc make_arg_safe {arg} {
+ if {[is_arg_unsafe $arg]} {
+ set arg [file join . $arg]
+ }
+ return $arg
+}
+
+proc make_arglist_safe {arglist} {
+ set res {}
+ foreach arg $arglist {
+ lappend res [make_arg_safe $arg]
+ }
+ return $res
+}
+
+# executes one command
+# no redirections or pipelines are possible
+# cmd is a list that specifies the command and its arguments
+# calls `exec` and returns its value
+proc safe_exec {cmd} {
+ eval exec [make_arglist_safe $cmd]
+}
+
+# executes one command with redirections
+# no pipelines are possible
+# cmd is a list that specifies the command and its arguments
+# redir is a list that specifies redirections (output, background, constant(!) commands)
+# calls `exec` and returns its value
+proc safe_exec_redirect {cmd redir} {
+ eval exec [make_arglist_safe $cmd] $redir
+}
+
+proc safe_open_file {filename flags} {
+ # a file name starting with "|" would attempt to run a process
+ # but such a file name must be treated as a relative path
+ # hide the "|" behind "./"
+ if {[string index $filename 0] eq "|"} {
+ set filename [file join . $filename]
+ }
+ open $filename $flags
+}
+
+# opens a command pipeline for reading
+# cmd is a list that specifies the command and its arguments
+# calls `open` and returns the file id
+proc safe_open_command {cmd} {
+ open |[make_arglist_safe $cmd] r
+}
+
+# opens a command pipeline for reading and writing
+# cmd is a list that specifies the command and its arguments
+# calls `open` and returns the file id
+proc safe_open_command_rw {cmd} {
+ open |[make_arglist_safe $cmd] r+
+}
+
+# opens a command pipeline for reading with redirections
+# cmd is a list that specifies the command and its arguments
+# redir is a list that specifies redirections
+# calls `open` and returns the file id
+proc safe_open_command_redirect {cmd redir} {
+ set cmd [make_arglist_safe $cmd]
+ open |[concat $cmd $redir] r
+}
+
+# opens a pipeline with several commands for reading
+# cmds is a list of lists, each of which specifies a command and its arguments
+# calls `open` and returns the file id
+proc safe_open_pipeline {cmds} {
+ set cmd {}
+ foreach subcmd $cmds {
+ set cmd [concat $cmd | [make_arglist_safe $subcmd]]
+ }
+ open $cmd r
+}
+
+# End exec/open wrappers
+
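An illustrative sketch of how these wrappers defuse shell-style metacharacters (not part of the patch; the branch name below is a made-up example):

# A value beginning with a redirection or pipe character is rewritten as a
# relative path, so exec passes it to git as ordinary data.
set branch {>refs.txt}
puts [make_arg_safe $branch]                         ;# prints ./>refs.txt
# Runs: git rev-parse --verify ./>refs.txt  (no file named refs.txt is created)
catch {safe_exec [list git rev-parse --verify $branch]} out
puts $out                                            ;# git's error message
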
proc hasworktree {} {
return [expr {[exec git rev-parse --is-bare-repository] == "false" &&
[exec git rev-parse --is-inside-git-dir] == "false"}]
@@ -238,7 +347,7 @@ proc unmerged_files {files} {
set mlist {}
set nr_unmerged 0
if {[catch {
- set fd [open "| git ls-files -u" r]
+ set fd [safe_open_command {git ls-files -u}]
} err]} {
show_error {} . "[mc "Couldn't get list of unmerged files:"] $err"
exit 1
@@ -260,7 +369,7 @@ proc unmerged_files {files} {
proc parseviewargs {n arglist} {
global vdatemode vmergeonly vflags vdflags vrevs vfiltered vorigargs env
global vinlinediff
- global worddiff git_version
+ global worddiff
set vdatemode($n) 0
set vmergeonly($n) 0
@@ -311,14 +420,10 @@ proc parseviewargs {n arglist} {
"--color-words*" - "--word-diff=color" {
# These trigger a word diff in the console interface,
# so help the user by enabling our own support
- if {[package vcompare $git_version "1.7.2"] >= 0} {
- set worddiff [mc "Color words"]
- }
+ set worddiff [mc "Color words"]
}
"--word-diff*" {
- if {[package vcompare $git_version "1.7.2"] >= 0} {
- set worddiff [mc "Markup words"]
- }
+ set worddiff [mc "Markup words"]
}
"--stat=*" - "--numstat" - "--shortstat" - "--summary" -
"--check" - "--exit-code" - "--quiet" - "--topo-order" -
@@ -394,20 +499,21 @@ proc parseviewargs {n arglist} {
proc parseviewrevs {view revs} {
global vposids vnegids
+ global hashlength
if {$revs eq {}} {
set revs HEAD
} elseif {[lsearch -exact $revs --all] >= 0} {
lappend revs HEAD
}
- if {[catch {set ids [eval exec git rev-parse $revs]} err]} {
+ if {[catch {set ids [safe_exec [concat git rev-parse $revs]]} err]} {
# we get stdout followed by stderr in $err
# for an unknown rev, git rev-parse echoes it and then errors out
set errlines [split $err "\n"]
set badrev {}
for {set l 0} {$l < [llength $errlines]} {incr l} {
set line [lindex $errlines $l]
- if {!([string length $line] == 40 && [string is xdigit $line])} {
+ if {!([string length $line] == $hashlength && [string is xdigit $line])} {
if {[string match "fatal:*" $line]} {
if {[string match "fatal: ambiguous argument*" $line]
&& $badrev ne {}} {
@@ -457,16 +563,6 @@ proc parseviewrevs {view revs} {
return $ret
}
-# Escapes a list of filter paths to be passed to git log via stdin. Note that
-# paths must not be quoted.
-proc escape_filter_paths {paths} {
- set escaped [list]
- foreach path $paths {
- lappend escaped [string map {\\ \\\\ "\ " "\\\ "} $path]
- }
- return $escaped
-}
-
# Start off a git log process and arrange to read its output
proc start_rev_list {view} {
global startmsecs commitidx viewcomplete curview
@@ -476,7 +572,6 @@ proc start_rev_list {view} {
global viewactive viewinstances vmergeonly
global mainheadid viewmainheadid viewmainheadid_orig
global vcanopt vflags vrevs vorigargs
- global show_notes
set startmsecs [clock clicks -milliseconds]
set commitidx($view) 0
@@ -488,7 +583,7 @@ proc start_rev_list {view} {
set args $viewargs($view)
if {$viewargscmd($view) ne {}} {
if {[catch {
- set str [exec sh -c $viewargscmd($view)]
+ set str [safe_exec [list sh -c $viewargscmd($view)]]
} err]} {
error_popup "[mc "Error executing --argscmd command:"] $err"
return 0
@@ -526,10 +621,9 @@ proc start_rev_list {view} {
}
if {[catch {
- set fd [open [concat | git log --no-color -z --pretty=raw $show_notes \
- --parents --boundary $args --stdin \
- "<<[join [concat $revs "--" \
- [escape_filter_paths $files]] "\\n"]"] r]
+ set fd [safe_open_command_redirect [concat git log --no-color -z --pretty=raw --show-notes \
+ --parents --boundary $args --stdin] \
+ [list "<<[join [concat $revs "--" $files] "\n"]"]]
} err]} {
error_popup "[mc "Error executing git log:"] $err"
return 0
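
For illustration only (assumed revision and paths, not from the patch): the --stdin here-string built above feeds the revision list and path limits to git log on standard input, joined with real newlines, so paths no longer need backslash escaping.

# Sketch: pass one revision and two path limits (assumed to exist) via stdin.
set revs  {HEAD}
set files {{a file.txt} dir/b.c}
set fd [safe_open_command_redirect \
            {git log --oneline --stdin} \
            [list "<<[join [concat $revs -- $files] "\n"]"]]
while {[gets $fd line] >= 0} { puts $line }
close $fd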
@@ -563,9 +657,9 @@ proc stop_instance {inst} {
set pid [pid $fd]
if {$::tcl_platform(platform) eq {windows}} {
- exec taskkill /pid $pid
+ safe_exec [list taskkill /pid $pid]
} else {
- exec kill $pid
+ safe_exec [list kill $pid]
}
}
catch {close $fd}
@@ -623,7 +717,7 @@ proc updatecommits {} {
global mainheadid viewmainheadid viewmainheadid_orig pending_select
global hasworktree
global varcid vposids vnegids vflags vrevs
- global show_notes
+ global hashlength
set hasworktree [hasworktree]
rereadrefs
@@ -657,7 +751,7 @@ proc updatecommits {} {
# take out positive refs that we asked for before or
# that we have already seen
foreach rev $revs {
- if {[string length $rev] == 40} {
+ if {[string length $rev] == $hashlength} {
if {[lsearch -exact $oldpos $rev] < 0
&& ![info exists varcid($view,$rev)]} {
lappend newrevs $rev
@@ -680,11 +774,9 @@ proc updatecommits {} {
set args $vorigargs($view)
}
if {[catch {
- set fd [open [concat | git log --no-color -z --pretty=raw $show_notes \
- --parents --boundary $args --stdin \
- "<<[join [concat $revs "--" \
- [escape_filter_paths \
- $vfilelimit($view)]] "\\n"]"] r]
+ set fd [safe_open_command_redirect [concat git log --no-color -z --pretty=raw --show-notes \
+ --parents --boundary $args --stdin] \
+ [list "<<[join [concat $revs "--" $vfilelimit($view)] "\n"]"]]
} err]} {
error_popup "[mc "Error executing git log:"] $err"
return
@@ -1542,6 +1634,7 @@ proc getcommitlines {fd inst view updating} {
global parents children curview hlview
global idpending ordertok
global varccommits varcid varctok vtokmod vfilelimit vshortids
+ global hashlength
set stuff [read $fd 500000]
# git log doesn't terminate the last commit with a null...
@@ -1624,7 +1717,7 @@ proc getcommitlines {fd inst view updating} {
}
set ok 1
foreach id $ids {
- if {[string length $id] != 40} {
+ if {[string length $id] != $hashlength} {
set ok 0
break
}
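# $hashlength is initialized elsewhere in this patch (40 for SHA-1, 64 for
# SHA-256 repositories).  A plausible way to derive it, sketched here purely
# as an assumption for illustration:
proc sketch_hashlength {} {
    if {[catch {safe_exec [list git rev-parse --show-object-format]} fmt]} {
        return 40
    }
    return [expr {$fmt eq "sha256" ? 64 : 40}]
}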
@@ -1651,8 +1744,8 @@ proc getcommitlines {fd inst view updating} {
# and if we already know about it, using the rewritten
# parent as a substitute parent for $id's children.
if {![catch {
- set rwid [exec git rev-list --first-parent --max-count=1 \
- $id -- $vfilelimit($view)]
+ set rwid [safe_exec [list git rev-list --first-parent --max-count=1 \
+ $id -- $vfilelimit($view)]]
}]} {
if {$rwid ne {} && [info exists varcid($view,$rwid)]} {
# use $rwid in place of $id
@@ -1772,7 +1865,7 @@ proc do_readcommit {id} {
global tclencoding
# Invoke git-log to handle automatic encoding conversion
- set fd [open [concat | git log --no-color --pretty=raw -1 $id] r]
+ set fd [safe_open_command [concat git log --no-color --pretty=raw -1 $id]]
# Read the results using i18n.logoutputencoding
fconfigure $fd -translation lf -eofchar {}
if {$tclencoding != {}} {
@@ -1870,8 +1963,8 @@ proc getcommit {id} {
return 1
}
-# Expand an abbreviated commit ID to a list of full 40-char IDs that match
-# and are present in the current view.
+# Expand an abbreviated commit ID to a list of full 40-char (or 64-char
+# for a SHA-256 repo) IDs that match and are present in the current view.
# This is fairly slow...
proc longid {prefix} {
global varcid curview vshortids
@@ -1899,23 +1992,24 @@ proc longid {prefix} {
}
proc readrefs {} {
- global tagids idtags headids idheads tagobjid
+ global tagids idtags headids idheads tagobjid upstreamofref
global otherrefids idotherrefs mainhead mainheadid
global selecthead selectheadid
global hideremotes
global tclencoding
+ global hashlength
- foreach v {tagids idtags headids idheads otherrefids idotherrefs} {
+ foreach v {tagids idtags headids idheads otherrefids idotherrefs upstreamofref} {
unset -nocomplain $v
}
- set refd [open [list | git show-ref -d] r]
+ set refd [safe_open_command [list git show-ref -d]]
if {$tclencoding != {}} {
fconfigure $refd -encoding $tclencoding
}
while {[gets $refd line] >= 0} {
- if {[string index $line 40] ne " "} continue
- set id [string range $line 0 39]
- set ref [string range $line 41 end]
+ if {[string index $line $hashlength] ne " "} continue
+ set id [string range $line 0 [expr {$hashlength - 1}]]
+ set ref [string range $line [expr {$hashlength + 1}] end]
if {![string match "refs/*" $ref]} continue
set name [string range $ref 5 end]
if {[string match "remotes/*" $name]} {
@@ -1956,9 +2050,20 @@ proc readrefs {} {
set selectheadid {}
if {$selecthead ne {}} {
catch {
- set selectheadid [exec git rev-parse --verify $selecthead]
+ set selectheadid [safe_exec [list git rev-parse --verify $selecthead]]
}
}
+ # Load the local_branch -> upstream mapping;
+ # each line of the for-each-ref output has the form: local_branch NUL upstream
+ set refd [safe_open_command [list git for-each-ref {--format=%(refname:short)%00%(upstream)} refs/heads/]]
+ while {[gets $refd local_tracking] >= 0} {
+ set line [split $local_tracking \0]
+ if {[lindex $line 1] ne {}} {
+ set upstream_ref [string map {"refs/" ""} [lindex $line 1]]
+ set upstreamofref([lindex $line 0]) $upstream_ref
+ }
+ }
+ catch {close $refd}
}
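# Worked example of the parsing above: with a branch "main" tracking
# origin/main and a branch "topic" with no upstream, the for-each-ref pipe
# yields (NUL written as \0):
#   main\0refs/remotes/origin/main
#   topic\0
set line [split "main\0refs/remotes/origin/main" \0]
# -> {main refs/remotes/origin/main}; [lindex $line 1] is non-empty, so
# upstreamofref(main) becomes "remotes/origin/main" after stripping "refs/".
# For "topic" the second field is empty and no entry is recorded.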
# skip over fake commits
@@ -1999,23 +2104,12 @@ proc removehead {id name} {
}
proc ttk_toplevel {w args} {
- global use_ttk
eval [linsert $args 0 ::toplevel $w]
- if {$use_ttk} {
- place [ttk::frame $w._toplevel_background] -x 0 -y 0 -relwidth 1 -relheight 1
- }
+ place [ttk::frame $w._toplevel_background] -x 0 -y 0 -relwidth 1 -relheight 1
return $w
}
proc make_transient {window origin} {
- global have_tk85
-
- # In MacOS Tk 8.4 transient appears to work by setting
- # overrideredirect, which is utterly useless, since the
- # windows get no border, and are not even kept above
- # the parent.
- if {!$have_tk85 && [tk windowingsystem] eq {aqua}} return
-
wm transient $window $origin
# Windows fails to place transient windows normally, so
@@ -2026,12 +2120,10 @@ proc make_transient {window origin} {
}
proc show_error {w top msg} {
- global NS
- if {![info exists NS]} {set NS ""}
if {[wm state $top] eq "withdrawn"} { wm deiconify $top }
message $w.m -text $msg -justify center -aspect 400
pack $w.m -side top -fill x -padx 20 -pady 20
- ${NS}::button $w.ok -default active -text [mc OK] -command "destroy $top"
+ ttk::button $w.ok -default active -text [mc OK] -command "destroy $top"
pack $w.ok -side bottom -fill x
bind $top <Visibility> "grab $top; focus $top"
bind $top <Key-Return> "destroy $top"
@@ -2053,16 +2145,16 @@ proc error_popup {msg {owner .}} {
}
proc confirm_popup {msg {owner .}} {
- global confirm_ok NS
+ global confirm_ok
set confirm_ok 0
set w .confirm
ttk_toplevel $w
make_transient $w $owner
message $w.m -text $msg -justify center -aspect 400
pack $w.m -side top -fill x -padx 20 -pady 20
- ${NS}::button $w.ok -text [mc OK] -command "set confirm_ok 1; destroy $w"
+ ttk::button $w.ok -text [mc OK] -command "set confirm_ok 1; destroy $w"
pack $w.ok -side left -fill x
- ${NS}::button $w.cancel -text [mc Cancel] -command "destroy $w"
+ ttk::button $w.cancel -text [mc Cancel] -command "destroy $w"
pack $w.cancel -side right -fill x
bind $w <Visibility> "grab $w; focus $w"
bind $w <Key-Return> "set confirm_ok 1; destroy $w"
@@ -2078,8 +2170,6 @@ proc haveselectionclipboard {} {
}
proc setoptions {} {
- global use_ttk
-
if {[tk windowingsystem] ne "win32"} {
option add *Panedwindow.showHandle 1 startupFile
option add *Panedwindow.sashRelief raised startupFile
@@ -2172,23 +2262,52 @@ proc cleardropsel {w} {
$w selection clear
}
proc makedroplist {w varname args} {
- global use_ttk
- if {$use_ttk} {
- set width 0
- foreach label $args {
- set cx [string length $label]
- if {$cx > $width} {set width $cx}
- }
- set gm [ttk::combobox $w -width $width -state readonly\
- -textvariable $varname -values $args \
- -exportselection false]
- bind $gm <<ComboboxSelected>> [list $gm selection clear]
- } else {
- set gm [eval [linsert $args 0 tk_optionMenu $w $varname]]
- }
+ set width 0
+ foreach label $args {
+ set cx [string length $label]
+ if {$cx > $width} {set width $cx}
+ }
+ set gm [ttk::combobox $w -width $width -state readonly\
+ -textvariable $varname -values $args \
+ -exportselection false]
+ bind $gm <<ComboboxSelected>> [list $gm selection clear]
return $gm
}
+proc scrollval {D {koff 0}} {
+ global kscroll scroll_D0
+ return [expr int(-($D / $scroll_D0) * max(1, $kscroll-$koff))]
+}
+
+proc bind_mousewheel {} {
+ global canv cflist ctext
+ bindall <MouseWheel> {allcanvs yview scroll [scrollval %D] units}
+ bindall <Shift-MouseWheel> break
+ bind $ctext <MouseWheel> {$ctext yview scroll [scrollval %D 2] units}
+ bind $ctext <Shift-MouseWheel> {$ctext xview scroll [scrollval %D 2] units}
+ bind $cflist <MouseWheel> {$cflist yview scroll [scrollval %D 2] units}
+ bind $cflist <Shift-MouseWheel> break
+ bind $canv <Shift-MouseWheel> {$canv xview scroll [scrollval %D] units}
+}
+
+proc bind_mousewheel_buttons {} {
+ global canv cflist ctext
+ bindall <ButtonRelease-4> {allcanvs yview scroll [scrollval 1] units}
+ bindall <ButtonRelease-5> {allcanvs yview scroll [scrollval -1] units}
+ bindall <Shift-ButtonRelease-4> break
+ bindall <Shift-ButtonRelease-5> break
+ bind $ctext <ButtonRelease-4> {$ctext yview scroll [scrollval 1 2] units}
+ bind $ctext <ButtonRelease-5> {$ctext yview scroll [scrollval -1 2] units}
+ bind $ctext <Shift-ButtonRelease-4> {$ctext xview scroll [scrollval 1 2] units}
+ bind $ctext <Shift-ButtonRelease-5> {$ctext xview scroll [scrollval -1 2] units}
+ bind $cflist <ButtonRelease-4> {$cflist yview scroll [scrollval 1 2] units}
+ bind $cflist <ButtonRelease-5> {$cflist yview scroll [scrollval -1 2] units}
+ bind $cflist <Shift-ButtonRelease-4> break
+ bind $cflist <Shift-ButtonRelease-5> break
+ bind $canv <Shift-ButtonRelease-4> {$canv xview scroll [scrollval 1] units}
+ bind $canv <Shift-ButtonRelease-5> {$canv xview scroll [scrollval -1] units}
+}
+
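# Worked example of the scrollval arithmetic above, assuming kscroll is the
# user-configured number of lines per wheel step (not initialized in this hunk):
#   win32: one notch delivers %D = +/-120 and scroll_D0 = 120, so
#          [scrollval -120]   = int(-(-120/120) * max(1, $kscroll))     = +max(1, $kscroll)
#          [scrollval  120 2] = int(-( 120/120) * max(1, $kscroll - 2)) = -max(1, $kscroll - 2)
#   x11:   the ButtonRelease-4/5 bindings pass +/-1 with scroll_D0 = 1, so
#          wheel-up yields -max(1, $kscroll) units (scroll up) and wheel-down
#          the same amount downwards.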
proc makewindow {} {
global canv canv2 canv3 linespc charspc ctext cflist cscroll
global tabstop
@@ -2207,9 +2326,8 @@ proc makewindow {} {
global headctxmenu progresscanv progressitem progresscoords statusw
global fprogitem fprogcoord lastprogupdate progupdatepending
global rprogitem rprogcoord rownumsel numcommits
- global have_tk85 have_tk86 use_ttk NS
- global git_version
global worddiff
+ global hashlength scroll_D0
# The "mc" arguments here are purely so that xgettext
# sees the following string as needing to be translated
@@ -2220,7 +2338,7 @@ proc makewindow {} {
{mc "Reread re&ferences" command rereadrefs}
{mc "&List references" command showrefs -accelerator F2}
{xx "" separator}
- {mc "Start git &gui" command {exec git gui &}}
+ {mc "Start git &gui" command {safe_exec_redirect [list git gui] [list &]}}
{xx "" separator}
{mc "&Quit" command doquit -accelerator Meta1-Q}
}}
@@ -2261,13 +2379,11 @@ proc makewindow {} {
makemenu .bar $bar
. configure -menu .bar
- if {$use_ttk} {
- # cover the non-themed toplevel with a themed frame.
- place [ttk::frame ._main_background] -x 0 -y 0 -relwidth 1 -relheight 1
- }
+ # cover the non-themed toplevel with a themed frame.
+ place [ttk::frame ._main_background] -x 0 -y 0 -relwidth 1 -relheight 1
# the gui has upper and lower half, parts of a paned window.
- ${NS}::panedwindow .ctop -orient vertical
+ ttk::panedwindow .ctop -orient vertical
# possibly use assumed geometry
if {![info exists geometry(pwsash0)]} {
@@ -2280,12 +2396,9 @@ proc makewindow {} {
}
# the upper half will have a paned window, a scroll bar to the right, and some stuff below
- ${NS}::frame .tf -height $geometry(topheight) -width $geometry(topwidth)
- ${NS}::frame .tf.histframe
- ${NS}::panedwindow .tf.histframe.pwclist -orient horizontal
- if {!$use_ttk} {
- .tf.histframe.pwclist configure -sashpad 0 -handlesize 4
- }
+ ttk::frame .tf -height $geometry(topheight) -width $geometry(topwidth)
+ ttk::frame .tf.histframe
+ ttk::panedwindow .tf.histframe.pwclist -orient horizontal
# create three canvases
set cscroll .tf.histframe.csb
@@ -2293,6 +2406,7 @@ proc makewindow {} {
canvas $canv \
-selectbackground $selectbgcolor \
-background $bgcolor -bd 0 \
+ -xscrollincr $linespc \
-yscrollincr $linespc -yscrollcommand "scrollcanv $cscroll"
.tf.histframe.pwclist add $canv
set canv2 .tf.histframe.pwclist.canv2
@@ -2305,28 +2419,22 @@ proc makewindow {} {
-selectbackground $selectbgcolor \
-background $bgcolor -bd 0 -yscrollincr $linespc
.tf.histframe.pwclist add $canv3
- if {$use_ttk} {
- bind .tf.histframe.pwclist <Map> {
- bind %W <Map> {}
- .tf.histframe.pwclist sashpos 1 [lindex $::geometry(pwsash1) 0]
- .tf.histframe.pwclist sashpos 0 [lindex $::geometry(pwsash0) 0]
- }
- } else {
- eval .tf.histframe.pwclist sash place 0 $geometry(pwsash0)
- eval .tf.histframe.pwclist sash place 1 $geometry(pwsash1)
+ bind .tf.histframe.pwclist <Map> {
+ bind %W <Map> {}
+ .tf.histframe.pwclist sashpos 1 [lindex $::geometry(pwsash1) 0]
+ .tf.histframe.pwclist sashpos 0 [lindex $::geometry(pwsash0) 0]
}
# a scroll bar to rule them
- ${NS}::scrollbar $cscroll -command {allcanvs yview}
- if {!$use_ttk} {$cscroll configure -highlightthickness 0}
+ ttk::scrollbar $cscroll -command {allcanvs yview}
pack $cscroll -side right -fill y
bind .tf.histframe.pwclist <Configure> {resizeclistpanes %W %w}
lappend bglist $canv $canv2 $canv3
pack .tf.histframe.pwclist -fill both -expand 1 -side left
# we have two button bars at bottom of top frame. Bar 1
- ${NS}::frame .tf.bar
- ${NS}::frame .tf.lbar -height 15
+ ttk::frame .tf.bar
+ ttk::frame .tf.lbar -height 15
set sha1entry .tf.bar.sha1
set entries $sha1entry
@@ -2335,7 +2443,7 @@ proc makewindow {} {
-command gotocommit -width 8
$sha1but conf -disabledforeground [$sha1but cget -foreground]
pack .tf.bar.sha1label -side left
- ${NS}::entry $sha1entry -width 40 -font textfont -textvariable sha1string
+ ttk::entry $sha1entry -width $hashlength -font textfont -textvariable sha1string
trace add variable sha1string write sha1change
pack $sha1entry -side left -pady 2
@@ -2360,50 +2468,30 @@ proc makewindow {} {
image create bitmap bm-right -data $bm_right_data -foreground $uifgcolor
image create bitmap bm-right-gray -data $bm_right_data -foreground $uifgdisabledcolor
- ${NS}::button .tf.bar.leftbut -command goback -state disabled -width 26
- if {$use_ttk} {
- .tf.bar.leftbut configure -image [list bm-left disabled bm-left-gray]
- } else {
- .tf.bar.leftbut configure -image bm-left
- }
+ ttk::button .tf.bar.leftbut -command goback -state disabled -width 26
+ .tf.bar.leftbut configure -image [list bm-left disabled bm-left-gray]
pack .tf.bar.leftbut -side left -fill y
- ${NS}::button .tf.bar.rightbut -command goforw -state disabled -width 26
- if {$use_ttk} {
- .tf.bar.rightbut configure -image [list bm-right disabled bm-right-gray]
- } else {
- .tf.bar.rightbut configure -image bm-right
- }
+ ttk::button .tf.bar.rightbut -command goforw -state disabled -width 26
+ .tf.bar.rightbut configure -image [list bm-right disabled bm-right-gray]
pack .tf.bar.rightbut -side left -fill y
- ${NS}::label .tf.bar.rowlabel -text [mc "Row"]
+ ttk::label .tf.bar.rowlabel -text [mc "Row"]
set rownumsel {}
- ${NS}::label .tf.bar.rownum -width 7 -textvariable rownumsel \
+ ttk::label .tf.bar.rownum -width 7 -textvariable rownumsel \
-relief sunken -anchor e
- ${NS}::label .tf.bar.rowlabel2 -text "/"
- ${NS}::label .tf.bar.numcommits -width 7 -textvariable numcommits \
+ ttk::label .tf.bar.rowlabel2 -text "/"
+ ttk::label .tf.bar.numcommits -width 7 -textvariable numcommits \
-relief sunken -anchor e
pack .tf.bar.rowlabel .tf.bar.rownum .tf.bar.rowlabel2 .tf.bar.numcommits \
-side left
- if {!$use_ttk} {
- foreach w {rownum numcommits} {.tf.bar.$w configure -font textfont}
- }
global selectedline
trace add variable selectedline write selectedline_change
# Status label and progress bar
set statusw .tf.bar.status
- ${NS}::label $statusw -width 15 -relief sunken
+ ttk::label $statusw -width 15 -relief sunken
pack $statusw -side left -padx 5
- if {$use_ttk} {
- set progresscanv [ttk::progressbar .tf.bar.progress]
- } else {
- set h [expr {[font metrics uifont -linespace] + 2}]
- set progresscanv .tf.bar.progress
- canvas $progresscanv -relief sunken -height $h -borderwidth 2
- set progressitem [$progresscanv create rect -1 0 0 $h -fill "#00ff00"]
- set fprogitem [$progresscanv create rect -1 0 0 $h -fill yellow]
- set rprogitem [$progresscanv create rect -1 0 0 $h -fill red]
- }
+ set progresscanv [ttk::progressbar .tf.bar.progress]
pack $progresscanv -side right -expand 1 -fill x -padx {0 2}
set progresscoords {0 0}
set fprogcoord 0
@@ -2413,7 +2501,7 @@ proc makewindow {} {
set progupdatepending 0
# build up the bottom bar of upper window
- ${NS}::label .tf.lbar.flabel -text "[mc "Find"] "
+ ttk::label .tf.lbar.flabel -text "[mc "Find"] "
set bm_down_data {
#define down_width 16
@@ -2425,7 +2513,7 @@ proc makewindow {} {
0xf0, 0x0f, 0xe0, 0x07, 0xc0, 0x03, 0x80, 0x01};
}
image create bitmap bm-down -data $bm_down_data -foreground $uifgcolor
- ${NS}::button .tf.lbar.fnext -width 26 -command {dofind 1 1}
+ ttk::button .tf.lbar.fnext -width 26 -command {dofind 1 1}
.tf.lbar.fnext configure -image bm-down
set bm_up_data {
@@ -2438,10 +2526,10 @@ proc makewindow {} {
0x80, 0x01, 0x80, 0x01, 0x80, 0x01, 0x80, 0x01};
}
image create bitmap bm-up -data $bm_up_data -foreground $uifgcolor
- ${NS}::button .tf.lbar.fprev -width 26 -command {dofind -1 1}
+ ttk::button .tf.lbar.fprev -width 26 -command {dofind -1 1}
.tf.lbar.fprev configure -image bm-up
- ${NS}::label .tf.lbar.flab2 -text " [mc "commit"] "
+ ttk::label .tf.lbar.flab2 -text " [mc "commit"] "
pack .tf.lbar.flabel .tf.lbar.fnext .tf.lbar.fprev .tf.lbar.flab2 \
-side left -fill y
@@ -2457,7 +2545,7 @@ proc makewindow {} {
set findstring {}
set fstring .tf.lbar.findstring
lappend entries $fstring
- ${NS}::entry $fstring -width 30 -textvariable findstring
+ ttk::entry $fstring -width 30 -textvariable findstring
trace add variable findstring write find_change
set findtype [mc "Exact"]
set findtypemenu [makedroplist .tf.lbar.findtype \
@@ -2476,45 +2564,41 @@ proc makewindow {} {
pack .tf.bar -in .tf -side bottom -fill x
pack .tf.histframe -fill both -side top -expand 1
.ctop add .tf
- if {!$use_ttk} {
- .ctop paneconfigure .tf -height $geometry(topheight)
- .ctop paneconfigure .tf -width $geometry(topwidth)
- }
# now build up the bottom
- ${NS}::panedwindow .pwbottom -orient horizontal
+ ttk::panedwindow .pwbottom -orient horizontal
# lower left, a text box over search bar, scroll bar to the right
# if we know window height, then that will set the lower text height, otherwise
# we set lower text height which will drive window height
if {[info exists geometry(main)]} {
- ${NS}::frame .bleft -width $geometry(botwidth)
+ ttk::frame .bleft -width $geometry(botwidth)
} else {
- ${NS}::frame .bleft -width $geometry(botwidth) -height $geometry(botheight)
+ ttk::frame .bleft -width $geometry(botwidth) -height $geometry(botheight)
}
- ${NS}::frame .bleft.top
- ${NS}::frame .bleft.mid
- ${NS}::frame .bleft.bottom
+ ttk::frame .bleft.top
+ ttk::frame .bleft.mid
+ ttk::frame .bleft.bottom
# gap between sub-widgets
set wgap [font measure uifont "i"]
- ${NS}::button .bleft.top.search -text [mc "Search"] -command dosearch
+ ttk::button .bleft.top.search -text [mc "Search"] -command dosearch
pack .bleft.top.search -side left -padx 5
set sstring .bleft.top.sstring
set searchstring ""
- ${NS}::entry $sstring -width 20 -textvariable searchstring
+ ttk::entry $sstring -width 20 -textvariable searchstring
lappend entries $sstring
trace add variable searchstring write incrsearch
pack $sstring -side left -expand 1 -fill x
- ${NS}::radiobutton .bleft.mid.diff -text [mc "Diff"] \
+ ttk::radiobutton .bleft.mid.diff -text [mc "Diff"] \
-command changediffdisp -variable diffelide -value {0 0}
- ${NS}::radiobutton .bleft.mid.old -text [mc "Old version"] \
+ ttk::radiobutton .bleft.mid.old -text [mc "Old version"] \
-command changediffdisp -variable diffelide -value {0 1}
- ${NS}::radiobutton .bleft.mid.new -text [mc "New version"] \
+ ttk::radiobutton .bleft.mid.new -text [mc "New version"] \
-command changediffdisp -variable diffelide -value {1 0}
- ${NS}::label .bleft.mid.labeldiffcontext -text " [mc "Lines of context"]: "
+ ttk::label .bleft.mid.labeldiffcontext -text " [mc "Lines of context"]: "
pack .bleft.mid.diff .bleft.mid.old .bleft.mid.new -side left -ipadx $wgap
spinbox .bleft.mid.diffcontext -width 5 \
-from 0 -increment 1 -to 10000000 \
@@ -2524,28 +2608,24 @@ proc makewindow {} {
trace add variable diffcontextstring write diffcontextchange
lappend entries .bleft.mid.diffcontext
pack .bleft.mid.labeldiffcontext .bleft.mid.diffcontext -side left -ipadx $wgap
- ${NS}::checkbutton .bleft.mid.ignspace -text [mc "Ignore space change"] \
+ ttk::checkbutton .bleft.mid.ignspace -text [mc "Ignore space change"] \
-command changeignorespace -variable ignorespace
pack .bleft.mid.ignspace -side left -padx 5
set worddiff [mc "Line diff"]
- if {[package vcompare $git_version "1.7.2"] >= 0} {
- makedroplist .bleft.mid.worddiff worddiff [mc "Line diff"] \
- [mc "Markup words"] [mc "Color words"]
- trace add variable worddiff write changeworddiff
- pack .bleft.mid.worddiff -side left -padx 5
- }
+ makedroplist .bleft.mid.worddiff worddiff [mc "Line diff"] \
+ [mc "Markup words"] [mc "Color words"]
+ trace add variable worddiff write changeworddiff
+ pack .bleft.mid.worddiff -side left -padx 5
set ctext .bleft.bottom.ctext
text $ctext -background $bgcolor -foreground $fgcolor \
-state disabled -undo 0 -font textfont \
-yscrollcommand scrolltext -wrap $wrapdefault \
-xscrollcommand ".bleft.bottom.sbhorizontal set"
- if {$have_tk85} {
- $ctext conf -tabstyle wordprocessor
- }
- ${NS}::scrollbar .bleft.bottom.sb -command "$ctext yview"
- ${NS}::scrollbar .bleft.bottom.sbhorizontal -command "$ctext xview" -orient h
+ $ctext conf -tabstyle wordprocessor
+ ttk::scrollbar .bleft.bottom.sb -command "$ctext yview"
+ ttk::scrollbar .bleft.bottom.sbhorizontal -command "$ctext xview" -orient h
pack .bleft.top -side top -fill x
pack .bleft.mid -side top -fill x
grid $ctext .bleft.bottom.sb -sticky nsew
@@ -2596,16 +2676,13 @@ proc makewindow {} {
$ctext tag lower d0
.pwbottom add .bleft
- if {!$use_ttk} {
- .pwbottom paneconfigure .bleft -width $geometry(botwidth)
- }
# lower right
- ${NS}::frame .bright
- ${NS}::frame .bright.mode
- ${NS}::radiobutton .bright.mode.patch -text [mc "Patch"] \
+ ttk::frame .bright
+ ttk::frame .bright.mode
+ ttk::radiobutton .bright.mode.patch -text [mc "Patch"] \
-command reselectline -variable cmitmode -value "patch"
- ${NS}::radiobutton .bright.mode.tree -text [mc "Tree"] \
+ ttk::radiobutton .bright.mode.tree -text [mc "Tree"] \
-command reselectline -variable cmitmode -value "tree"
grid .bright.mode.patch .bright.mode.tree -sticky ew
pack .bright.mode -side top -fill x
@@ -2621,7 +2698,7 @@ proc makewindow {} {
-spacing1 1 -spacing3 1
lappend bglist $cflist
lappend fglist $cflist
- ${NS}::scrollbar .bright.sb -command "$cflist yview"
+ ttk::scrollbar .bright.sb -command "$cflist yview"
pack .bright.sb -side right -fill y
pack $cflist -side left -fill both -expand 1
$cflist tag configure highlight \
@@ -2656,44 +2733,31 @@ proc makewindow {} {
set ::BM "2"
}
- if {$use_ttk} {
- bind .ctop <Map> {
- bind %W <Map> {}
- %W sashpos 0 $::geometry(topheight)
- }
- bind .pwbottom <Map> {
- bind %W <Map> {}
- %W sashpos 0 $::geometry(botwidth)
- }
- bind .pwbottom <Configure> {resizecdetpanes %W %w}
+ bind .ctop <Map> {
+ bind %W <Map> {}
+ %W sashpos 0 $::geometry(topheight)
+ }
+ bind .pwbottom <Map> {
+ bind %W <Map> {}
+ %W sashpos 0 $::geometry(botwidth)
}
+ bind .pwbottom <Configure> {resizecdetpanes %W %w}
pack .ctop -fill both -expand 1
bindall <1> {selcanvline %W %x %y}
- #bindall <B1-Motion> {selcanvline %W %x %y}
+
+ # Mouse / touchpad scrolling
if {[tk windowingsystem] == "win32"} {
- bind . <MouseWheel> { windows_mousewheel_redirector %W %X %Y %D }
- bind $ctext <MouseWheel> { windows_mousewheel_redirector %W %X %Y %D ; break }
+ set scroll_D0 120
+ bind_mousewheel
+ } elseif {[tk windowingsystem] == "x11"} {
+ set scroll_D0 1
+ bind_mousewheel_buttons
+ } elseif {[tk windowingsystem] == "aqua"} {
+ set scroll_D0 1
+ bind_mousewheel
} else {
- bindall <ButtonRelease-4> "allcanvs yview scroll -5 units"
- bindall <ButtonRelease-5> "allcanvs yview scroll 5 units"
- bind $ctext <Button> {
- if {"%b" eq 6} {
- $ctext xview scroll -5 units
- } elseif {"%b" eq 7} {
- $ctext xview scroll 5 units
- }
- }
- if {[tk windowingsystem] eq "aqua"} {
- bindall <MouseWheel> {
- set delta [expr {- (%D)}]
- allcanvs yview scroll $delta units
- }
- bindall <Shift-MouseWheel> {
- set delta [expr {- (%D)}]
- $canv xview scroll $delta units
- }
- }
+ puts stderr [mc "Unknown windowing system, cannot bind mouse"]
}
bindall <$::BM> "canvscan mark %W %x %y"
bindall <B$::BM-Motion> "canvscan dragto %W %x %y"
@@ -2705,13 +2769,8 @@ proc makewindow {} {
bind . <Key-Down> "selnextline 1"
bind . <Shift-Key-Up> "dofind -1 0"
bind . <Shift-Key-Down> "dofind 1 0"
- if {$have_tk86} {
- bindkey <<NextChar>> "goforw"
- bindkey <<PrevChar>> "goback"
- } else {
- bindkey <Key-Right> "goforw"
- bindkey <Key-Left> "goback"
- }
+ bindkey <<NextChar>> "goforw"
+ bindkey <<PrevChar>> "goback"
bind . <Key-Prior> "selnextpage -1"
bind . <Key-Next> "selnextpage 1"
bind . <$M1B-Home> "allcanvs yview moveto 0.0"
@@ -2838,24 +2897,6 @@ proc makewindow {} {
$diff_menu configure -tearoff 0
}
-# Windows sends all mouse wheel events to the current focused window, not
-# the one where the mouse hovers, so bind those events here and redirect
-# to the correct window
-proc windows_mousewheel_redirector {W X Y D} {
- global canv canv2 canv3
- set w [winfo containing -displayof $W $X $Y]
- if {$w ne ""} {
- set u [expr {$D < 0 ? 5 : -5}]
- if {$w == $canv || $w == $canv2 || $w == $canv3} {
- allcanvs yview scroll $u units
- } else {
- catch {
- $w yview scroll $u units
- }
- }
- }
-}
-
# Update row number label when selectedline changes
proc selectedline_change {n1 n2 op} {
global selectedline rownumsel
@@ -2918,30 +2959,10 @@ proc click {w} {
# Adjust the progress bar for a change in requested extent or canvas size
proc adjustprogress {} {
- global progresscanv progressitem progresscoords
- global fprogitem fprogcoord lastprogupdate progupdatepending
- global rprogitem rprogcoord use_ttk
+ global progresscanv
+ global fprogcoord
- if {$use_ttk} {
- $progresscanv configure -value [expr {int($fprogcoord * 100)}]
- return
- }
-
- set w [expr {[winfo width $progresscanv] - 4}]
- set x0 [expr {$w * [lindex $progresscoords 0]}]
- set x1 [expr {$w * [lindex $progresscoords 1]}]
- set h [winfo height $progresscanv]
- $progresscanv coords $progressitem $x0 0 $x1 $h
- $progresscanv coords $fprogitem 0 0 [expr {$w * $fprogcoord}] $h
- $progresscanv coords $rprogitem 0 0 [expr {$w * $rprogcoord}] $h
- set now [clock clicks -milliseconds]
- if {$now >= $lastprogupdate + 100} {
- set progupdatepending 0
- update
- } elseif {!$progupdatepending} {
- set progupdatepending 1
- after [expr {$lastprogupdate + 100 - $now}] doprogupdate
- }
+ $progresscanv configure -value [expr {int($fprogcoord * 100)}]
}
proc doprogupdate {} {
@@ -3000,14 +3021,13 @@ proc savestuff {w} {
upvar #0 viewargscmd current_viewargscmd
upvar #0 viewperm current_viewperm
upvar #0 nextviewnum current_nextviewnum
- upvar #0 use_ttk current_use_ttk
if {$stuffsaved} return
if {![winfo viewable .]} return
set remove_tmp 0
if {[catch {
set try_count 0
- while {[catch {set f [open $config_file_tmp {WRONLY CREAT EXCL}]}]} {
+ while {[catch {set f [safe_open_file $config_file_tmp {WRONLY CREAT EXCL}]}]} {
if {[incr try_count] > 50} {
error "Unable to write config file: $config_file_tmp exists"
}
@@ -3034,13 +3054,8 @@ proc savestuff {w} {
puts $f "set geometry(state) [wm state .]"
puts $f "set geometry(topwidth) [winfo width .tf]"
puts $f "set geometry(topheight) [winfo height .tf]"
- if {$current_use_ttk} {
- puts $f "set geometry(pwsash0) \"[.tf.histframe.pwclist sashpos 0] 1\""
- puts $f "set geometry(pwsash1) \"[.tf.histframe.pwclist sashpos 1] 1\""
- } else {
- puts $f "set geometry(pwsash0) \"[.tf.histframe.pwclist sash coord 0]\""
- puts $f "set geometry(pwsash1) \"[.tf.histframe.pwclist sash coord 1]\""
- }
+ puts $f "set geometry(pwsash0) \"[.tf.histframe.pwclist sashpos 0] 1\""
+ puts $f "set geometry(pwsash1) \"[.tf.histframe.pwclist sashpos 1] 1\""
puts $f "set geometry(botwidth) [winfo width .bleft]"
puts $f "set geometry(botheight) [winfo height .bleft]"
@@ -3086,17 +3101,14 @@ proc savestuff {w} {
}
proc resizeclistpanes {win w} {
- global oldwidth oldsash use_ttk
+ global oldwidth oldsash
if {[info exists oldwidth($win)]} {
if {[info exists oldsash($win)]} {
set s0 [lindex $oldsash($win) 0]
set s1 [lindex $oldsash($win) 1]
- } elseif {$use_ttk} {
+ } else {
set s0 [$win sashpos 0]
set s1 [$win sashpos 1]
- } else {
- set s0 [$win sash coord 0]
- set s1 [$win sash coord 1]
}
if {$w < 60} {
set sash0 [expr {int($w/2 - 2)}]
@@ -3118,29 +3130,20 @@ proc resizeclistpanes {win w} {
}
}
}
- if {$use_ttk} {
- $win sashpos 0 $sash0
- $win sashpos 1 $sash1
- } else {
- $win sash place 0 $sash0 [lindex $s0 1]
- $win sash place 1 $sash1 [lindex $s1 1]
- set sash0 [list $sash0 [lindex $s0 1]]
- set sash1 [list $sash1 [lindex $s1 1]]
- }
+ $win sashpos 0 $sash0
+ $win sashpos 1 $sash1
set oldsash($win) [list $sash0 $sash1]
}
set oldwidth($win) $w
}
proc resizecdetpanes {win w} {
- global oldwidth oldsash use_ttk
+ global oldwidth oldsash
if {[info exists oldwidth($win)]} {
if {[info exists oldsash($win)]} {
set s0 $oldsash($win)
- } elseif {$use_ttk} {
- set s0 [$win sashpos 0]
} else {
- set s0 [$win sash coord 0]
+ set s0 [$win sashpos 0]
}
if {$w < 60} {
set sash0 [expr {int($w*3/4 - 2)}]
@@ -3154,12 +3157,7 @@ proc resizecdetpanes {win w} {
set sash0 [expr {$w - 15}]
}
}
- if {$use_ttk} {
- $win sashpos 0 $sash0
- } else {
- $win sash place 0 $sash0 [lindex $s0 1]
- set sash0 [list $sash0 [lindex $s0 1]]
- }
+ $win sashpos 0 $sash0
set oldsash($win) $sash0
}
set oldwidth($win) $w
@@ -3180,7 +3178,7 @@ proc bindall {event action} {
}
proc about {} {
- global bgcolor NS
+ global bgcolor
set w .about
if {[winfo exists $w]} {
raise $w
@@ -3197,7 +3195,7 @@ Copyright \u00a9 2005-2016 Paul Mackerras
Use and redistribute under the terms of the GNU General Public License"] \
-justify center -aspect 400 -border 2 -bg $bgcolor -relief groove
pack $w.m -side top -fill x -padx 2 -pady 2
- ${NS}::button $w.ok -text [mc "Close"] -command "destroy $w" -default active
+ ttk::button $w.ok -text [mc "Close"] -command "destroy $w" -default active
pack $w.ok -side bottom
bind $w <Visibility> "focus $w.ok"
bind $w <Key-Escape> "destroy $w"
@@ -3206,7 +3204,7 @@ Use and redistribute under the terms of the GNU General Public License"] \
}
proc keys {} {
- global bgcolor NS
+ global bgcolor
set w .keys
if {[winfo exists $w]} {
raise $w
@@ -3264,7 +3262,7 @@ proc keys {} {
" \
-justify left -bg $bgcolor -border 2 -relief groove
pack $w.m -side top -fill both -padx 2 -pady 2
- ${NS}::button $w.ok -text [mc "Close"] -command "destroy $w" -default active
+ ttk::button $w.ok -text [mc "Close"] -command "destroy $w" -default active
bind $w <Key-Escape> [list destroy $w]
pack $w.ok -side bottom
bind $w <Visibility> "focus $w.ok"
@@ -3723,7 +3721,7 @@ proc gitknewtmpdir {} {
set tmpdir $gitdir
}
set gitktmpformat [file join $tmpdir ".gitk-tmp.XXXXXX"]
- if {[catch {set gitktmpdir [exec mktemp -d $gitktmpformat]}]} {
+ if {[catch {set gitktmpdir [safe_exec [list mktemp -d $gitktmpformat]]}]} {
set gitktmpdir [file join $gitdir [format ".gitk-tmp.%s" [pid]]]
}
if {[catch {file mkdir $gitktmpdir} err]} {
@@ -3745,7 +3743,7 @@ proc gitknewtmpdir {} {
proc save_file_from_commit {filename output what} {
global nullfile
- if {[catch {exec git show $filename -- > $output} err]} {
+ if {[catch {safe_exec_redirect [list git show $filename --] [list > $output]} err]} {
if {[string match "fatal: bad revision *" $err]} {
return $nullfile
}
@@ -3810,7 +3808,7 @@ proc external_diff {} {
if {$difffromfile ne {} && $difftofile ne {}} {
set cmd [list [shellsplit $extdifftool] $difffromfile $difftofile]
- if {[catch {set fl [open |$cmd r]} err]} {
+ if {[catch {set fl [safe_open_command $cmd]} err]} {
file delete -force $diffdir
error_popup "$extdifftool: [mc "command failed:"] $err"
} else {
@@ -3914,7 +3912,7 @@ proc external_blame_diff {} {
# Find the SHA1 ID of the blob for file $fname in the index
# at stage 0 or 2
proc index_sha1 {fname} {
- set f [open [list | git ls-files -s $fname] r]
+ set f [safe_open_command [list git ls-files -s $fname]]
while {[gets $f line] >= 0} {
set info [lindex [split $line "\t"] 0]
set stage [lindex $info 2]
@@ -3974,7 +3972,7 @@ proc external_blame {parent_idx {line {}}} {
# being given an absolute path...
set f [make_relative $f]
lappend cmdline $base_commit $f
- if {[catch {eval exec $cmdline &} err]} {
+ if {[catch {safe_exec_redirect $cmdline [list &]} err]} {
error_popup "[mc "git gui blame: command failed:"] $err"
}
}
@@ -4002,7 +4000,7 @@ proc show_line_source {} {
# must be a merge in progress...
if {[catch {
# get the last line from .git/MERGE_HEAD
- set f [open [file join $gitdir MERGE_HEAD] r]
+ set f [safe_open_file [file join $gitdir MERGE_HEAD] r]
set id [lindex [split [read $f] "\n"] end-1]
close $f
} err]} {
@@ -4025,19 +4023,17 @@ proc show_line_source {} {
}
set line [lindex $h 1]
}
- set blameargs {}
+ set blamefile [file join $cdup $flist_menu_file]
if {$from_index ne {}} {
- lappend blameargs | git cat-file blob $from_index
- }
- lappend blameargs | git blame -p -L$line,+1
- if {$from_index ne {}} {
- lappend blameargs --contents -
+ set blameargs [list \
+ [list git cat-file blob $from_index] \
+ [list git blame -p -L$line,+1 --contents - -- $blamefile]]
} else {
- lappend blameargs $id
+ set blameargs [list \
+ [list git blame -p -L$line,+1 $id -- $blamefile]]
}
- lappend blameargs -- [file join $cdup $flist_menu_file]
if {[catch {
- set f [open $blameargs r]
+ set f [safe_open_pipeline $blameargs]
} err]} {
error_popup [mc "Couldn't start git blame: %s" $err]
return
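# $blameargs is now a list of argv lists handed to safe_open_pipeline, which
# is defined elsewhere in this patch.  A minimal sketch of the idea, assuming
# it simply chains the stages into one Tcl command pipeline without a shell:
proc sketch_open_pipeline {stages} {
    set spec [lindex $stages 0]
    foreach stage [lrange $stages 1 end] {
        lappend spec | {*}$stage
    }
    # The leading "|" makes [open] treat $spec as a command pipeline.
    return [open [concat | $spec] r]
}
# e.g. the indexed case above becomes the pipeline
#   git cat-file blob $from_index | git blame -p -L$line,+1 --contents - -- $blamefile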
@@ -4062,6 +4058,7 @@ proc stopblaming {} {
proc read_line_source {fd inst} {
global blamestuff curview commfd blameinst nullid nullid2
+ global hashlength
while {[gets $fd line] >= 0} {
lappend blamestuff($inst) $line
@@ -4082,7 +4079,7 @@ proc read_line_source {fd inst} {
set line [split [lindex $blamestuff($inst) 0] " "]
set id [lindex $line 0]
set lnum [lindex $line 1]
- if {[string length $id] == 40 && [string is xdigit $id] &&
+ if {[string length $id] == $hashlength && [string is xdigit $id] &&
[string is digit -strict $lnum]} {
# look for "filename" line
foreach l $blamestuff($inst) {
@@ -4410,16 +4407,16 @@ proc editview {} {
proc vieweditor {top n title} {
global newviewname newviewopts viewfiles bgcolor
- global known_view_options NS
+ global known_view_options
ttk_toplevel $top
wm title $top [concat $title [mc "-- criteria for selecting revisions"]]
make_transient $top .
# View name
- ${NS}::frame $top.nfr
- ${NS}::label $top.nl -text [mc "View Name"]
- ${NS}::entry $top.name -width 20 -textvariable newviewname($n)
+ ttk::frame $top.nfr
+ ttk::label $top.nl -text [mc "View Name"]
+ ttk::entry $top.name -width 20 -textvariable newviewname($n)
pack $top.nfr -in $top -fill x -pady 5 -padx 3
pack $top.nl -in $top.nfr -side left -padx {0 5}
pack $top.name -in $top.nfr -side left -padx {0 25}
@@ -4438,13 +4435,13 @@ proc vieweditor {top n title} {
if {$flags eq "+" || $flags eq "*"} {
set cframe $top.fr$cnt
incr cnt
- ${NS}::frame $cframe
+ ttk::frame $cframe
pack $cframe -in $top -fill x -pady 3 -padx 3
set cexpand [expr {$flags eq "*"}]
} elseif {$flags eq ".." || $flags eq "*."} {
set cframe $top.fr$cnt
incr cnt
- ${NS}::frame $cframe
+ ttk::frame $cframe
pack $cframe -in $top -fill x -pady 3 -padx [list 15 3]
set cexpand [expr {$flags eq "*."}]
} else {
@@ -4452,31 +4449,31 @@ proc vieweditor {top n title} {
}
if {$type eq "l"} {
- ${NS}::label $cframe.l_$id -text $title
+ ttk::label $cframe.l_$id -text $title
pack $cframe.l_$id -in $cframe -side left -pady [list 3 0] -anchor w
} elseif {$type eq "b"} {
- ${NS}::checkbutton $cframe.c_$id -text $title -variable newviewopts($n,$id)
+ ttk::checkbutton $cframe.c_$id -text $title -variable newviewopts($n,$id)
pack $cframe.c_$id -in $cframe -side left \
-padx [list $lxpad 0] -expand $cexpand -anchor w
} elseif {[regexp {^r(\d+)$} $type type sz]} {
regexp {^(.*_)} $id uselessvar button_id
- ${NS}::radiobutton $cframe.c_$id -text $title -variable newviewopts($n,$button_id) -value $sz
+ ttk::radiobutton $cframe.c_$id -text $title -variable newviewopts($n,$button_id) -value $sz
pack $cframe.c_$id -in $cframe -side left \
-padx [list $lxpad 0] -expand $cexpand -anchor w
} elseif {[regexp {^t(\d+)$} $type type sz]} {
- ${NS}::label $cframe.l_$id -text $title
- ${NS}::entry $cframe.e_$id -width $sz -background $bgcolor \
+ ttk::label $cframe.l_$id -text $title
+ ttk::entry $cframe.e_$id -width $sz -background $bgcolor \
-textvariable newviewopts($n,$id)
pack $cframe.l_$id -in $cframe -side left -padx [list $lxpad 0]
pack $cframe.e_$id -in $cframe -side left -expand 1 -fill x
} elseif {[regexp {^t(\d+)=$} $type type sz]} {
- ${NS}::label $cframe.l_$id -text $title
- ${NS}::entry $cframe.e_$id -width $sz -background $bgcolor \
+ ttk::label $cframe.l_$id -text $title
+ ttk::entry $cframe.e_$id -width $sz -background $bgcolor \
-textvariable newviewopts($n,$id)
pack $cframe.l_$id -in $cframe -side top -pady [list 3 0] -anchor w
pack $cframe.e_$id -in $cframe -side top -fill x
} elseif {$type eq "path"} {
- ${NS}::label $top.l -text $title
+ ttk::label $top.l -text $title
pack $top.l -in $top -side top -pady [list 3 0] -anchor w -padx 3
text $top.t -width 40 -height 5 -background $bgcolor
if {[info exists viewfiles($n)]} {
@@ -4491,10 +4488,10 @@ proc vieweditor {top n title} {
}
}
- ${NS}::frame $top.buts
- ${NS}::button $top.buts.ok -text [mc "OK"] -command [list newviewok $top $n]
- ${NS}::button $top.buts.apply -text [mc "Apply (F5)"] -command [list newviewok $top $n 1]
- ${NS}::button $top.buts.can -text [mc "Cancel"] -command [list destroy $top]
+ ttk::frame $top.buts
+ ttk::button $top.buts.ok -text [mc "OK"] -command [list newviewok $top $n]
+ ttk::button $top.buts.apply -text [mc "Apply (F5)"] -command [list newviewok $top $n 1]
+ ttk::button $top.buts.can -text [mc "Cancel"] -command [list destroy $top]
bind $top <Control-Return> [list newviewok $top $n]
bind $top <F5> [list newviewok $top $n 1]
bind $top <Escape> [list destroy $top]
@@ -4962,8 +4959,8 @@ proc do_file_hl {serial} {
# must be "containing:", i.e. we're searching commit info
return
}
- set cmd [concat | git diff-tree -r -s --stdin $gdtargs]
- set filehighlight [open $cmd r+]
+ set cmd [concat git diff-tree -r -s --stdin $gdtargs]
+ set filehighlight [safe_open_command_rw $cmd]
fconfigure $filehighlight -blocking 0
filerun $filehighlight readfhighlight
set fhl_list {}
@@ -5226,11 +5223,13 @@ proc askrelhighlight {row id} {
# Graph layout functions
proc shortids {ids} {
+ global hashlength
+
set res {}
foreach id $ids {
if {[llength $id] > 1} {
lappend res [shortids $id]
- } elseif {[regexp {^[0-9a-f]{40}$} $id]} {
+ } elseif {[regexp [string map "@@ $hashlength" {^[0-9a-f]{@@}$}] $id]} {
lappend res [string range $id 0 7]
} else {
lappend res $id
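# The [string map] above splices the repository's hash length into the regular
# expression at the "@@" placeholder, e.g. for a SHA-256 repository:
set hashlength 64
puts [string map "@@ $hashlength" {^[0-9a-f]{@@}$}]   ;# prints ^[0-9a-f]{64}$
# Full object ids still get abbreviated to 8 characters by the
# [string range $id 0 7] branch, exactly as before.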
@@ -5392,8 +5391,8 @@ proc get_viewmainhead {view} {
global viewmainheadid vfilelimit viewinstances mainheadid
catch {
- set rfd [open [concat | git rev-list -1 $mainheadid \
- -- $vfilelimit($view)] r]
+ set rfd [safe_open_command [concat git rev-list -1 $mainheadid \
+ -- $vfilelimit($view)]]
set j [reg_instance $rfd]
lappend viewinstances($view) $j
fconfigure $rfd -blocking 0
@@ -5405,13 +5404,14 @@ proc get_viewmainhead {view} {
# git rev-list should give us just 1 line to use as viewmainheadid($view)
proc getviewhead {fd inst view} {
global viewmainheadid commfd curview viewinstances showlocalchanges
+ global hashlength
set id {}
if {[gets $fd line] < 0} {
if {![eof $fd]} {
return 1
}
- } elseif {[string length $line] == 40 && [string is xdigit $line]} {
+ } elseif {[string length $line] == $hashlength && [string is xdigit $line]} {
set id $line
}
set viewmainheadid($view) $id
@@ -5453,19 +5453,15 @@ proc dohidelocalchanges {} {
# spawn off a process to do git diff-index --cached HEAD
proc dodiffindex {} {
global lserial showlocalchanges vfilelimit curview
- global hasworktree git_version
+ global hasworktree
if {!$showlocalchanges || !$hasworktree} return
incr lserial
- if {[package vcompare $git_version "1.7.2"] >= 0} {
- set cmd "|git diff-index --cached --ignore-submodules=dirty HEAD"
- } else {
- set cmd "|git diff-index --cached HEAD"
- }
+ set cmd "git diff-index --cached --ignore-submodules=dirty HEAD"
if {$vfilelimit($curview) ne {}} {
set cmd [concat $cmd -- $vfilelimit($curview)]
}
- set fd [open $cmd r]
+ set fd [safe_open_command $cmd]
fconfigure $fd -blocking 0
set i [reg_instance $fd]
filerun $fd [list readdiffindex $fd $lserial $i]
@@ -5490,11 +5486,11 @@ proc readdiffindex {fd serial inst} {
}
# now see if there are any local changes not checked in to the index
- set cmd "|git diff-files"
+ set cmd "git diff-files"
if {$vfilelimit($curview) ne {}} {
set cmd [concat $cmd -- $vfilelimit($curview)]
}
- set fd [open $cmd r]
+ set fd [safe_open_command $cmd]
fconfigure $fd -blocking 0
set i [reg_instance $fd]
filerun $fd [list readdifffiles $fd $serial $i]
@@ -6689,13 +6685,7 @@ proc bindline {t id} {
}
proc graph_pane_width {} {
- global use_ttk
-
- if {$use_ttk} {
- set g [.tf.histframe.pwclist sashpos 0]
- } else {
- set g [.tf.histframe.pwclist sash coord 0]
- }
+ set g [.tf.histframe.pwclist sashpos 0]
return [lindex $g 0]
}
@@ -7175,10 +7165,11 @@ proc commit_descriptor {p} {
# Also look for URLs of the form "http[s]://..." and make them web links.
proc appendwithlinks {text tags} {
global ctext linknum curview
+ global hashlength
set start [$ctext index "end - 1c"]
$ctext insert end $text $tags
- set links [regexp -indices -all -inline {(?:\m|-g)[0-9a-f]{6,40}\M} $text]
+ set links [regexp -indices -all -inline [string map "@@ $hashlength" {(?:\m|-g)[0-9a-f]{6,@@}\M}] $text]
foreach l $links {
set s [lindex $l 0]
set e [lindex $l 1]
@@ -7206,13 +7197,14 @@ proc appendwithlinks {text tags} {
proc setlink {id lk} {
global curview ctext pendinglinks
global linkfgcolor
+ global hashlength
if {[string range $id 0 1] eq "-g"} {
set id [string range $id 2 end]
}
set known 0
- if {[string length $id] < 40} {
+ if {[string length $id] < $hashlength} {
set matches [longid $id]
if {[llength $matches] > 0} {
if {[llength $matches] > 1} return
@@ -7283,8 +7275,8 @@ proc browseweb {url} {
global web_browser
if {$web_browser eq {}} return
- # Use eval here in case $web_browser is a command plus some arguments
- if {[catch {eval exec $web_browser [list $url] &} err]} {
+ # Use concat here in case $web_browser is a command plus some arguments
+ if {[catch {safe_exec_redirect [concat $web_browser [list $url]] [list &]} err]} {
error_popup "[mc "Error starting web browser:"] $err"
}
}
@@ -7790,13 +7782,13 @@ proc gettree {id} {
if {![info exists treefilelist($id)]} {
if {![info exists treepending]} {
if {$id eq $nullid} {
- set cmd [list | git ls-files]
+ set cmd [list git ls-files]
} elseif {$id eq $nullid2} {
- set cmd [list | git ls-files --stage -t]
+ set cmd [list git ls-files --stage -t]
} else {
- set cmd [list | git ls-tree -r $id]
+ set cmd [list git ls-tree -r $id]
}
- if {[catch {set gtf [open $cmd r]}]} {
+ if {[catch {set gtf [safe_open_command $cmd]}]} {
return
}
set treepending $id
@@ -7860,13 +7852,13 @@ proc showfile {f} {
return
}
if {$diffids eq $nullid} {
- if {[catch {set bf [open $f r]} err]} {
+ if {[catch {set bf [safe_open_file $f r]} err]} {
puts "oops, can't read $f: $err"
return
}
} else {
set blob [lindex $treeidlist($diffids) $i]
- if {[catch {set bf [open [concat | git cat-file blob $blob] r]} err]} {
+ if {[catch {set bf [safe_open_command [concat git cat-file blob $blob]]} err]} {
puts "oops, error reading blob $blob: $err"
return
}
@@ -8009,14 +8001,14 @@ proc addtocflist {ids} {
}
proc diffcmd {ids flags} {
- global log_showroot nullid nullid2 git_version
+ global log_showroot nullid nullid2
set i [lsearch -exact $ids $nullid]
set j [lsearch -exact $ids $nullid2]
if {$i >= 0} {
if {[llength $ids] > 1 && $j < 0} {
# comparing working directory with some specific revision
- set cmd [concat | git diff-index $flags]
+ set cmd [concat git diff-index $flags]
if {$i == 0} {
lappend cmd -R [lindex $ids 1]
} else {
@@ -8024,16 +8016,14 @@ proc diffcmd {ids flags} {
}
} else {
# comparing working directory with index
- set cmd [concat | git diff-files $flags]
+ set cmd [concat git diff-files $flags]
if {$j == 1} {
lappend cmd -R
}
}
} elseif {$j >= 0} {
- if {[package vcompare $git_version "1.7.2"] >= 0} {
- set flags "$flags --ignore-submodules=dirty"
- }
- set cmd [concat | git diff-index --cached $flags]
+ set flags "$flags --ignore-submodules=dirty"
+ set cmd [concat git diff-index --cached $flags]
if {[llength $ids] > 1} {
# comparing index with specific revision
if {$j == 0} {
@@ -8049,7 +8039,7 @@ proc diffcmd {ids flags} {
if {$log_showroot} {
lappend flags --root
}
- set cmd [concat | git diff-tree -r $flags $ids]
+ set cmd [concat git diff-tree -r $flags $ids]
}
return $cmd
}
@@ -8061,7 +8051,7 @@ proc gettreediffs {ids} {
if {$limitdiffs && $vfilelimit($curview) ne {}} {
set cmd [concat $cmd -- $vfilelimit($curview)]
}
- if {[catch {set gdtf [open $cmd r]}]} return
+ if {[catch {set gdtf [safe_open_command $cmd]}]} return
set treepending $ids
set treediff {}
@@ -8161,17 +8151,8 @@ proc getblobdiffs {ids} {
global ignorespace
global worddiff
global limitdiffs vfilelimit curview
- global git_version
- set textconv {}
- if {[package vcompare $git_version "1.6.1"] >= 0} {
- set textconv "--textconv"
- }
- set submodule {}
- if {[package vcompare $git_version "1.6.6"] >= 0} {
- set submodule "--submodule"
- }
- set cmd [diffcmd $ids "-p $textconv $submodule -C --cc --no-commit-id -U$diffcontext"]
+ set cmd [diffcmd $ids "-p --textconv --submodule -C --cc --no-commit-id -U$diffcontext"]
if {$ignorespace} {
append cmd " -w"
}
@@ -8181,7 +8162,7 @@ proc getblobdiffs {ids} {
if {$limitdiffs && $vfilelimit($curview) ne {}} {
set cmd [concat $cmd -- $vfilelimit($curview)]
}
- if {[catch {set bdf [open $cmd r]} err]} {
+ if {[catch {set bdf [safe_open_command $cmd]} err]} {
error_popup [mc "Error getting diffs: %s" $err]
return
}
@@ -8576,19 +8557,17 @@ proc clear_ctext {{first 1.0}} {
}
proc settabs {{firstab {}}} {
- global firsttabstop tabstop ctext have_tk85
+ global firsttabstop tabstop ctext
- if {$firstab ne {} && $have_tk85} {
+ if {$firstab ne {}} {
set firsttabstop $firstab
}
set w [font measure textfont "0"]
if {$firsttabstop != 0} {
$ctext conf -tabs [list [expr {($firsttabstop + $tabstop) * $w}] \
[expr {($firsttabstop + 2 * $tabstop) * $w}]]
- } elseif {$have_tk85 || $tabstop != 8} {
- $ctext conf -tabs [expr {$tabstop * $w}]
} else {
- $ctext conf -tabs {}
+ $ctext conf -tabs [expr {$tabstop * $w}]
}
}
@@ -8857,13 +8836,16 @@ proc incrfont {inc} {
proc clearsha1 {} {
global sha1entry sha1string
- if {[string length $sha1string] == 40} {
+ global hashlength
+
+ if {[string length $sha1string] == $hashlength} {
$sha1entry delete 0 end
}
}
proc sha1change {n1 n2 op} {
global sha1string currentid sha1but
+
if {$sha1string == {}
|| ([info exists currentid] && $sha1string == $currentid)} {
set state disabled
@@ -8880,6 +8862,7 @@ proc sha1change {n1 n2 op} {
proc gotocommit {} {
global sha1string tagids headids curview varcid
+ global hashlength
if {$sha1string == {}
|| ([info exists currentid] && $sha1string == $currentid)} return
@@ -8889,7 +8872,7 @@ proc gotocommit {} {
set id $headids($sha1string)
} else {
set id [string tolower $sha1string]
- if {[regexp {^[0-9a-f]{4,39}$} $id]} {
+ if {[regexp {^[0-9a-f]{4,63}$} $id]} {
set matches [longid $id]
if {$matches ne {}} {
if {[llength $matches] > 1} {
@@ -8899,7 +8882,7 @@ proc gotocommit {} {
set id [lindex $matches 0]
}
} else {
- if {[catch {set id [exec git rev-parse --verify $sha1string]}]} {
+ if {[catch {set id [safe_exec [list git rev-parse --verify $sha1string]]}]} {
error_popup [mc "Revision %s is not known" $sha1string]
return
}
@@ -9205,10 +9188,8 @@ proc getpatchid {id} {
if {![info exists patchids($id)]} {
set cmd [diffcmd [list $id] {-p --root}]
- # trim off the initial "|"
- set cmd [lrange $cmd 1 end]
if {[catch {
- set x [eval exec $cmd | git patch-id]
+ set x [safe_exec_redirect $cmd [list | git patch-id]]
set patchids($id) [lindex $x 0]
}]} {
set patchids($id) "error"
@@ -9304,14 +9285,14 @@ proc diffcommits {a b} {
set fna [file join $tmpdir "commit-[string range $a 0 7]"]
set fnb [file join $tmpdir "commit-[string range $b 0 7]"]
if {[catch {
- exec git diff-tree -p --pretty $a >$fna
- exec git diff-tree -p --pretty $b >$fnb
+ safe_exec_redirect [list git diff-tree -p --pretty $a] [list >$fna]
+ safe_exec_redirect [list git diff-tree -p --pretty $b] [list >$fnb]
} err]} {
error_popup [mc "Error writing commit to file: %s" $err]
return
}
if {[catch {
- set fd [open "| diff -U$diffcontext $fna $fnb" r]
+ set fd [safe_open_command "diff -U$diffcontext $fna $fnb"]
} err]} {
error_popup [mc "Error diffing commits: %s" $err]
return
@@ -9377,7 +9358,8 @@ proc doseldiff {oldid newid} {
}
proc mkpatch {} {
- global rowmenuid currentid commitinfo patchtop patchnum NS
+ global rowmenuid currentid commitinfo patchtop patchnum
+ global hashlength
if {![info exists currentid]} return
set oldid $currentid
@@ -9389,36 +9371,36 @@ proc mkpatch {} {
catch {destroy $top}
ttk_toplevel $top
make_transient $top .
- ${NS}::label $top.title -text [mc "Generate patch"]
+ ttk::label $top.title -text [mc "Generate patch"]
grid $top.title - -pady 10
- ${NS}::label $top.from -text [mc "From:"]
- ${NS}::entry $top.fromsha1 -width 40
+ ttk::label $top.from -text [mc "From:"]
+ ttk::entry $top.fromsha1 -width $hashlength
$top.fromsha1 insert 0 $oldid
$top.fromsha1 conf -state readonly
grid $top.from $top.fromsha1 -sticky w
- ${NS}::entry $top.fromhead -width 60
+ ttk::entry $top.fromhead -width 60
$top.fromhead insert 0 $oldhead
$top.fromhead conf -state readonly
grid x $top.fromhead -sticky w
- ${NS}::label $top.to -text [mc "To:"]
- ${NS}::entry $top.tosha1 -width 40
+ ttk::label $top.to -text [mc "To:"]
+ ttk::entry $top.tosha1 -width $hashlength
$top.tosha1 insert 0 $newid
$top.tosha1 conf -state readonly
grid $top.to $top.tosha1 -sticky w
- ${NS}::entry $top.tohead -width 60
+ ttk::entry $top.tohead -width 60
$top.tohead insert 0 $newhead
$top.tohead conf -state readonly
grid x $top.tohead -sticky w
- ${NS}::button $top.rev -text [mc "Reverse"] -command mkpatchrev
+ ttk::button $top.rev -text [mc "Reverse"] -command mkpatchrev
grid $top.rev x -pady 10 -padx 5
- ${NS}::label $top.flab -text [mc "Output file:"]
- ${NS}::entry $top.fname -width 60
+ ttk::label $top.flab -text [mc "Output file:"]
+ ttk::entry $top.fname -width 60
$top.fname insert 0 [file normalize "patch$patchnum.patch"]
incr patchnum
grid $top.flab $top.fname -sticky w
- ${NS}::frame $top.buts
- ${NS}::button $top.buts.gen -text [mc "Generate"] -command mkpatchgo
- ${NS}::button $top.buts.can -text [mc "Cancel"] -command mkpatchcan
+ ttk::frame $top.buts
+ ttk::button $top.buts.gen -text [mc "Generate"] -command mkpatchgo
+ ttk::button $top.buts.can -text [mc "Cancel"] -command mkpatchcan
bind $top <Key-Return> mkpatchgo
bind $top <Key-Escape> mkpatchcan
grid $top.buts.gen $top.buts.can
@@ -9451,10 +9433,7 @@ proc mkpatchgo {} {
set newid [$patchtop.tosha1 get]
set fname [$patchtop.fname get]
set cmd [diffcmd [list $oldid $newid] -p]
- # trim off the initial "|"
- set cmd [lrange $cmd 1 end]
- lappend cmd >$fname &
- if {[catch {eval exec $cmd} err]} {
+ if {[catch {safe_exec_redirect $cmd [list >$fname &]} err]} {
error_popup "[mc "Error creating patch:"] $err" $patchtop
}
catch {destroy $patchtop}
@@ -9469,35 +9448,36 @@ proc mkpatchcan {} {
}
proc mktag {} {
- global rowmenuid mktagtop commitinfo NS
+ global rowmenuid mktagtop commitinfo
+ global hashlength
set top .maketag
set mktagtop $top
catch {destroy $top}
ttk_toplevel $top
make_transient $top .
- ${NS}::label $top.title -text [mc "Create tag"]
+ ttk::label $top.title -text [mc "Create tag"]
grid $top.title - -pady 10
- ${NS}::label $top.id -text [mc "ID:"]
- ${NS}::entry $top.sha1 -width 40
+ ttk::label $top.id -text [mc "ID:"]
+ ttk::entry $top.sha1 -width $hashlength
$top.sha1 insert 0 $rowmenuid
$top.sha1 conf -state readonly
grid $top.id $top.sha1 -sticky w
- ${NS}::entry $top.head -width 60
+ ttk::entry $top.head -width 60
$top.head insert 0 [lindex $commitinfo($rowmenuid) 0]
$top.head conf -state readonly
grid x $top.head -sticky w
- ${NS}::label $top.tlab -text [mc "Tag name:"]
- ${NS}::entry $top.tag -width 60
+ ttk::label $top.tlab -text [mc "Tag name:"]
+ ttk::entry $top.tag -width 60
grid $top.tlab $top.tag -sticky w
- ${NS}::label $top.op -text [mc "Tag message is optional"]
+ ttk::label $top.op -text [mc "Tag message is optional"]
grid $top.op -columnspan 2 -sticky we
- ${NS}::label $top.mlab -text [mc "Tag message:"]
- ${NS}::entry $top.msg -width 60
+ ttk::label $top.mlab -text [mc "Tag message:"]
+ ttk::entry $top.msg -width 60
grid $top.mlab $top.msg -sticky w
- ${NS}::frame $top.buts
- ${NS}::button $top.buts.gen -text [mc "Create"] -command mktaggo
- ${NS}::button $top.buts.can -text [mc "Cancel"] -command mktagcan
+ ttk::frame $top.buts
+ ttk::button $top.buts.gen -text [mc "Create"] -command mktaggo
+ ttk::button $top.buts.can -text [mc "Cancel"] -command mktagcan
bind $top <Key-Return> mktaggo
bind $top <Key-Escape> mktagcan
grid $top.buts.gen $top.buts.can
@@ -9523,9 +9503,9 @@ proc domktag {} {
}
if {[catch {
if {$msg != {}} {
- exec git tag -a -m $msg $tag $id
+ safe_exec [list git tag -a -m $msg $tag $id]
} else {
- exec git tag $tag $id
+ safe_exec [list git tag $tag $id]
}
} err]} {
error_popup "[mc "Error creating tag:"] $err" $mktagtop
@@ -9587,47 +9567,49 @@ proc mktaggo {} {
proc copyreference {} {
global rowmenuid autosellen
+ global hashlength
set format "%h (\"%s\", %ad)"
set cmd [list git show -s --pretty=format:$format --date=short]
- if {$autosellen < 40} {
+ if {$autosellen < $hashlength} {
lappend cmd --abbrev=$autosellen
}
- set reference [eval exec $cmd $rowmenuid]
+ set reference [safe_exec [concat $cmd $rowmenuid]]
clipboard clear
clipboard append $reference
}
proc writecommit {} {
- global rowmenuid wrcomtop commitinfo wrcomcmd NS
+ global rowmenuid wrcomtop commitinfo wrcomcmd
+ global hashlength
set top .writecommit
set wrcomtop $top
catch {destroy $top}
ttk_toplevel $top
make_transient $top .
- ${NS}::label $top.title -text [mc "Write commit to file"]
+ ttk::label $top.title -text [mc "Write commit to file"]
grid $top.title - -pady 10
- ${NS}::label $top.id -text [mc "ID:"]
- ${NS}::entry $top.sha1 -width 40
+ ttk::label $top.id -text [mc "ID:"]
+ ttk::entry $top.sha1 -width $hashlength
$top.sha1 insert 0 $rowmenuid
$top.sha1 conf -state readonly
grid $top.id $top.sha1 -sticky w
- ${NS}::entry $top.head -width 60
+ ttk::entry $top.head -width 60
$top.head insert 0 [lindex $commitinfo($rowmenuid) 0]
$top.head conf -state readonly
grid x $top.head -sticky w
- ${NS}::label $top.clab -text [mc "Command:"]
- ${NS}::entry $top.cmd -width 60 -textvariable wrcomcmd
+ ttk::label $top.clab -text [mc "Command:"]
+ ttk::entry $top.cmd -width 60 -textvariable wrcomcmd
grid $top.clab $top.cmd -sticky w -pady 10
- ${NS}::label $top.flab -text [mc "Output file:"]
- ${NS}::entry $top.fname -width 60
+ ttk::label $top.flab -text [mc "Output file:"]
+ ttk::entry $top.fname -width 60
$top.fname insert 0 [file normalize "commit-[string range $rowmenuid 0 6]"]
grid $top.flab $top.fname -sticky w
- ${NS}::frame $top.buts
- ${NS}::button $top.buts.gen -text [mc "Write"] -command wrcomgo
- ${NS}::button $top.buts.can -text [mc "Cancel"] -command wrcomcan
+ ttk::frame $top.buts
+ ttk::button $top.buts.gen -text [mc "Write"] -command wrcomgo
+ ttk::button $top.buts.can -text [mc "Cancel"] -command wrcomcan
bind $top <Key-Return> wrcomgo
bind $top <Key-Escape> wrcomcan
grid $top.buts.gen $top.buts.can
@@ -9643,7 +9625,7 @@ proc wrcomgo {} {
set id [$wrcomtop.sha1 get]
set cmd "echo $id | [$wrcomtop.cmd get]"
set fname [$wrcomtop.fname get]
- if {[catch {exec sh -c $cmd >$fname &} err]} {
+ if {[catch {safe_exec_redirect [list sh -c $cmd] [list >$fname &]} err]} {
error_popup "[mc "Error writing commit:"] $err" $wrcomtop
}
catch {destroy $wrcomtop}
@@ -9658,7 +9640,7 @@ proc wrcomcan {} {
}
proc mkbranch {} {
- global NS rowmenuid
+ global rowmenuid
set top .branchdialog
@@ -9673,7 +9655,6 @@ proc mkbranch {} {
}
proc mvbranch {} {
- global NS
global headmenuid headmenuhead
set top .branchdialog
@@ -9689,31 +9670,32 @@ proc mvbranch {} {
}
proc branchdia {top valvar uivar} {
- global NS commitinfo
+ global commitinfo
+ global hashlength
upvar $valvar val $uivar ui
catch {destroy $top}
ttk_toplevel $top
make_transient $top .
- ${NS}::label $top.title -text $ui(title)
+ ttk::label $top.title -text $ui(title)
grid $top.title - -pady 10
- ${NS}::label $top.id -text [mc "ID:"]
- ${NS}::entry $top.sha1 -width 40
+ ttk::label $top.id -text [mc "ID:"]
+ ttk::entry $top.sha1 -width $hashlength
$top.sha1 insert 0 $val(id)
$top.sha1 conf -state readonly
grid $top.id $top.sha1 -sticky w
- ${NS}::entry $top.head -width 60
+ ttk::entry $top.head -width 60
$top.head insert 0 [lindex $commitinfo($val(id)) 0]
$top.head conf -state readonly
grid x $top.head -sticky ew
grid columnconfigure $top 1 -weight 1
- ${NS}::label $top.nlab -text [mc "Name:"]
- ${NS}::entry $top.name -width 40
+ ttk::label $top.nlab -text [mc "Name:"]
+ ttk::entry $top.name -width $hashlength
$top.name insert 0 $val(name)
grid $top.nlab $top.name -sticky w
- ${NS}::frame $top.buts
- ${NS}::button $top.buts.go -text $ui(accept) -command $val(command)
- ${NS}::button $top.buts.can -text [mc "Cancel"] -command "catch {destroy $top}"
+ ttk::frame $top.buts
+ ttk::button $top.buts.go -text $ui(accept) -command $val(command)
+ ttk::button $top.buts.can -text [mc "Cancel"] -command "catch {destroy $top}"
bind $top <Key-Return> $val(command)
bind $top <Key-Escape> "catch {destroy $top}"
grid $top.buts.go $top.buts.can
@@ -9747,7 +9729,7 @@ proc mkbrgo {top} {
nowbusy newbranch
update
if {[catch {
- eval exec git branch $cmdargs
+ safe_exec [concat git branch $cmdargs]
} err]} {
notbusy newbranch
error_popup $err
@@ -9788,7 +9770,7 @@ proc mvbrgo {top prevname} {
nowbusy renamebranch
update
if {[catch {
- eval exec git branch $cmdargs
+ safe_exec [concat git branch $cmdargs]
} err]} {
notbusy renamebranch
error_popup $err
@@ -9829,7 +9811,7 @@ proc exec_citool {tool_args {baseid {}}} {
}
}
- eval exec git citool $tool_args &
+ safe_exec_redirect [concat git citool $tool_args] [list &]
array unset env GIT_AUTHOR_*
array set env $save_env
@@ -9852,7 +9834,7 @@ proc cherrypick {} {
update
# Unfortunately git-cherry-pick writes stuff to stderr even when
# no error occurs, and exec takes that as an indication of error...
- if {[catch {exec sh -c "git cherry-pick -r $rowmenuid 2>&1"} err]} {
+ if {[catch {safe_exec [list sh -c "git cherry-pick -r $rowmenuid 2>&1"]} err]} {
notbusy cherrypick
if {[regexp -line \
{Entry '(.*)' (would be overwritten by merge|not uptodate)} \
@@ -9914,7 +9896,7 @@ proc revert {} {
nowbusy revert [mc "Reverting"]
update
- if [catch {exec git revert --no-edit $rowmenuid} err] {
+ if [catch {safe_exec [list git revert --no-edit $rowmenuid]} err] {
notbusy revert
if [regexp {files would be overwritten by merge:(\n(( |\t)+[^\n]+\n)+)}\
$err match files] {
@@ -9960,38 +9942,38 @@ proc revert {} {
}
proc resethead {} {
- global mainhead rowmenuid confirm_ok resettype NS
+ global mainhead rowmenuid confirm_ok resettype
set confirm_ok 0
set w ".confirmreset"
ttk_toplevel $w
make_transient $w .
wm title $w [mc "Confirm reset"]
- ${NS}::label $w.m -text \
+ ttk::label $w.m -text \
[mc "Reset branch %s to %s?" $mainhead [string range $rowmenuid 0 7]]
pack $w.m -side top -fill x -padx 20 -pady 20
- ${NS}::labelframe $w.f -text [mc "Reset type:"]
+ ttk::labelframe $w.f -text [mc "Reset type:"]
set resettype mixed
- ${NS}::radiobutton $w.f.soft -value soft -variable resettype \
+ ttk::radiobutton $w.f.soft -value soft -variable resettype \
-text [mc "Soft: Leave working tree and index untouched"]
grid $w.f.soft -sticky w
- ${NS}::radiobutton $w.f.mixed -value mixed -variable resettype \
+ ttk::radiobutton $w.f.mixed -value mixed -variable resettype \
-text [mc "Mixed: Leave working tree untouched, reset index"]
grid $w.f.mixed -sticky w
- ${NS}::radiobutton $w.f.hard -value hard -variable resettype \
+ ttk::radiobutton $w.f.hard -value hard -variable resettype \
-text [mc "Hard: Reset working tree and index\n(discard ALL local changes)"]
grid $w.f.hard -sticky w
pack $w.f -side top -fill x -padx 4
- ${NS}::button $w.ok -text [mc OK] -command "set confirm_ok 1; destroy $w"
+ ttk::button $w.ok -text [mc OK] -command "set confirm_ok 1; destroy $w"
pack $w.ok -side left -fill x -padx 20 -pady 20
- ${NS}::button $w.cancel -text [mc Cancel] -command "destroy $w"
+ ttk::button $w.cancel -text [mc Cancel] -command "destroy $w"
bind $w <Key-Escape> [list destroy $w]
pack $w.cancel -side right -fill x -padx 20 -pady 20
bind $w <Visibility> "grab $w; focus $w"
tkwait window $w
if {!$confirm_ok} return
- if {[catch {set fd [open \
- [list | git reset --$resettype $rowmenuid 2>@1] r]} err]} {
+ if {[catch {set fd [safe_open_command_redirect \
+ [list git reset --$resettype $rowmenuid] [list 2>@1]]} err]} {
error_popup $err
} else {
dohidelocalchanges
@@ -10062,7 +10044,7 @@ proc cobranch {} {
# check the tree is clean first??
set newhead $headmenuhead
- set command [list | git checkout]
+ set command [list git checkout]
if {[string match "remotes/*" $newhead]} {
set remote $newhead
set newhead [string range $newhead [expr [string last / $newhead] + 1] end]
@@ -10076,12 +10058,11 @@ proc cobranch {} {
} else {
lappend command $newhead
}
- lappend command 2>@1
nowbusy checkout [mc "Checking out"]
update
dohidelocalchanges
if {[catch {
- set fd [open $command r]
+ set fd [safe_open_command_redirect $command [list 2>@1]]
} err]} {
notbusy checkout
error_popup $err
@@ -10147,7 +10128,7 @@ proc rmbranch {} {
}
nowbusy rmbranch
update
- if {[catch {exec git branch -D $head} err]} {
+ if {[catch {safe_exec [list git branch -D $head]} err]} {
notbusy rmbranch
error_popup $err
return
@@ -10162,7 +10143,7 @@ proc rmbranch {} {
# Display a list of tags and heads
proc showrefs {} {
- global showrefstop bgcolor fgcolor selectbgcolor NS
+ global showrefstop bgcolor fgcolor selectbgcolor
global bglist fglist reflistfilter reflist maincursor
set top .showrefs
@@ -10185,19 +10166,22 @@ proc showrefs {} {
lappend bglist $top.list
lappend fglist $top.list
}
- ${NS}::scrollbar $top.ysb -command "$top.list yview" -orient vertical
- ${NS}::scrollbar $top.xsb -command "$top.list xview" -orient horizontal
+ ttk::scrollbar $top.ysb -command "$top.list yview" -orient vertical
+ ttk::scrollbar $top.xsb -command "$top.list xview" -orient horizontal
grid $top.list $top.ysb -sticky nsew
grid $top.xsb x -sticky ew
- ${NS}::frame $top.f
- ${NS}::label $top.f.l -text "[mc "Filter"]: "
- ${NS}::entry $top.f.e -width 20 -textvariable reflistfilter
+ ttk::frame $top.f
+ ttk::label $top.f.l -text "[mc "Filter"]: "
+ ttk::entry $top.f.e -width 20 -textvariable reflistfilter
set reflistfilter "*"
trace add variable reflistfilter write reflistfilter_change
pack $top.f.e -side right -fill x -expand 1
pack $top.f.l -side left
grid $top.f - -sticky ew -pady 2
- ${NS}::button $top.close -command [list destroy $top] -text [mc "Close"]
+ ttk::checkbutton $top.sort -text [mc "Sort refs by type"] \
+ -variable sortrefsbytype -command {refill_reflist}
+ grid $top.sort - -sticky w -pady 2
+ ttk::button $top.close -command [list destroy $top] -text [mc "Close"]
bind $top <Key-Escape> [list destroy $top]
grid $top.close -
grid columnconfigure $top 0 -weight 1
@@ -10240,43 +10224,71 @@ proc reflistfilter_change {n1 n2 op} {
}
proc refill_reflist {} {
- global reflist reflistfilter showrefstop headids tagids otherrefids
- global curview
+ global reflist reflistfilter showrefstop headids tagids otherrefids sortrefsbytype
+ global curview upstreamofref
if {![info exists showrefstop] || ![winfo exists $showrefstop]} return
- set refs {}
+ set localrefs {}
+ set remoterefs {}
+ set trackedremoterefs {}
+ set tagrefs {}
+ set otherrefs {}
+
foreach n [array names headids] {
- if {[string match $reflistfilter $n]} {
+ if {![string match "remotes/*" $n] && [string match $reflistfilter $n]} {
if {[commitinview $headids($n) $curview]} {
- if {[string match "remotes/*" $n]} {
- lappend refs [list $n R]
- } else {
- lappend refs [list $n H]
+ lappend localrefs [list $n H]
+ if {[info exists upstreamofref($n)]} {
+ lappend trackedremoterefs [list $upstreamofref($n) R]
}
} else {
interestedin $headids($n) {run refill_reflist}
}
}
}
+ set trackedremoterefs [lsort -index 0 $trackedremoterefs]
+ set localrefs [lsort -index 0 $localrefs]
+
+ foreach n [array names headids] {
+ if {[string match "remotes/*" $n] && [string match $reflistfilter $n]} {
+ if {[commitinview $headids($n) $curview]} {
+ if {[lsearch -exact $trackedremoterefs [list $n R]] < 0} {
+ lappend remoterefs [list $n R]
+ }
+ } else {
+ interestedin $headids($n) {run refill_reflist}
+ }
+ }
+ }
+ set remoterefs [lsort -index 0 $remoterefs]
+
foreach n [array names tagids] {
if {[string match $reflistfilter $n]} {
if {[commitinview $tagids($n) $curview]} {
- lappend refs [list $n T]
+ lappend tagrefs [list $n T]
} else {
interestedin $tagids($n) {run refill_reflist}
}
}
}
+ set tagrefs [lsort -index 0 $tagrefs]
+
foreach n [array names otherrefids] {
if {[string match $reflistfilter $n]} {
if {[commitinview $otherrefids($n) $curview]} {
- lappend refs [list $n o]
+ lappend otherrefs [list "$n" o]
} else {
interestedin $otherrefids($n) {run refill_reflist}
}
}
}
- set refs [lsort -index 0 $refs]
+ set otherrefs [lsort -index 0 $otherrefs]
+
+ set refs [concat $localrefs $trackedremoterefs $remoterefs $tagrefs $otherrefs]
+ if {!$sortrefsbytype} {
+ set refs [lsort -index 0 $refs]
+ }
+
if {$refs eq $reflist} return
# Update the contents of $showrefstop.list according to the
@@ -10338,7 +10350,7 @@ proc getallcommits {} {
set cachedarcs 0
set allccache [file join $gitdir "gitk.cache"]
if {![catch {
- set f [open $allccache r]
+ set f [safe_open_file $allccache r]
set allcwait 1
getcache $f
}]} return
@@ -10347,7 +10359,7 @@ proc getallcommits {} {
if {$allcwait} {
return
}
- set cmd [list | git rev-list --parents]
+ set cmd [list git rev-list --parents]
set allcupdate [expr {$seeds ne {}}]
if {!$allcupdate} {
set ids "--all"
@@ -10375,10 +10387,11 @@ proc getallcommits {} {
if {$ids ne {}} {
if {$ids eq "--all"} {
set cmd [concat $cmd "--all"]
+ set fd [safe_open_command $cmd]
} else {
- set cmd [concat $cmd --stdin "<<[join $ids "\\n"]"]
+ set cmd [concat $cmd --stdin]
+ set fd [safe_open_command_redirect $cmd [list "<<[join $ids "\n"]"]]
}
- set fd [open $cmd r]
fconfigure $fd -blocking 0
incr allcommits
nowbusy allcommits
@@ -10768,7 +10781,7 @@ proc savecache {} {
set cachearc 0
set cachedarcs $nextarc
catch {
- set f [open $allccache w]
+ set f [safe_open_file $allccache w]
puts $f [list 1 $cachedarcs]
run writecache $f
}
@@ -11471,7 +11484,7 @@ proc add_tag_ctext {tag} {
if {![info exists cached_tagcontent($tag)]} {
catch {
- set cached_tagcontent($tag) [exec git cat-file -p $tag]
+ set cached_tagcontent($tag) [safe_exec [list git cat-file -p $tag]]
}
}
$ctext insert end "[mc "Tag"]: $tag\n" bold
@@ -11534,84 +11547,16 @@ proc doquit {} {
}
proc mkfontdisp {font top which} {
- global fontattr fontpref $font NS use_ttk
+ global fontattr fontpref $font
set fontpref($font) [set $font]
- ${NS}::button $top.${font}but -text $which \
+ ttk::button $top.${font}but -text $which \
-command [list choosefont $font $which]
- ${NS}::label $top.$font -relief flat -font $font \
+ ttk::label $top.$font -relief flat -font $font \
-text $fontattr($font,family) -justify left
grid x $top.${font}but $top.$font -sticky w
}
-proc choosefont {font which} {
- global fontparam fontlist fonttop fontattr
- global prefstop NS
-
- set fontparam(which) $which
- set fontparam(font) $font
- set fontparam(family) [font actual $font -family]
- set fontparam(size) $fontattr($font,size)
- set fontparam(weight) $fontattr($font,weight)
- set fontparam(slant) $fontattr($font,slant)
- set top .gitkfont
- set fonttop $top
- if {![winfo exists $top]} {
- font create sample
- eval font config sample [font actual $font]
- ttk_toplevel $top
- make_transient $top $prefstop
- wm title $top [mc "Gitk font chooser"]
- ${NS}::label $top.l -textvariable fontparam(which)
- pack $top.l -side top
- set fontlist [lsort [font families]]
- ${NS}::frame $top.f
- listbox $top.f.fam -listvariable fontlist \
- -yscrollcommand [list $top.f.sb set]
- bind $top.f.fam <<ListboxSelect>> selfontfam
- ${NS}::scrollbar $top.f.sb -command [list $top.f.fam yview]
- pack $top.f.sb -side right -fill y
- pack $top.f.fam -side left -fill both -expand 1
- pack $top.f -side top -fill both -expand 1
- ${NS}::frame $top.g
- spinbox $top.g.size -from 4 -to 40 -width 4 \
- -textvariable fontparam(size) \
- -validatecommand {string is integer -strict %s}
- checkbutton $top.g.bold -padx 5 \
- -font {{Times New Roman} 12 bold} -text [mc "B"] -indicatoron 0 \
- -variable fontparam(weight) -onvalue bold -offvalue normal
- checkbutton $top.g.ital -padx 5 \
- -font {{Times New Roman} 12 italic} -text [mc "I"] -indicatoron 0 \
- -variable fontparam(slant) -onvalue italic -offvalue roman
- pack $top.g.size $top.g.bold $top.g.ital -side left
- pack $top.g -side top
- canvas $top.c -width 150 -height 50 -border 2 -relief sunk \
- -background white
- $top.c create text 100 25 -anchor center -text $which -font sample \
- -fill black -tags text
- bind $top.c <Configure> [list centertext $top.c]
- pack $top.c -side top -fill x
- ${NS}::frame $top.buts
- ${NS}::button $top.buts.ok -text [mc "OK"] -command fontok -default active
- ${NS}::button $top.buts.can -text [mc "Cancel"] -command fontcan -default normal
- bind $top <Key-Return> fontok
- bind $top <Key-Escape> fontcan
- grid $top.buts.ok $top.buts.can
- grid columnconfigure $top.buts 0 -weight 1 -uniform a
- grid columnconfigure $top.buts 1 -weight 1 -uniform a
- pack $top.buts -side bottom -fill x
- trace add variable fontparam write chg_fontparam
- } else {
- raise $top
- $top.c itemconf text -text $which
- }
- set i [lsearch -exact $fontlist $fontparam(family)]
- if {$i >= 0} {
- $top.f.fam selection set $i
- $top.f.fam see $i
- }
-}
-
proc centertext {w} {
$w coords text [expr {[winfo width $w] / 2}] [expr {[winfo height $w] / 2}]
}
@@ -11644,26 +11589,21 @@ proc fontcan {} {
}
}
-if {[package vsatisfies [package provide Tk] 8.6]} {
- # In Tk 8.6 we have a native font chooser dialog. Overwrite the above
- # function to make use of it.
- proc choosefont {font which} {
- tk fontchooser configure -title $which -font $font \
- -command [list on_choosefont $font $which]
- tk fontchooser show
- }
- proc on_choosefont {font which newfont} {
- global fontparam
- puts stderr "$font $newfont"
- array set f [font actual $newfont]
- set fontparam(which) $which
- set fontparam(font) $font
- set fontparam(family) $f(-family)
- set fontparam(size) $f(-size)
- set fontparam(weight) $f(-weight)
- set fontparam(slant) $f(-slant)
- fontok
- }
+proc choosefont {font which} {
+ tk fontchooser configure -title $which -font $font \
+ -command [list on_choosefont $font $which]
+ tk fontchooser show
+}
+proc on_choosefont {font which newfont} {
+ global fontparam
+ array set f [font actual $newfont]
+ set fontparam(which) $which
+ set fontparam(font) $font
+ set fontparam(family) $f(-family)
+ set fontparam(size) $f(-size)
+ set fontparam(weight) $f(-weight)
+ set fontparam(slant) $f(-slant)
+ fontok
}
proc selfontfam {} {
@@ -11683,172 +11623,157 @@ proc chg_fontparam {v sub op} {
# Create a property sheet tab page
proc create_prefs_page {w} {
- global NS
- set parent [join [lrange [split $w .] 0 end-1] .]
- if {[winfo class $parent] eq "TNotebook"} {
- ${NS}::frame $w
- } else {
- ${NS}::labelframe $w
- }
+ ttk::frame $w
}
proc prefspage_general {notebook} {
- global NS maxwidth maxgraphpct showneartags showlocalchanges
- global tabstop wrapcomment wrapdefault limitdiffs
- global autocopy autoselect autosellen extdifftool perfile_attrs
- global hideremotes want_ttk have_ttk maxrefs web_browser
+ global {*}$::config_variables
+ global hashlength
set page [create_prefs_page $notebook.general]
- ${NS}::label $page.ldisp -text [mc "Commit list display options"] -font mainfontbold
+ ttk::label $page.ldisp -text [mc "Commit list display options"] -font mainfontbold
grid $page.ldisp - -sticky w -pady 10
- ${NS}::label $page.spacer -text " "
- ${NS}::label $page.maxwidthl -text [mc "Maximum graph width (lines)"]
+ ttk::label $page.spacer -text " "
+ ttk::label $page.maxwidthl -text [mc "Maximum graph width (lines)"]
spinbox $page.maxwidth -from 0 -to 100 -width 4 -textvariable maxwidth
grid $page.spacer $page.maxwidthl $page.maxwidth -sticky w
#xgettext:no-tcl-format
- ${NS}::label $page.maxpctl -text [mc "Maximum graph width (% of pane)"]
+ ttk::label $page.maxpctl -text [mc "Maximum graph width (% of pane)"]
spinbox $page.maxpct -from 1 -to 100 -width 4 -textvariable maxgraphpct
grid x $page.maxpctl $page.maxpct -sticky w
- ${NS}::checkbutton $page.showlocal -text [mc "Show local changes"] \
+ ttk::checkbutton $page.showlocal -text [mc "Show local changes"] \
-variable showlocalchanges
grid x $page.showlocal -sticky w
- ${NS}::checkbutton $page.hideremotes -text [mc "Hide remote refs"] \
+ ttk::checkbutton $page.hideremotes -text [mc "Hide remote refs"] \
-variable hideremotes
grid x $page.hideremotes -sticky w
- ${NS}::checkbutton $page.autocopy -text [mc "Copy commit ID to clipboard"] \
+ ttk::checkbutton $page.autocopy -text [mc "Copy commit ID to clipboard"] \
-variable autocopy
grid x $page.autocopy -sticky w
if {[haveselectionclipboard]} {
- ${NS}::checkbutton $page.autoselect -text [mc "Copy commit ID to X11 selection"] \
+ ttk::checkbutton $page.autoselect -text [mc "Copy commit ID to X11 selection"] \
-variable autoselect
grid x $page.autoselect -sticky w
}
- spinbox $page.autosellen -from 1 -to 40 -width 4 -textvariable autosellen
- ${NS}::label $page.autosellenl -text [mc "Length of commit ID to copy"]
+
+ spinbox $page.autosellen -from 1 -to $hashlength -width 4 -textvariable autosellen
+ ttk::label $page.autosellenl -text [mc "Length of commit ID to copy"]
grid x $page.autosellenl $page.autosellen -sticky w
+ ttk::label $page.kscroll1 -text [mc "Wheel scrolling multiplier"]
+ spinbox $page.kscroll -from 1 -to 20 -width 4 -textvariable kscroll
+ grid x $page.kscroll1 $page.kscroll -sticky w
- ${NS}::label $page.ddisp -text [mc "Diff display options"] -font mainfontbold
+ ttk::label $page.ddisp -text [mc "Diff display options"] -font mainfontbold
grid $page.ddisp - -sticky w -pady 10
- ${NS}::label $page.tabstopl -text [mc "Tab spacing"]
+ ttk::label $page.tabstopl -text [mc "Tab spacing"]
spinbox $page.tabstop -from 1 -to 20 -width 4 -textvariable tabstop
grid x $page.tabstopl $page.tabstop -sticky w
- ${NS}::label $page.wrapcommentl -text [mc "Wrap comment text"]
+ ttk::label $page.wrapcommentl -text [mc "Wrap comment text"]
makedroplist $page.wrapcomment wrapcomment none char word
grid x $page.wrapcommentl $page.wrapcomment -sticky w
- ${NS}::label $page.wrapdefaultl -text [mc "Wrap other text"]
+ ttk::label $page.wrapdefaultl -text [mc "Wrap other text"]
makedroplist $page.wrapdefault wrapdefault none char word
grid x $page.wrapdefaultl $page.wrapdefault -sticky w
- ${NS}::checkbutton $page.ntag -text [mc "Display nearby tags/heads"] \
+ ttk::checkbutton $page.ntag -text [mc "Display nearby tags/heads"] \
-variable showneartags
grid x $page.ntag -sticky w
- ${NS}::label $page.maxrefsl -text [mc "Maximum # tags/heads to show"]
+ ttk::label $page.maxrefsl -text [mc "Maximum # tags/heads to show"]
spinbox $page.maxrefs -from 1 -to 1000 -width 4 -textvariable maxrefs
grid x $page.maxrefsl $page.maxrefs -sticky w
- ${NS}::checkbutton $page.ldiff -text [mc "Limit diffs to listed paths"] \
+ ttk::checkbutton $page.ldiff -text [mc "Limit diffs to listed paths"] \
-variable limitdiffs
grid x $page.ldiff -sticky w
- ${NS}::checkbutton $page.lattr -text [mc "Support per-file encodings"] \
+ ttk::checkbutton $page.lattr -text [mc "Support per-file encodings"] \
-variable perfile_attrs
grid x $page.lattr -sticky w
- ${NS}::entry $page.extdifft -textvariable extdifftool
- ${NS}::frame $page.extdifff
- ${NS}::label $page.extdifff.l -text [mc "External diff tool" ]
- ${NS}::button $page.extdifff.b -text [mc "Choose..."] -command choose_extdiff
+ ttk::entry $page.extdifft -textvariable extdifftool
+ ttk::frame $page.extdifff
+ ttk::label $page.extdifff.l -text [mc "External diff tool" ]
+ ttk::button $page.extdifff.b -text [mc "Choose..."] -command choose_extdiff
pack $page.extdifff.l $page.extdifff.b -side left
pack configure $page.extdifff.l -padx 10
grid x $page.extdifff $page.extdifft -sticky ew
- ${NS}::entry $page.webbrowser -textvariable web_browser
- ${NS}::frame $page.webbrowserf
- ${NS}::label $page.webbrowserf.l -text [mc "Web browser" ]
+ ttk::entry $page.webbrowser -textvariable web_browser
+ ttk::frame $page.webbrowserf
+ ttk::label $page.webbrowserf.l -text [mc "Web browser" ]
pack $page.webbrowserf.l -side left
pack configure $page.webbrowserf.l -padx 10
grid x $page.webbrowserf $page.webbrowser -sticky ew
- ${NS}::label $page.lgen -text [mc "General options"] -font mainfontbold
- grid $page.lgen - -sticky w -pady 10
- ${NS}::checkbutton $page.want_ttk -variable want_ttk \
- -text [mc "Use themed widgets"]
- if {$have_ttk} {
- ${NS}::label $page.ttk_note -text [mc "(change requires restart)"]
- } else {
- ${NS}::label $page.ttk_note -text [mc "(currently unavailable)"]
- }
- grid x $page.want_ttk $page.ttk_note -sticky w
return $page
}
proc prefspage_colors {notebook} {
- global NS uicolor bgcolor fgcolor ctext diffcolors selectbgcolor markbgcolor
+ global uicolor bgcolor fgcolor ctext diffcolors selectbgcolor markbgcolor
global diffbgcolors
set page [create_prefs_page $notebook.colors]
- ${NS}::label $page.cdisp -text [mc "Colors: press to choose"] -font mainfontbold
+ ttk::label $page.cdisp -text [mc "Colors: press to choose"] -font mainfontbold
grid $page.cdisp - -sticky w -pady 10
label $page.ui -padx 40 -relief sunk -background $uicolor
- ${NS}::button $page.uibut -text [mc "Interface"] \
+ ttk::button $page.uibut -text [mc "Interface"] \
-command [list choosecolor uicolor {} $page.ui [mc "interface"] setui]
grid x $page.uibut $page.ui -sticky w
label $page.bg -padx 40 -relief sunk -background $bgcolor
- ${NS}::button $page.bgbut -text [mc "Background"] \
+ ttk::button $page.bgbut -text [mc "Background"] \
-command [list choosecolor bgcolor {} $page.bg [mc "background"] setbg]
grid x $page.bgbut $page.bg -sticky w
label $page.fg -padx 40 -relief sunk -background $fgcolor
- ${NS}::button $page.fgbut -text [mc "Foreground"] \
+ ttk::button $page.fgbut -text [mc "Foreground"] \
-command [list choosecolor fgcolor {} $page.fg [mc "foreground"] setfg]
grid x $page.fgbut $page.fg -sticky w
label $page.diffold -padx 40 -relief sunk -background [lindex $diffcolors 0]
- ${NS}::button $page.diffoldbut -text [mc "Diff: old lines"] \
+ ttk::button $page.diffoldbut -text [mc "Diff: old lines"] \
-command [list choosecolor diffcolors 0 $page.diffold [mc "diff old lines"] \
[list $ctext tag conf d0 -foreground]]
grid x $page.diffoldbut $page.diffold -sticky w
label $page.diffoldbg -padx 40 -relief sunk -background [lindex $diffbgcolors 0]
- ${NS}::button $page.diffoldbgbut -text [mc "Diff: old lines bg"] \
+ ttk::button $page.diffoldbgbut -text [mc "Diff: old lines bg"] \
-command [list choosecolor diffbgcolors 0 $page.diffoldbg \
[mc "diff old lines bg"] \
[list $ctext tag conf d0 -background]]
grid x $page.diffoldbgbut $page.diffoldbg -sticky w
label $page.diffnew -padx 40 -relief sunk -background [lindex $diffcolors 1]
- ${NS}::button $page.diffnewbut -text [mc "Diff: new lines"] \
+ ttk::button $page.diffnewbut -text [mc "Diff: new lines"] \
-command [list choosecolor diffcolors 1 $page.diffnew [mc "diff new lines"] \
[list $ctext tag conf dresult -foreground]]
grid x $page.diffnewbut $page.diffnew -sticky w
label $page.diffnewbg -padx 40 -relief sunk -background [lindex $diffbgcolors 1]
- ${NS}::button $page.diffnewbgbut -text [mc "Diff: new lines bg"] \
+ ttk::button $page.diffnewbgbut -text [mc "Diff: new lines bg"] \
-command [list choosecolor diffbgcolors 1 $page.diffnewbg \
[mc "diff new lines bg"] \
[list $ctext tag conf dresult -background]]
grid x $page.diffnewbgbut $page.diffnewbg -sticky w
label $page.hunksep -padx 40 -relief sunk -background [lindex $diffcolors 2]
- ${NS}::button $page.hunksepbut -text [mc "Diff: hunk header"] \
+ ttk::button $page.hunksepbut -text [mc "Diff: hunk header"] \
-command [list choosecolor diffcolors 2 $page.hunksep \
[mc "diff hunk header"] \
[list $ctext tag conf hunksep -foreground]]
grid x $page.hunksepbut $page.hunksep -sticky w
label $page.markbgsep -padx 40 -relief sunk -background $markbgcolor
- ${NS}::button $page.markbgbut -text [mc "Marked line bg"] \
+ ttk::button $page.markbgbut -text [mc "Marked line bg"] \
-command [list choosecolor markbgcolor {} $page.markbgsep \
[mc "marked line background"] \
[list $ctext tag conf omark -background]]
grid x $page.markbgbut $page.markbgsep -sticky w
label $page.selbgsep -padx 40 -relief sunk -background $selectbgcolor
- ${NS}::button $page.selbgbut -text [mc "Select bg"] \
+ ttk::button $page.selbgbut -text [mc "Select bg"] \
-command [list choosecolor selectbgcolor {} $page.selbgsep [mc "background"] setselbg]
grid x $page.selbgbut $page.selbgsep -sticky w
return $page
}
proc prefspage_fonts {notebook} {
- global NS
set page [create_prefs_page $notebook.fonts]
- ${NS}::label $page.cfont -text [mc "Fonts: press to choose"] -font mainfontbold
+ ttk::label $page.cfont -text [mc "Fonts: press to choose"] -font mainfontbold
grid $page.cfont - -sticky w -pady 10
mkfontdisp mainfont $page [mc "Main font"]
mkfontdisp textfont $page [mc "Diff display font"]
@@ -11857,11 +11782,8 @@ proc prefspage_fonts {notebook} {
}
proc doprefs {} {
- global maxwidth maxgraphpct use_ttk NS
- global oldprefs prefstop showneartags showlocalchanges
- global uicolor bgcolor fgcolor ctext diffcolors selectbgcolor markbgcolor
- global tabstop limitdiffs autoselect autosellen extdifftool perfile_attrs
- global hideremotes want_ttk have_ttk wrapcomment wrapdefault
+ global oldprefs prefstop
+ global {*}$::config_variables
set top .gitkprefs
set prefstop $top
@@ -11869,49 +11791,34 @@ proc doprefs {} {
raise $top
return
}
- foreach v {maxwidth maxgraphpct showneartags showlocalchanges \
- limitdiffs tabstop perfile_attrs hideremotes want_ttk wrapcomment wrapdefault} {
+ foreach v $::config_variables {
set oldprefs($v) [set $v]
}
ttk_toplevel $top
wm title $top [mc "Gitk preferences"]
make_transient $top .
- if {[set use_notebook [expr {$use_ttk && [info command ::ttk::notebook] ne ""}]]} {
- set notebook [ttk::notebook $top.notebook]
- } else {
- set notebook [${NS}::frame $top.notebook -borderwidth 0 -relief flat]
- }
+ set notebook [ttk::notebook $top.notebook]
lappend pages [prefspage_general $notebook] [mc "General"]
lappend pages [prefspage_colors $notebook] [mc "Colors"]
lappend pages [prefspage_fonts $notebook] [mc "Fonts"]
set col 0
foreach {page title} $pages {
- if {$use_notebook} {
- $notebook add $page -text $title
- } else {
- set btn [${NS}::button $notebook.b_[string map {. X} $page] \
- -text $title -command [list raise $page]]
- $page configure -text $title
- grid $btn -row 0 -column [incr col] -sticky w
- grid $page -row 1 -column 0 -sticky news -columnspan 100
- }
+ $notebook add $page -text $title
}
- if {!$use_notebook} {
- grid columnconfigure $notebook 0 -weight 1
- grid rowconfigure $notebook 1 -weight 1
- raise [lindex $pages 0]
- }
+ grid columnconfigure $notebook 0 -weight 1
+ grid rowconfigure $notebook 1 -weight 1
+ raise [lindex $pages 0]
grid $notebook -sticky news -padx 2 -pady 2
grid rowconfigure $top 0 -weight 1
grid columnconfigure $top 0 -weight 1
- ${NS}::frame $top.buts
- ${NS}::button $top.buts.ok -text [mc "OK"] -command prefsok -default active
- ${NS}::button $top.buts.can -text [mc "Cancel"] -command prefscan -default normal
+ ttk::frame $top.buts
+ ttk::button $top.buts.ok -text [mc "OK"] -command prefsok -default active
+ ttk::button $top.buts.can -text [mc "Cancel"] -command prefscan -default normal
bind $top <Key-Return> prefsok
bind $top <Key-Escape> prefscan
grid $top.buts.ok $top.buts.can
@@ -11994,10 +11901,9 @@ proc setfg {c} {
proc prefscan {} {
global oldprefs prefstop
+ global {*}$::config_variables
- foreach v {maxwidth maxgraphpct showneartags showlocalchanges \
- limitdiffs tabstop perfile_attrs hideremotes want_ttk wrapcomment wrapdefault} {
- global $v
+ foreach v $::config_variables {
set $v $oldprefs($v)
}
catch {destroy $prefstop}
@@ -12006,11 +11912,8 @@ proc prefscan {} {
}
proc prefsok {} {
- global maxwidth maxgraphpct
- global oldprefs prefstop showneartags showlocalchanges
- global fontpref mainfont textfont uifont
- global limitdiffs treediffs perfile_attrs
- global hideremotes wrapcomment wrapdefault
+ global oldprefs prefstop fontpref treediffs
+ global {*}$::config_variables
global ctext
catch {destroy $prefstop}
@@ -12382,7 +12285,7 @@ proc gitattr {path attr default} {
set r $path_attr_cache($attr,$path)
} else {
set r "unspecified"
- if {![catch {set line [exec git check-attr $attr -- $path]}]} {
+ if {![catch {set line [safe_exec [list git check-attr $attr -- $path]]}]} {
regexp "(.*): $attr: (.*)" $line m f r
}
set path_attr_cache($attr,$path) $r
@@ -12409,7 +12312,7 @@ proc cache_gitattr {attr pathlist} {
while {$newlist ne {}} {
set head [lrange $newlist 0 [expr {$lim - 1}]]
set newlist [lrange $newlist $lim end]
- if {![catch {set rlist [eval exec git check-attr $attr -- $head]}]} {
+ if {![catch {set rlist [safe_exec [concat git check-attr $attr -- $head]]}]} {
foreach row [split $rlist "\n"] {
if {[regexp "(.*): $attr: (.*)" $row m path value]} {
if {[string index $path 0] eq "\""} {
@@ -12452,20 +12355,13 @@ namespace import ::msgcat::mc
## And eventually load the actual message catalog
::msgcat::mcload $gitk_msgsdir
-# First check that Tcl/Tk is recent enough
-if {[catch {package require Tk 8.4} err]} {
- show_error {} . [mc "Sorry, gitk cannot run with this version of Tcl/Tk.\n\
- Gitk requires at least Tcl/Tk 8.4."]
- exit 1
-}
-
# on OSX bring the current Wish process window to front
if {[tk windowingsystem] eq "aqua"} {
- exec osascript -e [format {
+ safe_exec [list osascript -e [format {
tell application "System Events"
set frontmost of processes whose unix id is %d to true
end tell
- } [pid] ]
+ } [pid] ]]
}
# Unset GIT_TRACE var if set
@@ -12504,6 +12400,17 @@ catch {
}
}
+# Use the object format as the hash algorithm (either "sha1" or "sha256")
+set hashalgorithm [exec git rev-parse --show-object-format]
+if {$hashalgorithm eq "sha1"} {
+ set hashlength 40
+} elseif {$hashalgorithm eq "sha256"} {
+ set hashlength 64
+} else {
+ puts stderr "Unknown hash algorithm: $hashalgorithm"
+ exit 1
+}
+
set log_showroot true
catch {
set log_showroot [exec git config --bool --get log.showroot]
@@ -12537,17 +12444,18 @@ set wrapcomment "none"
set wrapdefault "none"
set showneartags 1
set hideremotes 0
+set sortrefsbytype 1
set maxrefs 20
set visiblerefs {"master"}
set maxlinelen 200
set showlocalchanges 1
set limitdiffs 1
+set kscroll 3
set datetimeformat "%Y-%m-%d %H:%M:%S"
set autocopy 0
set autoselect 1
-set autosellen 40
+set autosellen $hashlength
set perfile_attrs 0
-set want_ttk 1
if {[tk windowingsystem] eq "aqua"} {
set extdifftool "opendiff"
@@ -12640,19 +12548,66 @@ catch {
config_check_tmp_exists 50
set config_variables {
- mainfont textfont uifont tabstop findmergefiles maxgraphpct maxwidth
- cmitmode wrapcomment wrapdefault autocopy autoselect autosellen
- showneartags maxrefs visiblerefs
- hideremotes showlocalchanges datetimeformat limitdiffs uicolor want_ttk
- bgcolor fgcolor uifgcolor uifgdisabledcolor colors diffcolors mergecolors
- markbgcolor diffcontext selectbgcolor foundbgcolor currentsearchhitbgcolor
- extdifftool perfile_attrs headbgcolor headfgcolor headoutlinecolor
- remotebgcolor tagbgcolor tagfgcolor tagoutlinecolor reflinecolor
- filesepbgcolor filesepfgcolor linehoverbgcolor linehoverfgcolor
- linehoveroutlinecolor mainheadcirclecolor workingfilescirclecolor
- indexcirclecolor circlecolors linkfgcolor circleoutlinecolor diffbgcolors
+ autocopy
+ autoselect
+ autosellen
+ bgcolor
+ circlecolors
+ circleoutlinecolor
+ cmitmode
+ colors
+ currentsearchhitbgcolor
+ datetimeformat
+ diffbgcolors
+ diffcolors
+ diffcontext
+ extdifftool
+ fgcolor
+ filesepbgcolor
+ filesepfgcolor
+ findmergefiles
+ foundbgcolor
+ headbgcolor
+ headfgcolor
+ headoutlinecolor
+ hideremotes
+ indexcirclecolor
+ kscroll
+ limitdiffs
+ linehoverbgcolor
+ linehoverfgcolor
+ linehoveroutlinecolor
+ linkfgcolor
+ mainfont
+ mainheadcirclecolor
+ markbgcolor
+ maxgraphpct
+ maxrefs
+ maxwidth
+ mergecolors
+ perfile_attrs
+ reflinecolor
+ remotebgcolor
+ selectbgcolor
+ showlocalchanges
+ showneartags
+ sortrefsbytype
+ tabstop
+ tagbgcolor
+ tagfgcolor
+ tagoutlinecolor
+ textfont
+ uicolor
+ uifgcolor
+ uifgdisabledcolor
+ uifont
+ visiblerefs
web_browser
+ workingfilescirclecolor
+ wrapcomment
+ wrapdefault
}
+
foreach var $config_variables {
config_init_trace $var
trace add variable $var write config_variable_change_cb
@@ -12713,7 +12668,7 @@ if {$selecthead eq "HEAD"} {
if {$i >= [llength $argv] && $revtreeargs ne {}} {
# no -- on command line, but some arguments (other than --argscmd)
if {[catch {
- set f [eval exec git rev-parse --no-revs --no-flags $revtreeargs]
+ set f [safe_exec [concat git rev-parse --no-revs --no-flags $revtreeargs]]
set cmdline_files [split $f "\n"]
set n [llength $cmdline_files]
set revtreeargs [lrange $revtreeargs 0 end-$n]
@@ -12743,25 +12698,7 @@ set nullid "0000000000000000000000000000000000000000"
set nullid2 "0000000000000000000000000000000000000001"
set nullfile "/dev/null"
-set have_tk85 [expr {[package vcompare $tk_version "8.5"] >= 0}]
-set have_tk86 [expr {[package vcompare $tk_version "8.6"] >= 0}]
-if {![info exists have_ttk]} {
- set have_ttk [llength [info commands ::ttk::style]]
-}
-set use_ttk [expr {$have_ttk && $want_ttk}]
-set NS [expr {$use_ttk ? "ttk" : ""}]
-
-if {$use_ttk} {
- setttkstyle
-}
-
-regexp {^git version ([\d.]*\d)} [exec git version] _ git_version
-
-set show_notes {}
-if {[package vcompare $git_version "1.6.6.2"] >= 0} {
- set show_notes "--show-notes"
-}
-
+setttkstyle
set appname "gitk"
set runq {}
diff --git a/gpg-interface.c b/gpg-interface.c
index 0896458de5..d26c7135b0 100644
--- a/gpg-interface.c
+++ b/gpg-interface.c
@@ -144,6 +144,18 @@ static struct gpg_format *get_format_by_sig(const char *sig)
return NULL;
}
+const char *get_signature_format(const char *buf)
+{
+ struct gpg_format *format = get_format_by_sig(buf);
+ return format ? format->name : "unknown";
+}
+
+int valid_signature_format(const char *format)
+{
+ return (!!get_format_by_name(format) ||
+ !strcmp(format, "unknown"));
+}
+
void signature_check_clear(struct signature_check *sigc)
{
FREE_AND_NULL(sigc->payload);
@@ -783,7 +795,7 @@ static int git_gpg_config(const char *var, const char *value,
if (fmtname) {
fmt = get_format_by_name(fmtname);
- return git_config_string((char **) &fmt->program, var, value);
+ return git_config_pathname((char **) &fmt->program, var, value);
}
return 0;
@@ -1048,7 +1060,7 @@ static int sign_buffer_ssh(struct strbuf *buffer, struct strbuf *signature,
key_file->filename.buf);
goto out;
}
- ssh_signing_key_file = strbuf_detach(&key_file->filename, NULL);
+ ssh_signing_key_file = xstrdup(key_file->filename.buf);
} else {
/* We assume a file */
ssh_signing_key_file = interpolate_path(signing_key, 1);
diff --git a/gpg-interface.h b/gpg-interface.h
index e09f12e8d0..60ddf8bbfa 100644
--- a/gpg-interface.h
+++ b/gpg-interface.h
@@ -48,6 +48,18 @@ struct signature_check {
void signature_check_clear(struct signature_check *sigc);
/*
+ * Return the format of the signature (like "openpgp", "x509", "ssh"
+ * or "unknown").
+ */
+const char *get_signature_format(const char *buf);
+
+/*
+ * Is the signature format valid (like "openpgp", "x509", "ssh" or
+ * "unknown")?
+ */
+int valid_signature_format(const char *format);
+
+/*
* Look at a GPG signed tag object. If such a signature exists, store it in
* signature and the signed content in payload. Return 1 if a signature was
* found, and 0 otherwise.
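
A minimal caller sketch for the two helpers declared above. Only get_signature_format() and valid_signature_format() come from this patch; the report_signature() wrapper, its arguments, and the use of error()/warning() are illustrative assumptions.

    #include "git-compat-util.h"
    #include "gpg-interface.h"

    /*
     * Hypothetical caller: validate a configured format name and compare
     * it against the format detected in a signature buffer.
     */
    static int report_signature(const char *sig_buf, const char *configured_format)
    {
        const char *fmt = get_signature_format(sig_buf);

        if (!valid_signature_format(configured_format))
            return error("invalid signature format '%s'", configured_format);
        if (strcmp(fmt, configured_format))
            warning("expected a %s signature, found %s", configured_format, fmt);
        return 0;
    }
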
diff --git a/grep.c b/grep.c
index f8d535182c..932647e4a6 100644
--- a/grep.c
+++ b/grep.c
@@ -5,7 +5,7 @@
#include "gettext.h"
#include "grep.h"
#include "hex.h"
-#include "object-store.h"
+#include "odb.h"
#include "pretty.h"
#include "userdiff.h"
#include "xdiff-interface.h"
@@ -1931,8 +1931,8 @@ static int grep_source_load_oid(struct grep_source *gs)
{
enum object_type type;
- gs->buf = repo_read_object_file(gs->repo, gs->identifier, &type,
- &gs->size);
+ gs->buf = odb_read_object(gs->repo->objects, gs->identifier,
+ &type, &gs->size);
if (!gs->buf)
return error(_("'%s': unable to read %s"),
gs->name,
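
The same object-store-to-ODB migration recurs in the http-backend.c, http-push.c, http-walker.c and http.c hunks below: the repository handle argument is replaced by its ->objects member while the rest of the call stays the same. A condensed sketch of the pattern; read_object_or_die() is a made-up wrapper, the API call is the one visible in the hunks.

    #include "git-compat-util.h"
    #include "hex.h"
    #include "odb.h"
    #include "repository.h"

    /*
     * old: repo_read_object_file(repo, oid, &type, &size)
     * new: odb_read_object(repo->objects, oid, &type, &size)
     */
    static void *read_object_or_die(struct repository *repo,
                                    const struct object_id *oid,
                                    unsigned long *size)
    {
        enum object_type type;
        void *buf = odb_read_object(repo->objects, oid, &type, size);

        if (!buf)
            die("unable to read %s", oid_to_hex(oid));
        return buf;
    }
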
diff --git a/hash.h b/hash.h
index d6422ddf45..fae966b23c 100644
--- a/hash.h
+++ b/hash.h
@@ -175,6 +175,16 @@ static inline void git_SHA256_Clone(git_SHA256_CTX *dst, const git_SHA256_CTX *s
/* Number of algorithms supported (including unknown). */
#define GIT_HASH_NALGOS (GIT_HASH_SHA256 + 1)
+/* Default hash algorithm if unspecified. */
+#ifdef WITH_BREAKING_CHANGES
+# define GIT_HASH_DEFAULT GIT_HASH_SHA256
+#else
+# define GIT_HASH_DEFAULT GIT_HASH_SHA1
+#endif
+
+/* Legacy hash algorithm. Implied for older data formats which don't specify. */
+#define GIT_HASH_SHA1_LEGACY GIT_HASH_SHA1
+
/* "sha1", big-endian */
#define GIT_SHA1_FORMAT_ID 0x73686131
@@ -216,6 +226,7 @@ struct object_id {
#define GET_OID_REQUIRE_PATH 010000
#define GET_OID_HASH_ANY 020000
#define GET_OID_SKIP_AMBIGUITY_CHECK 040000
+#define GET_OID_GENTLY 0100000
#define GET_OID_DISAMBIGUATORS \
(GET_OID_COMMIT | GET_OID_COMMITTISH | \
diff --git a/help.c b/help.c
index 21b778707a..652efebf00 100644
--- a/help.c
+++ b/help.c
@@ -810,6 +810,9 @@ void get_version_info(struct strbuf *buf, int show_build_options)
SHA1_UNSAFE_BACKEND);
#endif
strbuf_addf(buf, "SHA-256: %s\n", SHA256_BACKEND);
+ strbuf_addf(buf, "default-ref-format: %s\n",
+ ref_storage_format_to_name(REF_STORAGE_FORMAT_DEFAULT));
+ strbuf_addf(buf, "default-hash: %s\n", hash_algos[GIT_HASH_DEFAULT].name);
}
}
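
GIT_HASH_DEFAULT from the hash.h hunk above is what the new "default-hash" line in help.c reports. A small illustrative sketch, assuming the existing hash_algos[] table and its name/hexsz fields; show_default_hash() is a made-up name. The hex length it prints (40 for SHA-1, 64 for SHA-256) is the same value gitk's new hashlength variable derives from `git rev-parse --show-object-format`.

    #include "git-compat-util.h"
    #include "hash.h"

    /* Illustrative only: report the compiled-in default hash algorithm. */
    static void show_default_hash(void)
    {
        const struct git_hash_algo *algo = &hash_algos[GIT_HASH_DEFAULT];

        fprintf(stderr, "default-hash: %s (%u hex digits)\n",
                algo->name, (unsigned int)algo->hexsz);
    }
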
diff --git a/http-backend.c b/http-backend.c
index 0c575aa88a..ad8c403749 100644
--- a/http-backend.c
+++ b/http-backend.c
@@ -18,7 +18,7 @@
#include "url.h"
#include "strvec.h"
#include "packfile.h"
-#include "object-store.h"
+#include "odb.h"
#include "protocol.h"
#include "date.h"
#include "write-or-die.h"
diff --git a/http-push.c b/http-push.c
index f5a92529a8..91a5465afb 100644
--- a/http-push.c
+++ b/http-push.c
@@ -20,7 +20,7 @@
#include "url.h"
#include "packfile.h"
#include "object-file.h"
-#include "object-store.h"
+#include "odb.h"
#include "commit-reach.h"
#ifdef EXPAT_NEEDS_XMLPARSE_H
@@ -369,8 +369,8 @@ static void start_put(struct transfer_request *request)
ssize_t size;
git_zstream stream;
- unpacked = repo_read_object_file(the_repository, &request->obj->oid,
- &type, &len);
+ unpacked = odb_read_object(the_repository->objects, &request->obj->oid,
+ &type, &len);
hdrlen = format_object_header(hdr, sizeof(hdr), type, len);
/* Set it up */
@@ -1447,8 +1447,8 @@ static void one_remote_ref(const char *refname)
* may be required for updating server info later.
*/
if (repo->can_update_info_refs &&
- !has_object(the_repository, &ref->old_oid,
- HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR)) {
+ !odb_has_object(the_repository->objects, &ref->old_oid,
+ HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR)) {
obj = lookup_unknown_object(the_repository, &ref->old_oid);
fprintf(stderr, " fetch %s for %s\n",
oid_to_hex(&ref->old_oid), refname);
@@ -1653,14 +1653,16 @@ static int delete_remote_branch(const char *pattern, int force)
return error("Remote HEAD symrefs too deep");
if (is_null_oid(&head_oid))
return error("Unable to resolve remote HEAD");
- if (!has_object(the_repository, &head_oid, HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR))
+ if (!odb_has_object(the_repository->objects, &head_oid,
+ HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR))
return error("Remote HEAD resolves to object %s\nwhich does not exist locally, perhaps you need to fetch?", oid_to_hex(&head_oid));
/* Remote branch must resolve to a known object */
if (is_null_oid(&remote_ref->old_oid))
return error("Unable to resolve remote branch %s",
remote_ref->name);
- if (!has_object(the_repository, &remote_ref->old_oid, HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR))
+ if (!odb_has_object(the_repository->objects, &remote_ref->old_oid,
+ HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR))
return error("Remote branch %s resolves to object %s\nwhich does not exist locally, perhaps you need to fetch?", remote_ref->name, oid_to_hex(&remote_ref->old_oid));
/* Remote branch must be an ancestor of remote HEAD */
@@ -1881,8 +1883,8 @@ int cmd_main(int argc, const char **argv)
if (!force_all &&
!is_null_oid(&ref->old_oid) &&
!ref->force) {
- if (!has_object(the_repository, &ref->old_oid,
- HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR) ||
+ if (!odb_has_object(the_repository->objects, &ref->old_oid,
+ HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR) ||
!ref_newer(&ref->peer_ref->new_oid,
&ref->old_oid)) {
/*
diff --git a/http-walker.c b/http-walker.c
index 463f7b119a..0f7ae46d7f 100644
--- a/http-walker.c
+++ b/http-walker.c
@@ -10,7 +10,7 @@
#include "transport.h"
#include "packfile.h"
#include "object-file.h"
-#include "object-store.h"
+#include "odb.h"
struct alt_base {
char *base;
@@ -138,8 +138,8 @@ static int fill_active_slot(void *data UNUSED)
list_for_each_safe(pos, tmp, head) {
obj_req = list_entry(pos, struct object_request, node);
if (obj_req->state == WAITING) {
- if (has_object(the_repository, &obj_req->oid,
- HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR))
+ if (odb_has_object(the_repository->objects, &obj_req->oid,
+ HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR))
obj_req->state = COMPLETE;
else {
start_object_request(obj_req);
@@ -497,8 +497,8 @@ static int fetch_object(struct walker *walker, const struct object_id *oid)
if (!obj_req)
return error("Couldn't find request for %s in the queue", hex);
- if (has_object(the_repository, &obj_req->oid,
- HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR)) {
+ if (odb_has_object(the_repository->objects, &obj_req->oid,
+ HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR)) {
if (obj_req->req)
abort_http_object_request(&obj_req->req);
abort_object_request(obj_req);
@@ -543,7 +543,7 @@ static int fetch_object(struct walker *walker, const struct object_id *oid)
ret = error("File %s has bad hash", hex);
} else if (req->rename < 0) {
struct strbuf buf = STRBUF_INIT;
- odb_loose_path(the_repository->objects->odb, &buf, &req->oid);
+ odb_loose_path(the_repository->objects->sources, &buf, &req->oid);
ret = error("unable to write sha1 filename %s", buf.buf);
strbuf_release(&buf);
}
diff --git a/http.c b/http.c
index d88e79fbde..9b62f627dc 100644
--- a/http.c
+++ b/http.c
@@ -19,7 +19,7 @@
#include "packfile.h"
#include "string-list.h"
#include "object-file.h"
-#include "object-store.h"
+#include "odb.h"
#include "tempfile.h"
static struct trace_key trace_curl = TRACE_KEY_INIT(CURL);
@@ -2662,7 +2662,7 @@ struct http_object_request *new_http_object_request(const char *base_url,
oidcpy(&freq->oid, oid);
freq->localfile = -1;
- odb_loose_path(the_repository->objects->odb, &filename, oid);
+ odb_loose_path(the_repository->objects->sources, &filename, oid);
strbuf_addf(&freq->tmpfile, "%s.temp", filename.buf);
strbuf_addf(&prevfile, "%s.prev", filename.buf);
@@ -2814,7 +2814,7 @@ int finish_http_object_request(struct http_object_request *freq)
unlink_or_warn(freq->tmpfile.buf);
return -1;
}
- odb_loose_path(the_repository->objects->odb, &filename, &freq->oid);
+ odb_loose_path(the_repository->objects->sources, &filename, &freq->oid);
freq->rename = finalize_object_file(freq->tmpfile.buf, filename.buf);
strbuf_release(&filename);
diff --git a/ident.c b/ident.c
index 967895d885..281e830573 100644
--- a/ident.c
+++ b/ident.c
@@ -412,6 +412,10 @@ void apply_mailmap_to_header(struct strbuf *buf, const char **header,
found_header = 1;
buf_offset += endp - line;
buf_offset += rewrite_ident_line(person, endp - person, buf, mailmap);
+ /* Recompute endp after potential buffer reallocation */
+ endp = buf->buf + buf_offset;
+ if (*endp == '\n')
+ buf_offset++;
break;
}
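
The ident.c hunk above fixes a stale-pointer bug: rewrite_ident_line() may grow the strbuf and relocate its backing storage, so endp must be recomputed from buf->buf plus an offset. A minimal self-contained sketch of that invariant, with made-up buffer contents; keep_offsets_not_pointers() is a hypothetical name.

    #include "git-compat-util.h"
    #include "strbuf.h"

    static void keep_offsets_not_pointers(void)
    {
        struct strbuf buf = STRBUF_INIT;
        const char *name;
        size_t off;

        strbuf_addstr(&buf, "Author: ");
        off = buf.len;                    /* offsets survive reallocation */
        strbuf_addstr(&buf, "A U Thor <author@example.com>\n");
        name = buf.buf + off;             /* recompute only after growth */
        fprintf(stderr, "%s", name);
        strbuf_release(&buf);
    }
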
diff --git a/imap-send.c b/imap-send.c
index 2e812f5a6e..f5a656ac71 100644
--- a/imap-send.c
+++ b/imap-send.c
@@ -25,6 +25,7 @@
#define DISABLE_SIGN_COMPARE_WARNINGS
#include "git-compat-util.h"
+#include "advice.h"
#include "config.h"
#include "credential.h"
#include "gettext.h"
@@ -45,13 +46,21 @@
#endif
static int verbosity;
+static int list_folders;
static int use_curl = USE_CURL_DEFAULT;
+static char *opt_folder;
-static const char * const imap_send_usage[] = { "git imap-send [-v] [-q] [--[no-]curl] < <mbox>", NULL };
+static char const * const imap_send_usage[] = {
+ N_("git imap-send [-v] [-q] [--[no-]curl] [(--folder|-f) <folder>] < <mbox>"),
+ "git imap-send --list",
+ NULL
+};
static struct option imap_send_options[] = {
OPT__VERBOSITY(&verbosity),
OPT_BOOL(0, "curl", &use_curl, "use libcurl to communicate with the IMAP server"),
+ OPT_STRING('f', "folder", &opt_folder, "folder", "specify the IMAP folder"),
+ OPT_BOOL(0, "list", &list_folders, "list all folders on the IMAP server"),
OPT_END()
};
@@ -139,7 +148,10 @@ enum CAPABILITY {
LITERALPLUS,
NAMESPACE,
STARTTLS,
- AUTH_CRAM_MD5
+ AUTH_PLAIN,
+ AUTH_CRAM_MD5,
+ AUTH_OAUTHBEARER,
+ AUTH_XOAUTH2,
};
static const char *cap_list[] = {
@@ -148,7 +160,10 @@ static const char *cap_list[] = {
"LITERAL+",
"NAMESPACE",
"STARTTLS",
+ "AUTH=PLAIN",
"AUTH=CRAM-MD5",
+ "AUTH=OAUTHBEARER",
+ "AUTH=XOAUTH2",
};
#define RESP_OK 0
@@ -197,7 +212,7 @@ static int ssl_socket_connect(struct imap_socket *sock UNUSED,
const struct imap_server_conf *cfg UNUSED,
int use_tls_only UNUSED)
{
- fprintf(stderr, "SSL requested but SSL support not compiled in\n");
+ fprintf(stderr, "SSL requested, but SSL support is not compiled in\n");
return -1;
}
@@ -421,7 +436,7 @@ static int buffer_gets(struct imap_buffer *b, char **s)
if (b->buf[b->offset + 1] == '\n') {
b->buf[b->offset] = 0; /* terminate the string */
b->offset += 2; /* next line */
- if (0 < verbosity)
+ if ((0 < verbosity) || (list_folders && strstr(*s, "* LIST")))
puts(*s);
return 0;
}
@@ -847,6 +862,38 @@ static char hexchar(unsigned int b)
}
#define ENCODED_SIZE(n) (4 * DIV_ROUND_UP((n), 3))
+static char *plain_base64(const char *user, const char *pass)
+{
+ struct strbuf raw = STRBUF_INIT;
+ int b64_len;
+ char *b64;
+
+ /*
+ * Compose the PLAIN string
+ *
+	 * The username and password are combined into one string and base64-encoded:
+	 * "\0user\0pass"
+	 *
+	 * The method is described in RFC4616.
+ *
+ * https://datatracker.ietf.org/doc/html/rfc4616
+ */
+ strbuf_addch(&raw, '\0');
+ strbuf_addstr(&raw, user);
+ strbuf_addch(&raw, '\0');
+ strbuf_addstr(&raw, pass);
+
+ b64 = xmallocz(ENCODED_SIZE(raw.len));
+ b64_len = EVP_EncodeBlock((unsigned char *)b64, (unsigned char *)raw.buf, raw.len);
+ strbuf_release(&raw);
+
+ if (b64_len < 0) {
+ free(b64);
+ return NULL;
+ }
+ return b64;
+}
+
static char *cram(const char *challenge_64, const char *user, const char *pass)
{
int i, resp_len, encoded_len, decoded_len;
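
A worked example of the PLAIN initial response built by plain_base64() above, standalone against OpenSSL and with made-up credentials: for user "alice" and password "s3cret" the raw string "\0alice\0s3cret" is 13 bytes and encodes to "AGFsaWNlAHMzY3JldA==".

    /* build: cc plain-example.c -lcrypto */
    #include <stdio.h>
    #include <openssl/evp.h>

    int main(void)
    {
        const char raw[] = "\0alice\0s3cret";            /* 13 bytes, embedded NULs */
        unsigned char b64[4 * ((sizeof(raw) - 1 + 2) / 3) + 1];

        EVP_EncodeBlock(b64, (const unsigned char *)raw, (int)(sizeof(raw) - 1));
        printf("%s\n", (char *)b64);                     /* AGFsaWNlAHMzY3JldA== */
        return 0;
    }
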
@@ -885,17 +932,83 @@ static char *cram(const char *challenge_64, const char *user, const char *pass)
return (char *)response_64;
}
-#else
+static char *oauthbearer_base64(const char *user, const char *access_token)
+{
+ int b64_len;
+ char *raw, *b64;
-static char *cram(const char *challenge_64 UNUSED,
- const char *user UNUSED,
- const char *pass UNUSED)
+ /*
+ * Compose the OAUTHBEARER string
+ *
+	 * "n,a=" {User} ",^Ahost=" {Host} "^Aport=" {Port} "^Aauth=Bearer " {Access Token} "^A^A"
+ *
+ * The first part `n,a=" {User} ",` is the gs2 header described in RFC5801.
+ * * gs2-cb-flag `n` -> client does not support CB
+ * * gs2-authzid `a=" {User} "`
+ *
+	 * The second part is a set of key/value pairs (host, port and auth) as
+	 * described in RFC7628; this implementation sends only the "auth" pair.
+ *
+ * https://datatracker.ietf.org/doc/html/rfc5801
+ * https://datatracker.ietf.org/doc/html/rfc7628
+ */
+ raw = xstrfmt("n,a=%s,\001auth=Bearer %s\001\001", user, access_token);
+
+ /* Base64 encode */
+ b64 = xmallocz(ENCODED_SIZE(strlen(raw)));
+ b64_len = EVP_EncodeBlock((unsigned char *)b64, (unsigned char *)raw, strlen(raw));
+ free(raw);
+
+ if (b64_len < 0) {
+ free(b64);
+ return NULL;
+ }
+ return b64;
+}
+
+static char *xoauth2_base64(const char *user, const char *access_token)
{
- die("If you want to use CRAM-MD5 authenticate method, "
- "you have to build git-imap-send with OpenSSL library.");
+ int b64_len;
+ char *raw, *b64;
+
+ /*
+ * Compose the XOAUTH2 string
+ * "user=" {User} "^Aauth=Bearer " {Access Token} "^A^A"
+ * https://developers.google.com/workspace/gmail/imap/xoauth2-protocol#initial_client_response
+ */
+ raw = xstrfmt("user=%s\001auth=Bearer %s\001\001", user, access_token);
+
+ /* Base64 encode */
+ b64 = xmallocz(ENCODED_SIZE(strlen(raw)));
+ b64_len = EVP_EncodeBlock((unsigned char *)b64, (unsigned char *)raw, strlen(raw));
+ free(raw);
+
+ if (b64_len < 0) {
+ free(b64);
+ return NULL;
+ }
+ return b64;
}
-#endif
+static int auth_plain(struct imap_store *ctx, const char *prompt UNUSED)
+{
+ int ret;
+ char *b64;
+
+ b64 = plain_base64(ctx->cfg->user, ctx->cfg->pass);
+ if (!b64)
+ return error("PLAIN: base64 encoding failed");
+
+ /* Send the base64-encoded response */
+ ret = socket_write(&ctx->imap->buf.sock, b64, strlen(b64));
+ if (ret != (int)strlen(b64)) {
+ free(b64);
+ return error("IMAP error: sending PLAIN response failed");
+ }
+
+ free(b64);
+ return 0;
+}
static int auth_cram_md5(struct imap_store *ctx, const char *prompt)
{
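
For comparison, the raw (pre-base64) strings composed by oauthbearer_base64() and xoauth2_base64() above, printed for a made-up user "alice" and token "ya29.token"; \001 is the ^A byte from the comments, and only the base64 step is omitted.

    #include <stdio.h>

    int main(void)
    {
        const char *user = "alice", *token = "ya29.token";
        char oauthbearer[256], xoauth2[256];

        snprintf(oauthbearer, sizeof(oauthbearer),
                 "n,a=%s,\001auth=Bearer %s\001\001", user, token);
        snprintf(xoauth2, sizeof(xoauth2),
                 "user=%s\001auth=Bearer %s\001\001", user, token);
        printf("OAUTHBEARER: %s\n", oauthbearer);
        printf("XOAUTH2:     %s\n", xoauth2);
        return 0;
    }
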
@@ -905,21 +1018,72 @@ static int auth_cram_md5(struct imap_store *ctx, const char *prompt)
response = cram(prompt, ctx->cfg->user, ctx->cfg->pass);
ret = socket_write(&ctx->imap->buf.sock, response, strlen(response));
- if (ret != strlen(response))
- return error("IMAP error: sending response failed");
+ if (ret != strlen(response)) {
+ free(response);
+ return error("IMAP error: sending CRAM-MD5 response failed");
+ }
free(response);
return 0;
}
+static int auth_oauthbearer(struct imap_store *ctx, const char *prompt UNUSED)
+{
+ int ret;
+ char *b64;
+
+ b64 = oauthbearer_base64(ctx->cfg->user, ctx->cfg->pass);
+ if (!b64)
+ return error("OAUTHBEARER: base64 encoding failed");
+
+ /* Send the base64-encoded response */
+ ret = socket_write(&ctx->imap->buf.sock, b64, strlen(b64));
+ if (ret != (int)strlen(b64)) {
+ free(b64);
+ return error("IMAP error: sending OAUTHBEARER response failed");
+ }
+
+ free(b64);
+ return 0;
+}
+
+static int auth_xoauth2(struct imap_store *ctx, const char *prompt UNUSED)
+{
+ int ret;
+ char *b64;
+
+ b64 = xoauth2_base64(ctx->cfg->user, ctx->cfg->pass);
+ if (!b64)
+ return error("XOAUTH2: base64 encoding failed");
+
+ /* Send the base64-encoded response */
+ ret = socket_write(&ctx->imap->buf.sock, b64, strlen(b64));
+ if (ret != (int)strlen(b64)) {
+ free(b64);
+ return error("IMAP error: sending XOAUTH2 response failed");
+ }
+
+ free(b64);
+ return 0;
+}
+
+#else
+
+#define auth_plain NULL
+#define auth_cram_md5 NULL
+#define auth_oauthbearer NULL
+#define auth_xoauth2 NULL
+
+#endif
+
static void server_fill_credential(struct imap_server_conf *srvc, struct credential *cred)
{
if (srvc->user && srvc->pass)
return;
cred->protocol = xstrdup(srvc->use_ssl ? "imaps" : "imap");
- cred->host = xstrdup(srvc->host);
+ cred->host = xstrfmt("%s:%d", srvc->host, srvc->port);
cred->username = xstrdup_or_null(srvc->user);
cred->password = xstrdup_or_null(srvc->pass);
@@ -932,6 +1096,38 @@ static void server_fill_credential(struct imap_server_conf *srvc, struct credent
srvc->pass = xstrdup(cred->password);
}
+static int try_auth_method(struct imap_server_conf *srvc,
+ struct imap_store *ctx,
+ struct imap *imap,
+ const char *auth_method,
+ enum CAPABILITY cap,
+ int (*fn)(struct imap_store *, const char *))
+{
+ struct imap_cmd_cb cb = {0};
+
+ if (!CAP(cap)) {
+ fprintf(stderr, "You specified "
+ "%s as authentication method, "
+ "but %s doesn't support it.\n",
+ auth_method, srvc->host);
+ return -1;
+ }
+ cb.cont = fn;
+
+ if (NOT_CONSTANT(!cb.cont)) {
+		fprintf(stderr, "If you want to use the %s authentication mechanism, "
+			"you have to build git-imap-send with the OpenSSL library.\n",
+			auth_method);
+ return -1;
+ }
+ if (imap_exec(ctx, &cb, "AUTHENTICATE %s", auth_method) != RESP_OK) {
+ fprintf(stderr, "IMAP error: AUTHENTICATE %s failed\n",
+ auth_method);
+ return -1;
+ }
+ return 0;
+}
+
static struct imap_store *imap_open_store(struct imap_server_conf *srvc, const char *folder)
{
struct credential cred = CREDENTIAL_INIT;
@@ -964,7 +1160,7 @@ static struct imap_store *imap_open_store(struct imap_server_conf *srvc, const c
imap->buf.sock.fd[0] = tunnel.out;
imap->buf.sock.fd[1] = tunnel.in;
- imap_info("ok\n");
+ imap_info("OK\n");
} else {
#ifndef NO_IPV6
struct addrinfo hints, *ai0, *ai;
@@ -983,7 +1179,7 @@ static struct imap_store *imap_open_store(struct imap_server_conf *srvc, const c
fprintf(stderr, "getaddrinfo: %s\n", gai_strerror(gai));
goto bail;
}
- imap_info("ok\n");
+ imap_info("OK\n");
for (ai0 = ai; ai; ai = ai->ai_next) {
char addr[NI_MAXHOST];
@@ -1021,7 +1217,7 @@ static struct imap_store *imap_open_store(struct imap_server_conf *srvc, const c
perror("gethostbyname");
goto bail;
}
- imap_info("ok\n");
+ imap_info("OK\n");
addr.sin_addr.s_addr = *((int *) he->h_addr_list[0]);
@@ -1035,7 +1231,7 @@ static struct imap_store *imap_open_store(struct imap_server_conf *srvc, const c
}
#endif
if (s < 0) {
- fputs("Error: unable to connect to server.\n", stderr);
+ fputs("error: unable to connect to server\n", stderr);
goto bail;
}
@@ -1047,7 +1243,7 @@ static struct imap_store *imap_open_store(struct imap_server_conf *srvc, const c
close(s);
goto bail;
}
- imap_info("ok\n");
+ imap_info("OK\n");
}
/* read the greeting string */
@@ -1087,30 +1283,25 @@ static struct imap_store *imap_open_store(struct imap_server_conf *srvc, const c
server_fill_credential(srvc, &cred);
if (srvc->auth_method) {
- struct imap_cmd_cb cb;
-
- if (!strcmp(srvc->auth_method, "CRAM-MD5")) {
- if (!CAP(AUTH_CRAM_MD5)) {
- fprintf(stderr, "You specified "
- "CRAM-MD5 as authentication method, "
- "but %s doesn't support it.\n", srvc->host);
+ if (!strcmp(srvc->auth_method, "PLAIN")) {
+ if (try_auth_method(srvc, ctx, imap, "PLAIN", AUTH_PLAIN, auth_plain))
goto bail;
- }
- /* CRAM-MD5 */
-
- memset(&cb, 0, sizeof(cb));
- cb.cont = auth_cram_md5;
- if (imap_exec(ctx, &cb, "AUTHENTICATE CRAM-MD5") != RESP_OK) {
- fprintf(stderr, "IMAP error: AUTHENTICATE CRAM-MD5 failed\n");
+ } else if (!strcmp(srvc->auth_method, "CRAM-MD5")) {
+ if (try_auth_method(srvc, ctx, imap, "CRAM-MD5", AUTH_CRAM_MD5, auth_cram_md5))
+ goto bail;
+ } else if (!strcmp(srvc->auth_method, "OAUTHBEARER")) {
+ if (try_auth_method(srvc, ctx, imap, "OAUTHBEARER", AUTH_OAUTHBEARER, auth_oauthbearer))
+ goto bail;
+ } else if (!strcmp(srvc->auth_method, "XOAUTH2")) {
+ if (try_auth_method(srvc, ctx, imap, "XOAUTH2", AUTH_XOAUTH2, auth_xoauth2))
goto bail;
- }
} else {
- fprintf(stderr, "Unknown authentication method:%s\n", srvc->host);
+ fprintf(stderr, "unknown authentication mechanism: %s\n", srvc->auth_method);
goto bail;
}
} else {
if (CAP(NOLOGIN)) {
- fprintf(stderr, "Skipping account %s@%s, server forbids LOGIN\n",
+ fprintf(stderr, "skipping account %s@%s, server forbids LOGIN\n",
srvc->user, srvc->host);
goto bail;
}
@@ -1316,16 +1507,16 @@ static int git_imap_config(const char *var, const char *val,
FREE_AND_NULL(cfg->folder);
return git_config_string(&cfg->folder, var, val);
} else if (!strcmp("imap.user", var)) {
- FREE_AND_NULL(cfg->folder);
+ FREE_AND_NULL(cfg->user);
return git_config_string(&cfg->user, var, val);
} else if (!strcmp("imap.pass", var)) {
- FREE_AND_NULL(cfg->folder);
+ FREE_AND_NULL(cfg->pass);
return git_config_string(&cfg->pass, var, val);
} else if (!strcmp("imap.tunnel", var)) {
- FREE_AND_NULL(cfg->folder);
+ FREE_AND_NULL(cfg->tunnel);
return git_config_string(&cfg->tunnel, var, val);
} else if (!strcmp("imap.authmethod", var)) {
- FREE_AND_NULL(cfg->folder);
+ FREE_AND_NULL(cfg->auth_method);
return git_config_string(&cfg->auth_method, var, val);
} else if (!strcmp("imap.port", var)) {
cfg->port = git_config_int(var, val, ctx->kvi);
@@ -1366,7 +1557,8 @@ static int append_msgs_to_imap(struct imap_server_conf *server,
}
ctx->name = server->folder;
- fprintf(stderr, "sending %d message%s\n", total, (total != 1) ? "s" : "");
+ fprintf(stderr, "Sending %d message%s to %s folder...\n",
+ total, (total != 1) ? "s" : "", server->folder);
while (1) {
unsigned percent = n * 100 / total;
@@ -1388,6 +1580,26 @@ static int append_msgs_to_imap(struct imap_server_conf *server,
return 0;
}
+static int list_imap_folders(struct imap_server_conf *server)
+{
+ struct imap_store *ctx = imap_open_store(server, "INBOX");
+ if (!ctx) {
+ fprintf(stderr, "failed to connect to IMAP server\n");
+ return 1;
+ }
+
+ fprintf(stderr, "Fetching the list of available folders...\n");
+ /* Issue the LIST command and print the results */
+ if (imap_exec(ctx, NULL, "LIST \"\" \"*\"") != RESP_OK) {
+ fprintf(stderr, "failed to list folders\n");
+ imap_close_store(ctx);
+ return 1;
+ }
+
+ imap_close_store(ctx);
+ return 0;
+}
+
#ifdef USE_CURL_FOR_IMAP_SEND
static CURL *setup_curl(struct imap_server_conf *srvc, struct credential *cred)
{
@@ -1405,29 +1617,51 @@ static CURL *setup_curl(struct imap_server_conf *srvc, struct credential *cred)
server_fill_credential(srvc, cred);
curl_easy_setopt(curl, CURLOPT_USERNAME, srvc->user);
- curl_easy_setopt(curl, CURLOPT_PASSWORD, srvc->pass);
+
+ /*
+ * Use CURLOPT_PASSWORD irrespective of whether there is
+ * an auth method specified or not, unless it's OAuth2.0,
+ * where we use CURLOPT_XOAUTH2_BEARER.
+ */
+ if (!srvc->auth_method ||
+ (strcmp(srvc->auth_method, "XOAUTH2") &&
+ strcmp(srvc->auth_method, "OAUTHBEARER")))
+ curl_easy_setopt(curl, CURLOPT_PASSWORD, srvc->pass);
strbuf_addstr(&path, srvc->use_ssl ? "imaps://" : "imap://");
strbuf_addstr(&path, srvc->host);
if (!path.len || path.buf[path.len - 1] != '/')
strbuf_addch(&path, '/');
- uri_encoded_folder = curl_easy_escape(curl, srvc->folder, 0);
- if (!uri_encoded_folder)
- die("failed to encode server folder");
- strbuf_addstr(&path, uri_encoded_folder);
- curl_free(uri_encoded_folder);
+ if (!list_folders) {
+ uri_encoded_folder = curl_easy_escape(curl, srvc->folder, 0);
+ if (!uri_encoded_folder)
+ die("failed to encode server folder");
+ strbuf_addstr(&path, uri_encoded_folder);
+ curl_free(uri_encoded_folder);
+ }
curl_easy_setopt(curl, CURLOPT_URL, path.buf);
strbuf_release(&path);
curl_easy_setopt(curl, CURLOPT_PORT, (long)srvc->port);
if (srvc->auth_method) {
- struct strbuf auth = STRBUF_INIT;
- strbuf_addstr(&auth, "AUTH=");
- strbuf_addstr(&auth, srvc->auth_method);
- curl_easy_setopt(curl, CURLOPT_LOGIN_OPTIONS, auth.buf);
- strbuf_release(&auth);
+ if (!strcmp(srvc->auth_method, "XOAUTH2") ||
+ !strcmp(srvc->auth_method, "OAUTHBEARER")) {
+
+ /*
+			 * Although CURLOPT_XOAUTH2_BEARER looks as if it only
+			 * supports XOAUTH2, in practice libcurl picks whichever
+			 * of OAUTHBEARER and XOAUTH2 suits the server best.
+ */
+ curl_easy_setopt(curl, CURLOPT_XOAUTH2_BEARER, srvc->pass);
+ } else {
+ struct strbuf auth = STRBUF_INIT;
+ strbuf_addstr(&auth, "AUTH=");
+ strbuf_addstr(&auth, srvc->auth_method);
+ curl_easy_setopt(curl, CURLOPT_LOGIN_OPTIONS, auth.buf);
+ strbuf_release(&auth);
+ }
}
if (!srvc->use_ssl)
@@ -1436,10 +1670,6 @@ static CURL *setup_curl(struct imap_server_conf *srvc, struct credential *cred)
curl_easy_setopt(curl, CURLOPT_SSL_VERIFYPEER, (long)srvc->ssl_verify);
curl_easy_setopt(curl, CURLOPT_SSL_VERIFYHOST, (long)srvc->ssl_verify);
- curl_easy_setopt(curl, CURLOPT_READFUNCTION, fread_buffer);
-
- curl_easy_setopt(curl, CURLOPT_UPLOAD, 1L);
-
if (0 < verbosity || getenv("GIT_CURL_VERBOSE"))
http_trace_curl_no_data();
setup_curl_trace(curl);
@@ -1458,9 +1688,14 @@ static int curl_append_msgs_to_imap(struct imap_server_conf *server,
struct credential cred = CREDENTIAL_INIT;
curl = setup_curl(server, &cred);
+
+ curl_easy_setopt(curl, CURLOPT_READFUNCTION, fread_buffer);
+ curl_easy_setopt(curl, CURLOPT_UPLOAD, 1L);
+
curl_easy_setopt(curl, CURLOPT_READDATA, &msgbuf);
- fprintf(stderr, "sending %d message%s\n", total, (total != 1) ? "s" : "");
+ fprintf(stderr, "Sending %d message%s to %s folder...\n",
+ total, (total != 1) ? "s" : "", server->folder);
while (1) {
unsigned percent = n * 100 / total;
int prev_len;
@@ -1503,6 +1738,31 @@ static int curl_append_msgs_to_imap(struct imap_server_conf *server,
return res != CURLE_OK;
}
+
+static int curl_list_imap_folders(struct imap_server_conf *server)
+{
+ CURL *curl;
+ CURLcode res = CURLE_OK;
+ struct credential cred = CREDENTIAL_INIT;
+
+ fprintf(stderr, "Fetching the list of available folders...\n");
+ curl = setup_curl(server, &cred);
+ res = curl_easy_perform(curl);
+
+ curl_easy_cleanup(curl);
+ curl_global_cleanup();
+
+ if (cred.username) {
+ if (res == CURLE_OK)
+ credential_approve(the_repository, &cred);
+ else if (res == CURLE_LOGIN_DENIED)
+ credential_reject(the_repository, &cred);
+ }
+
+ credential_clear(&cred);
+
+ return res != CURLE_OK;
+}
#endif
int cmd_main(int argc, const char **argv)
@@ -1520,6 +1780,11 @@ int cmd_main(int argc, const char **argv)
argc = parse_options(argc, (const char **)argv, "", imap_send_options, imap_send_usage, 0);
+ if (opt_folder) {
+ free(server.folder);
+ server.folder = xstrdup(opt_folder);
+ }
+
if (argc)
usage_with_options(imap_send_usage, imap_send_options);
@@ -1538,20 +1803,37 @@ int cmd_main(int argc, const char **argv)
if (!server.port)
server.port = server.use_ssl ? 993 : 143;
- if (!server.folder) {
- fprintf(stderr, "no imap store specified\n");
- ret = 1;
- goto out;
- }
if (!server.host) {
if (!server.tunnel) {
- fprintf(stderr, "no imap host specified\n");
+ error(_("no IMAP host specified"));
+ advise(_("set the IMAP host with 'git config imap.host <host>'.\n"
+ "(e.g., 'git config imap.host imaps://imap.example.com')"));
ret = 1;
goto out;
}
server.host = xstrdup("tunnel");
}
+ if (list_folders) {
+ if (server.tunnel)
+ ret = list_imap_folders(&server);
+#ifdef USE_CURL_FOR_IMAP_SEND
+ else if (use_curl)
+ ret = curl_list_imap_folders(&server);
+#endif
+ else
+ ret = list_imap_folders(&server);
+ goto out;
+ }
+
+ if (!server.folder) {
+ error(_("no IMAP folder specified"));
+ advise(_("set the target folder with 'git config imap.folder <folder>'.\n"
+ "(e.g., 'git config imap.folder Drafts')"));
+ ret = 1;
+ goto out;
+ }
+
/* read the messages */
if (strbuf_read(&all_msgs, 0, 0) < 0) {
error_errno(_("could not read from stdin"));
@@ -1567,7 +1849,7 @@ int cmd_main(int argc, const char **argv)
total = count_messages(&all_msgs);
if (!total) {
- fprintf(stderr, "no messages to send\n");
+ fprintf(stderr, "no messages found to send\n");
ret = 1;
goto out;
}
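For illustration only (none of this is in the patch): the hunks above make setup_curl() skip CURLOPT_PASSWORD for OAuth and leave the folder component off the URL when folder listing is requested (the list_folders path), so that libcurl issues the LIST itself. A minimal standalone sketch of that combination; imap.example.com, user and token are placeholder inputs, not values taken from the patch.

	#include <curl/curl.h>

	/* List mailboxes over IMAPS using an OAuth bearer token. */
	static int list_folders_oauth(const char *user, const char *token)
	{
		CURL *curl = curl_easy_init();
		CURLcode res;

		if (!curl)
			return -1;
		curl_easy_setopt(curl, CURLOPT_USERNAME, user);
		/* bearer token instead of CURLOPT_PASSWORD */
		curl_easy_setopt(curl, CURLOPT_XOAUTH2_BEARER, token);
		/* no folder appended: libcurl lists the mailboxes at the root */
		curl_easy_setopt(curl, CURLOPT_URL, "imaps://imap.example.com/");
		res = curl_easy_perform(curl);
		curl_easy_cleanup(curl);
		return res == CURLE_OK ? 0 : 1;
	}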
diff --git a/line-log.c b/line-log.c
index 628e3fe3ae..07f2154e84 100644
--- a/line-log.c
+++ b/line-log.c
@@ -1172,12 +1172,13 @@ static int bloom_filter_check(struct rev_info *rev,
return 0;
while (!result && range) {
- fill_bloom_key(range->path, strlen(range->path), &key, rev->bloom_filter_settings);
+ bloom_key_fill(&key, range->path, strlen(range->path),
+ rev->bloom_filter_settings);
if (bloom_filter_contains(filter, &key, rev->bloom_filter_settings))
result = 1;
- clear_bloom_key(&key);
+ bloom_key_clear(&key);
range = range->next;
}
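As context for the rename above (fill_bloom_key/clear_bloom_key becoming bloom_key_fill/bloom_key_clear), here is a hedged sketch of the calling pattern the line-log code follows; "filter" and "settings" are assumed to come from the commit-graph machinery.

	/* Probe a changed-path Bloom filter for one path. */
	struct bloom_key key;

	bloom_key_fill(&key, path, strlen(path), settings);
	if (bloom_filter_contains(filter, &key, settings))
		result = 1;	/* path *may* be touched; confirm with a real diff */
	bloom_key_clear(&key);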
diff --git a/list-objects-filter.c b/list-objects-filter.c
index 78b397bc19..7ecd4d9ef5 100644
--- a/list-objects-filter.c
+++ b/list-objects-filter.c
@@ -12,7 +12,7 @@
#include "oidmap.h"
#include "oidset.h"
#include "object-name.h"
-#include "object-store.h"
+#include "odb.h"
/* Remember to update object flag allocation in object.h */
/*
@@ -310,7 +310,7 @@ static enum list_objects_filter_result filter_blobs_limit(
assert(obj->type == OBJ_BLOB);
assert((obj->flags & SEEN) == 0);
- t = oid_object_info(r, &obj->oid, &object_length);
+ t = odb_read_object_info(r->objects, &obj->oid, &object_length);
if (t != OBJ_BLOB) { /* probably OBJ_NONE */
/*
* We DO NOT have the blob locally, so we cannot
diff --git a/list-objects.c b/list-objects.c
index 597114281f..42c17d9573 100644
--- a/list-objects.c
+++ b/list-objects.c
@@ -14,7 +14,7 @@
#include "list-objects-filter.h"
#include "list-objects-filter-options.h"
#include "packfile.h"
-#include "object-store.h"
+#include "odb.h"
#include "trace.h"
#include "environment.h"
@@ -74,8 +74,8 @@ static void process_blob(struct traversal_context *ctx,
* of missing objects.
*/
if (ctx->revs->exclude_promisor_objects &&
- !has_object(the_repository, &obj->oid,
- HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR) &&
+ !odb_has_object(the_repository->objects, &obj->oid,
+ HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR) &&
is_promisor_object(ctx->revs->repo, &obj->oid))
return;
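The hunks above are instances of the same mechanical conversion: object-existence and object-info queries now take the repository's object database handle explicitly instead of the repository pointer. A hedged sketch of the new calling convention, with hypothetical wrapper names:

	/* was: has_object(r, oid, flags) */
	static int blob_is_present(struct repository *r, const struct object_id *oid)
	{
		return odb_has_object(r->objects, oid,
				      HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR);
	}

	/* was: oid_object_info(r, oid, sizep) */
	static int blob_type(struct repository *r, const struct object_id *oid,
			     unsigned long *sizep)
	{
		return odb_read_object_info(r->objects, oid, sizep);
	}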
diff --git a/log-tree.c b/log-tree.c
index 1d05dc1c70..233bf9f227 100644
--- a/log-tree.c
+++ b/log-tree.c
@@ -176,7 +176,7 @@ static int add_ref_decoration(const char *refname, const char *referent UNUSED,
return 0;
}
- objtype = oid_object_info(the_repository, oid, NULL);
+ objtype = odb_read_object_info(the_repository->objects, oid, NULL);
if (objtype < 0)
return 0;
obj = lookup_object_by_type(the_repository, oid, objtype);
diff --git a/loose.c b/loose.c
index bb602aaa36..519f5db793 100644
--- a/loose.c
+++ b/loose.c
@@ -1,7 +1,7 @@
#include "git-compat-util.h"
#include "hash.h"
#include "path.h"
-#include "object-store.h"
+#include "odb.h"
#include "hex.h"
#include "repository.h"
#include "wrapper.h"
@@ -44,36 +44,36 @@ static int insert_oid_pair(kh_oid_map_t *map, const struct object_id *key, const
return 1;
}
-static int insert_loose_map(struct object_directory *odb,
+static int insert_loose_map(struct odb_source *source,
const struct object_id *oid,
const struct object_id *compat_oid)
{
- struct loose_object_map *map = odb->loose_map;
+ struct loose_object_map *map = source->loose_map;
int inserted = 0;
inserted |= insert_oid_pair(map->to_compat, oid, compat_oid);
inserted |= insert_oid_pair(map->to_storage, compat_oid, oid);
if (inserted)
- oidtree_insert(odb->loose_objects_cache, compat_oid);
+ oidtree_insert(source->loose_objects_cache, compat_oid);
return inserted;
}
-static int load_one_loose_object_map(struct repository *repo, struct object_directory *dir)
+static int load_one_loose_object_map(struct repository *repo, struct odb_source *source)
{
struct strbuf buf = STRBUF_INIT, path = STRBUF_INIT;
FILE *fp;
- if (!dir->loose_map)
- loose_object_map_init(&dir->loose_map);
- if (!dir->loose_objects_cache) {
- ALLOC_ARRAY(dir->loose_objects_cache, 1);
- oidtree_init(dir->loose_objects_cache);
+ if (!source->loose_map)
+ loose_object_map_init(&source->loose_map);
+ if (!source->loose_objects_cache) {
+ ALLOC_ARRAY(source->loose_objects_cache, 1);
+ oidtree_init(source->loose_objects_cache);
}
- insert_loose_map(dir, repo->hash_algo->empty_tree, repo->compat_hash_algo->empty_tree);
- insert_loose_map(dir, repo->hash_algo->empty_blob, repo->compat_hash_algo->empty_blob);
- insert_loose_map(dir, repo->hash_algo->null_oid, repo->compat_hash_algo->null_oid);
+ insert_loose_map(source, repo->hash_algo->empty_tree, repo->compat_hash_algo->empty_tree);
+ insert_loose_map(source, repo->hash_algo->empty_blob, repo->compat_hash_algo->empty_blob);
+ insert_loose_map(source, repo->hash_algo->null_oid, repo->compat_hash_algo->null_oid);
repo_common_path_replace(repo, &path, "objects/loose-object-idx");
fp = fopen(path.buf, "rb");
@@ -93,7 +93,7 @@ static int load_one_loose_object_map(struct repository *repo, struct object_dire
parse_oid_hex_algop(p, &compat_oid, &p, repo->compat_hash_algo) ||
p != buf.buf + buf.len)
goto err;
- insert_loose_map(dir, &oid, &compat_oid);
+ insert_loose_map(source, &oid, &compat_oid);
}
strbuf_release(&buf);
@@ -107,15 +107,15 @@ err:
int repo_read_loose_object_map(struct repository *repo)
{
- struct object_directory *dir;
+ struct odb_source *source;
if (!should_use_loose_object_map(repo))
return 0;
- prepare_alt_odb(repo);
+ odb_prepare_alternates(repo->objects);
- for (dir = repo->objects->odb; dir; dir = dir->next) {
- if (load_one_loose_object_map(repo, dir) < 0) {
+ for (source = repo->objects->sources; source; source = source->next) {
+ if (load_one_loose_object_map(repo, source) < 0) {
return -1;
}
}
@@ -124,7 +124,7 @@ int repo_read_loose_object_map(struct repository *repo)
int repo_write_loose_object_map(struct repository *repo)
{
- kh_oid_map_t *map = repo->objects->odb->loose_map->to_compat;
+ kh_oid_map_t *map = repo->objects->sources->loose_map->to_compat;
struct lock_file lock;
int fd;
khiter_t iter;
@@ -212,7 +212,7 @@ int repo_add_loose_object_map(struct repository *repo, const struct object_id *o
if (!should_use_loose_object_map(repo))
return 0;
- inserted = insert_loose_map(repo->objects->odb, oid, compat_oid);
+ inserted = insert_loose_map(repo->objects->sources, oid, compat_oid);
if (inserted)
return write_one_object(repo, oid, compat_oid);
return 0;
@@ -223,12 +223,12 @@ int repo_loose_object_map_oid(struct repository *repo,
const struct git_hash_algo *to,
struct object_id *dest)
{
- struct object_directory *dir;
+ struct odb_source *source;
kh_oid_map_t *map;
khiter_t pos;
- for (dir = repo->objects->odb; dir; dir = dir->next) {
- struct loose_object_map *loose_map = dir->loose_map;
+ for (source = repo->objects->sources; source; source = source->next) {
+ struct loose_object_map *loose_map = source->loose_map;
if (!loose_map)
continue;
map = (to == repo->compat_hash_algo) ?
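The loose.c changes rename struct object_directory to struct odb_source and the list head from odb to sources, but the iteration idiom is unchanged. A hedged sketch of that idiom, with a hypothetical callback for illustration:

	/* Walk every object database source (local first, then alternates). */
	static void for_each_source(struct repository *repo,
				    void (*fn)(struct odb_source *source))
	{
		struct odb_source *source;

		odb_prepare_alternates(repo->objects);
		for (source = repo->objects->sources; source; source = source->next)
			fn(source);
	}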
diff --git a/mailinfo.c b/mailinfo.c
index ee4597da6b..b4e815b2d8 100644
--- a/mailinfo.c
+++ b/mailinfo.c
@@ -266,6 +266,8 @@ static void handle_content_type(struct mailinfo *mi, struct strbuf *line)
error("Too many boundaries to handle");
mi->input_error = -1;
mi->content_top = &mi->content[MAX_BOUNDARIES] - 1;
+ strbuf_release(boundary);
+ free(boundary);
return;
}
*(mi->content_top) = boundary;
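The mailinfo.c hunk plugs a leak on the "too many boundaries" error path: the boundary strbuf is heap-allocated, and ownership only transfers once it is stored in content_top, so bailing out early must release and free it. A sketch (not the actual mailinfo code) of that ownership rule:

	/* Allocate a boundary buffer; the caller owns it until it is stored. */
	static struct strbuf *make_boundary(const char *line)
	{
		struct strbuf *boundary = xmalloc(sizeof(*boundary));
		strbuf_init(boundary, 0);
		strbuf_addstr(boundary, line);
		return boundary;
	}

	/* Error path: tear down both the buffer and the struct. */
	static void drop_boundary(struct strbuf *boundary)
	{
		strbuf_release(boundary);	/* free the internal buffer */
		free(boundary);			/* free the struct itself */
	}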
diff --git a/mailmap.c b/mailmap.c
index 9e2642a043..56c72102d9 100644
--- a/mailmap.c
+++ b/mailmap.c
@@ -6,7 +6,7 @@
#include "string-list.h"
#include "mailmap.h"
#include "object-name.h"
-#include "object-store.h"
+#include "odb.h"
#include "setup.h"
char *git_mailmap_file;
@@ -196,7 +196,7 @@ int read_mailmap_blob(struct string_list *map, const char *name)
if (repo_get_oid(the_repository, name, &oid) < 0)
return 0;
- buf = repo_read_object_file(the_repository, &oid, &type, &size);
+ buf = odb_read_object(the_repository->objects, &oid, &type, &size);
if (!buf)
return error("unable to read mailmap object at %s", name);
if (type != OBJ_BLOB) {
diff --git a/match-trees.c b/match-trees.c
index 72922d5d64..5a8a5c39b0 100644
--- a/match-trees.c
+++ b/match-trees.c
@@ -7,7 +7,7 @@
#include "tree.h"
#include "tree-walk.h"
#include "object-file.h"
-#include "object-store.h"
+#include "odb.h"
#include "repository.h"
static int score_missing(unsigned mode)
@@ -63,7 +63,7 @@ static void *fill_tree_desc_strict(struct repository *r,
enum object_type type;
unsigned long size;
- buffer = repo_read_object_file(r, hash, &type, &size);
+ buffer = odb_read_object(r->objects, hash, &type, &size);
if (!buffer)
die("unable to read tree (%s)", oid_to_hex(hash));
if (type != OBJ_TREE)
@@ -199,7 +199,7 @@ static int splice_tree(struct repository *r,
if (*subpath)
subpath++;
- buf = repo_read_object_file(r, oid1, &type, &sz);
+ buf = odb_read_object(r->objects, oid1, &type, &sz);
if (!buf)
die("cannot read tree %s", oid_to_hex(oid1));
init_tree_desc(&desc, oid1, buf, sz);
diff --git a/merge-blobs.c b/merge-blobs.c
index 53f36dbc17..6fc2799417 100644
--- a/merge-blobs.c
+++ b/merge-blobs.c
@@ -4,7 +4,7 @@
#include "merge-ll.h"
#include "blob.h"
#include "merge-blobs.h"
-#include "object-store.h"
+#include "odb.h"
static int fill_mmfile_blob(mmfile_t *f, struct blob *obj)
{
@@ -12,8 +12,8 @@ static int fill_mmfile_blob(mmfile_t *f, struct blob *obj)
unsigned long size;
enum object_type type;
- buf = repo_read_object_file(the_repository, &obj->object.oid, &type,
- &size);
+ buf = odb_read_object(the_repository->objects, &obj->object.oid,
+ &type, &size);
if (!buf)
return -1;
if (type != OBJ_BLOB) {
@@ -79,8 +79,8 @@ void *merge_blobs(struct index_state *istate, const char *path,
return NULL;
if (!our)
our = their;
- return repo_read_object_file(the_repository, &our->object.oid,
- &type, size);
+ return odb_read_object(the_repository->objects, &our->object.oid,
+ &type, size);
}
if (fill_mmfile_blob(&f1, our) < 0)
diff --git a/merge-ort.c b/merge-ort.c
index 47b3d1730e..473ff61e36 100644
--- a/merge-ort.c
+++ b/merge-ort.c
@@ -39,7 +39,7 @@
#include "mem-pool.h"
#include "object-file.h"
#include "object-name.h"
-#include "object-store.h"
+#include "odb.h"
#include "oid-array.h"
#include "path.h"
#include "promisor-remote.h"
@@ -3629,7 +3629,7 @@ static int read_oid_strbuf(struct merge_options *opt,
void *buf;
enum object_type type;
unsigned long size;
- buf = repo_read_object_file(the_repository, oid, &type, &size);
+ buf = odb_read_object(the_repository->objects, oid, &type, &size);
if (!buf) {
path_msg(opt, ERROR_OBJECT_READ_FAILED, 0,
path, NULL, NULL, NULL,
@@ -4385,8 +4385,8 @@ static void prefetch_for_content_merges(struct merge_options *opt,
if ((ci->filemask & side_mask) &&
S_ISREG(vi->mode) &&
- oid_object_info_extended(opt->repo, &vi->oid, NULL,
- OBJECT_INFO_FOR_PREFETCH))
+ odb_read_object_info_extended(opt->repo->objects, &vi->oid, NULL,
+ OBJECT_INFO_FOR_PREFETCH))
oid_array_append(&to_fetch, &vi->oid);
}
}
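The prefetch hunk relies on odb_read_object_info_extended() with OBJECT_INFO_FOR_PREFETCH returning non-zero for objects that are not available locally, so their IDs can be batched into a single fetch. A hedged sketch of that probe-then-batch pattern; the helper name is hypothetical, while promisor_remote_get_direct() is the call merge-ort uses for the actual fetch:

	static void prefetch_missing(struct repository *repo,
				     const struct object_id *oids, size_t nr)
	{
		struct oid_array to_fetch = OID_ARRAY_INIT;
		size_t i;

		for (i = 0; i < nr; i++)
			if (odb_read_object_info_extended(repo->objects, &oids[i],
							  NULL, OBJECT_INFO_FOR_PREFETCH))
				oid_array_append(&to_fetch, &oids[i]);

		if (to_fetch.nr)
			promisor_remote_get_direct(repo, to_fetch.oid, to_fetch.nr);
		oid_array_clear(&to_fetch);
	}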
diff --git a/mergetools/vimdiff b/mergetools/vimdiff
index 78710858e8..fca1044f65 100644
--- a/mergetools/vimdiff
+++ b/mergetools/vimdiff
@@ -274,8 +274,8 @@ gen_cmd () {
# definition.
#
# The syntax of the "layout definitions" is explained in "Documentation/
- # mergetools/vimdiff.txt" but you can already intuitively understand how
- # it works by knowing that...
+ # mergetools/vimdiff.adoc" but you can already intuitively understand
+ # how it works by knowing that...
#
# * "+" means "a new vim tab"
# * "/" means "a new vim horizontal split"
diff --git a/meson.build b/meson.build
index 5225efb4a6..c043a79dca 100644
--- a/meson.build
+++ b/meson.build
@@ -245,7 +245,7 @@ time = find_program('time', dirs: program_path, required: get_option('benchmarks
# "/bin/sh" over a PATH-based lookup, which provides a working shell on most
# supported systems. This path is also the default shell path used by our
# Makefile. This lookup can be overridden via `program_path`.
-target_shell = find_program('sh', dirs: program_path + [ '/bin' ], native: false)
+target_shell = find_program('/bin/sh', 'sh', dirs: program_path, native: false)
# Sanity-check that programs required for the build exist.
foreach tool : ['cat', 'cut', 'grep', 'sort', 'tr', 'uname']
@@ -396,8 +396,8 @@ libgit_sources = [
'object-file-convert.c',
'object-file.c',
'object-name.c',
- 'object-store.c',
'object.c',
+ 'odb.c',
'oid-array.c',
'oidmap.c',
'oidset.c',
@@ -866,9 +866,11 @@ if host_machine.system() == 'cygwin' or host_machine.system() == 'windows'
endif
build_options_config.set_quoted('X', executable_suffix)
-python = import('python').find_installation('python3', required: get_option('python'))
-target_python = find_program('python3', native: false, required: python.found())
-if python.found()
+# Python is not used for our build system, but exclusively for git-p4.
+# Consequently we only need to determine whether Python is available for the
+# build target.
+target_python = find_program('python3', native: false, required: get_option('python'))
+if target_python.found()
build_options_config.set('NO_PYTHON', '')
else
libgit_c_args += '-DNO_PYTHON'
@@ -1357,10 +1359,6 @@ if host_machine.system() != 'windows'
endif
endif
-if compiler.has_member('struct sysinfo', 'totalram', prefix: '#include <sys/sysinfo.h>')
- libgit_c_args += '-DHAVE_SYSINFO'
-endif
-
if compiler.has_member('struct stat', 'st_mtimespec.tv_nsec', prefix: '#include <sys/stat.h>')
libgit_c_args += '-DUSE_ST_TIMESPEC'
elif not compiler.has_member('struct stat', 'st_mtim.tv_nsec', prefix: '#include <sys/stat.h>')
@@ -1446,17 +1444,6 @@ if compiler.compiles('''
libgit_c_args += '-DHAVE_CLOCK_MONOTONIC'
endif
-if not compiler.compiles('''
- #include <inttypes.h>
-
- void func(void)
- {
- uintmax_t x = 0;
- }
-''', name: 'uintmax_t')
- libgit_c_args += '-DNO_UINTMAX_T'
-endif
-
has_bsd_sysctl = false
if compiler.has_header('sys/sysctl.h')
if compiler.compiles('''
@@ -1475,6 +1462,12 @@ if compiler.has_header('sys/sysctl.h')
endif
endif
+if not has_bsd_sysctl
+ if compiler.has_member('struct sysinfo', 'totalram', prefix: '#include <sys/sysinfo.h>')
+ libgit_c_args += '-DHAVE_SYSINFO'
+ endif
+endif
+
if not meson.is_cross_build() and compiler.run('''
#include <stdio.h>
@@ -1770,7 +1763,7 @@ git_builtin = executable('git',
sources: builtin_sources + 'git.c',
dependencies: [libgit_commonmain],
install: true,
- install_dir: get_option('libexecdir') / 'git-core',
+ install_dir: git_exec_path,
)
bin_wrappers += git_builtin
@@ -1778,35 +1771,35 @@ test_dependencies += executable('git-daemon',
sources: 'daemon.c',
dependencies: [libgit_commonmain],
install: true,
- install_dir: get_option('libexecdir') / 'git-core',
+ install_dir: git_exec_path,
)
test_dependencies += executable('git-sh-i18n--envsubst',
sources: 'sh-i18n--envsubst.c',
dependencies: [libgit_commonmain],
install: true,
- install_dir: get_option('libexecdir') / 'git-core',
+ install_dir: git_exec_path,
)
bin_wrappers += executable('git-shell',
sources: 'shell.c',
dependencies: [libgit_commonmain],
install: true,
- install_dir: get_option('libexecdir') / 'git-core',
+ install_dir: git_exec_path,
)
test_dependencies += executable('git-http-backend',
sources: 'http-backend.c',
dependencies: [libgit_commonmain],
install: true,
- install_dir: get_option('libexecdir') / 'git-core',
+ install_dir: git_exec_path,
)
bin_wrappers += executable('scalar',
sources: 'scalar.c',
dependencies: [libgit_commonmain],
install: true,
- install_dir: get_option('libexecdir') / 'git-core',
+ install_dir: git_exec_path,
)
if curl.found()
@@ -1822,14 +1815,14 @@ if curl.found()
sources: 'remote-curl.c',
dependencies: [libgit_curl],
install: true,
- install_dir: get_option('libexecdir') / 'git-core',
+ install_dir: git_exec_path,
)
test_dependencies += executable('git-http-fetch',
sources: 'http-fetch.c',
dependencies: [libgit_curl],
install: true,
- install_dir: get_option('libexecdir') / 'git-core',
+ install_dir: git_exec_path,
)
if expat.found()
@@ -1837,7 +1830,7 @@ if curl.found()
sources: 'http-push.c',
dependencies: [libgit_curl],
install: true,
- install_dir: get_option('libexecdir') / 'git-core',
+ install_dir: git_exec_path,
)
endif
@@ -1848,7 +1841,7 @@ if curl.found()
)
install_symlink(alias + executable_suffix,
- install_dir: get_option('libexecdir') / 'git-core',
+ install_dir: git_exec_path,
pointing_to: 'git-remote-http',
)
endforeach
@@ -1858,7 +1851,7 @@ test_dependencies += executable('git-imap-send',
sources: 'imap-send.c',
dependencies: [ use_curl_for_imap_send ? libgit_curl : libgit_commonmain ],
install: true,
- install_dir: get_option('libexecdir') / 'git-core',
+ install_dir: git_exec_path,
)
foreach alias : [ 'git-receive-pack', 'git-upload-archive', 'git-upload-pack' ]
@@ -1868,7 +1861,7 @@ foreach alias : [ 'git-receive-pack', 'git-upload-archive', 'git-upload-pack' ]
)
install_symlink(alias + executable_suffix,
- install_dir: get_option('libexecdir') / 'git-core',
+ install_dir: git_exec_path,
pointing_to: 'git',
)
endforeach
@@ -1882,9 +1875,9 @@ foreach symlink : [
'scalar',
]
if meson.version().version_compare('>=1.3.0')
- pointing_to = fs.relative_to(get_option('libexecdir') / 'git-core' / symlink, get_option('bindir'))
+ pointing_to = fs.relative_to(git_exec_path / symlink, get_option('bindir'))
else
- pointing_to = '../libexec/git-core' / symlink
+ pointing_to = '..' / git_exec_path / symlink
endif
install_symlink(symlink,
@@ -1924,7 +1917,7 @@ foreach script : scripts_sh
meson.project_build_root() / 'GIT-BUILD-OPTIONS',
],
install: true,
- install_dir: get_option('libexecdir') / 'git-core',
+ install_dir: git_exec_path,
)
endforeach
@@ -1957,7 +1950,7 @@ if perl_features_enabled
input: perl_header_template,
output: 'GIT-PERL-HEADER',
configuration: {
- 'GITEXECDIR_REL': get_option('libexecdir') / 'git-core',
+ 'GITEXECDIR_REL': git_exec_path,
'PERLLIBDIR_REL': perllibdir,
'LOCALEDIR_REL': get_option('datadir') / 'locale',
'INSTLIBDIR': perllibdir,
@@ -1981,7 +1974,7 @@ if perl_features_enabled
output: fs.stem(script),
command: generate_perl_command,
install: true,
- install_dir: get_option('libexecdir') / 'git-core',
+ install_dir: git_exec_path,
depends: [git_version_file],
)
test_dependencies += generated_script
@@ -1990,9 +1983,9 @@ if perl_features_enabled
bin_wrappers += generated_script
if meson.version().version_compare('>=1.3.0')
- pointing_to = fs.relative_to(get_option('libexecdir') / 'git-core' / fs.stem(script), get_option('bindir'))
+ pointing_to = fs.relative_to(git_exec_path / fs.stem(script), get_option('bindir'))
else
- pointing_to = '../libexec/git-core' / fs.stem(script)
+ pointing_to = '..' / git_exec_path / fs.stem(script)
endif
install_symlink(fs.stem(script),
@@ -2005,7 +1998,7 @@ if perl_features_enabled
subdir('perl')
endif
-if python.found()
+if target_python.found()
scripts_python = [
'git-p4.py'
]
@@ -2022,7 +2015,7 @@ if python.found()
'@OUTPUT@',
],
install: true,
- install_dir: get_option('libexecdir') / 'git-core',
+ install_dir: git_exec_path,
)
test_dependencies += generated_python
endforeach
@@ -2056,7 +2049,7 @@ mergetools = [
]
foreach mergetool : mergetools
- install_data(mergetool, install_dir: get_option('libexecdir') / 'git-core' / 'mergetools')
+ install_data(mergetool, install_dir: git_exec_path / 'mergetools')
endforeach
if intl.found()
@@ -2080,6 +2073,18 @@ subdir('templates')
# can properly set up test dependencies. The bin-wrappers themselves are set up
# at configuration time, so these are fine.
if get_option('tests')
+ test_kwargs = {
+ 'timeout': 0,
+ }
+
+ # The TAP protocol was already understood by previous versions of Meson, but
+ # it was incompatible with the `meson test --interactive` flag.
+ if meson.version().version_compare('>=1.8.0')
+ test_kwargs += {
+ 'protocol': 'tap',
+ }
+ endif
+
subdir('t')
endif
@@ -2158,6 +2163,18 @@ if headers_to_check.length() != 0 and compiler.get_argument_syntax() == 'gcc'
alias_target('check-headers', hdr_check)
endif
+git_clang_format = find_program('git-clang-format', required: false, native: true)
+if git_clang_format.found()
+ run_target('style',
+ command: [
+ git_clang_format,
+ '--style', 'file',
+ '--diff',
+ '--extensions', 'c,h'
+ ]
+ )
+endif
+
foreach key, value : {
'DIFF': diff.full_path(),
'GIT_SOURCE_DIR': meson.project_source_root(),
@@ -2208,16 +2225,15 @@ meson.add_dist_script(
summary({
'benchmarks': get_option('tests') and perl.found() and time.found(),
- 'curl': curl.found(),
- 'expat': expat.found(),
- 'gettext': intl.found(),
+ 'curl': curl,
+ 'expat': expat,
+ 'gettext': intl,
'gitweb': gitweb_option.allowed(),
- 'https': https_backend,
- 'iconv': iconv.found(),
- 'pcre2': pcre2.found(),
+ 'iconv': iconv,
+ 'pcre2': pcre2,
'perl': perl_features_enabled,
- 'python': python.found(),
-}, section: 'Auto-detected features')
+ 'python': target_python.found(),
+}, section: 'Auto-detected features', bool_yn: true)
summary({
'csprng': csprng_backend,
diff --git a/midx-write.c b/midx-write.c
index ba4a94950a..f2cfb85476 100644
--- a/midx-write.c
+++ b/midx-write.c
@@ -922,7 +922,7 @@ static struct multi_pack_index *lookup_multi_pack_index(struct repository *r,
struct strbuf cur_path_real = STRBUF_INIT;
/* Ensure the given object_dir is local, or a known alternate. */
- find_odb(r, obj_dir_real);
+ odb_find_source(r->objects, obj_dir_real);
for (cur = get_multi_pack_index(r); cur; cur = cur->next) {
strbuf_realpath(&cur_path_real, cur->object_dir, 1);
diff --git a/midx.c b/midx.c
index cd6e766ce2..3c5bc82173 100644
--- a/midx.c
+++ b/midx.c
@@ -832,7 +832,7 @@ void clear_midx_file(struct repository *r)
{
struct strbuf midx = STRBUF_INIT;
- get_midx_filename(r->hash_algo, &midx, r->objects->odb->path);
+ get_midx_filename(r->hash_algo, &midx, r->objects->sources->path);
if (r->objects && r->objects->multi_pack_index) {
close_midx(r->objects->multi_pack_index);
@@ -842,8 +842,8 @@ void clear_midx_file(struct repository *r)
if (remove_path(midx.buf))
die(_("failed to clear multi-pack-index at %s"), midx.buf);
- clear_midx_files_ext(r->objects->odb->path, MIDX_EXT_BITMAP, NULL);
- clear_midx_files_ext(r->objects->odb->path, MIDX_EXT_REV, NULL);
+ clear_midx_files_ext(r->objects->sources->path, MIDX_EXT_BITMAP, NULL);
+ clear_midx_files_ext(r->objects->sources->path, MIDX_EXT_REV, NULL);
strbuf_release(&midx);
}
diff --git a/notes-cache.c b/notes-cache.c
index 150241b15e..dd56feed6e 100644
--- a/notes-cache.c
+++ b/notes-cache.c
@@ -3,7 +3,7 @@
#include "git-compat-util.h"
#include "notes-cache.h"
#include "object-file.h"
-#include "object-store.h"
+#include "odb.h"
#include "pretty.h"
#include "repository.h"
#include "commit.h"
@@ -87,7 +87,7 @@ char *notes_cache_get(struct notes_cache *c, struct object_id *key_oid,
value_oid = get_note(&c->tree, key_oid);
if (!value_oid)
return NULL;
- value = repo_read_object_file(the_repository, value_oid, &type, &size);
+ value = odb_read_object(the_repository->objects, value_oid, &type, &size);
*outsize = size;
return value;
diff --git a/notes-merge.c b/notes-merge.c
index dae8e6a281..586939939f 100644
--- a/notes-merge.c
+++ b/notes-merge.c
@@ -8,7 +8,7 @@
#include "refs.h"
#include "object-file.h"
#include "object-name.h"
-#include "object-store.h"
+#include "odb.h"
#include "path.h"
#include "repository.h"
#include "diff.h"
@@ -340,7 +340,7 @@ static void write_note_to_worktree(const struct object_id *obj,
{
enum object_type type;
unsigned long size;
- void *buf = repo_read_object_file(the_repository, note, &type, &size);
+ void *buf = odb_read_object(the_repository->objects, note, &type, &size);
if (!buf)
die("cannot read note %s for object %s",
diff --git a/notes.c b/notes.c
index 0a128f1de9..97b995f3f2 100644
--- a/notes.c
+++ b/notes.c
@@ -8,7 +8,7 @@
#include "notes.h"
#include "object-file.h"
#include "object-name.h"
-#include "object-store.h"
+#include "odb.h"
#include "utf8.h"
#include "strbuf.h"
#include "tree-walk.h"
@@ -794,8 +794,8 @@ static int prune_notes_helper(const struct object_id *object_oid,
struct note_delete_list **l = (struct note_delete_list **) cb_data;
struct note_delete_list *n;
- if (has_object(the_repository, object_oid,
- HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR))
+ if (odb_has_object(the_repository->objects, object_oid,
+ HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR))
return 0; /* nothing to do for this note */
/* failed to find object => prune this note */
@@ -816,15 +816,15 @@ int combine_notes_concatenate(struct object_id *cur_oid,
/* read in both note blob objects */
if (!is_null_oid(new_oid))
- new_msg = repo_read_object_file(the_repository, new_oid,
- &new_type, &new_len);
+ new_msg = odb_read_object(the_repository->objects, new_oid,
+ &new_type, &new_len);
if (!new_msg || !new_len || new_type != OBJ_BLOB) {
free(new_msg);
return 0;
}
if (!is_null_oid(cur_oid))
- cur_msg = repo_read_object_file(the_repository, cur_oid,
- &cur_type, &cur_len);
+ cur_msg = odb_read_object(the_repository->objects, cur_oid,
+ &cur_type, &cur_len);
if (!cur_msg || !cur_len || cur_type != OBJ_BLOB) {
free(cur_msg);
free(new_msg);
@@ -880,7 +880,7 @@ static int string_list_add_note_lines(struct string_list *list,
return 0;
/* read_sha1_file NUL-terminates */
- data = repo_read_object_file(the_repository, oid, &t, &len);
+ data = odb_read_object(the_repository->objects, oid, &t, &len);
if (t != OBJ_BLOB || !data || !len) {
free(data);
return t != OBJ_BLOB || !data;
@@ -1290,7 +1290,8 @@ static void format_note(struct notes_tree *t, const struct object_id *object_oid
if (!oid)
return;
- if (!(msg = repo_read_object_file(the_repository, oid, &type, &msglen)) || type != OBJ_BLOB) {
+ if (!(msg = odb_read_object(the_repository->objects, oid, &type, &msglen)) ||
+ type != OBJ_BLOB) {
free(msg);
return;
}
diff --git a/object-file.c b/object-file.c
index 1ac04c2891..3d674d1093 100644
--- a/object-file.c
+++ b/object-file.c
@@ -21,7 +21,7 @@
#include "loose.h"
#include "object-file-convert.h"
#include "object-file.h"
-#include "object-store.h"
+#include "odb.h"
#include "oidtree.h"
#include "pack.h"
#include "packfile.h"
@@ -55,12 +55,12 @@ static void fill_loose_path(struct strbuf *buf, const struct object_id *oid)
}
}
-const char *odb_loose_path(struct object_directory *odb,
+const char *odb_loose_path(struct odb_source *source,
struct strbuf *buf,
const struct object_id *oid)
{
strbuf_reset(buf);
- strbuf_addstr(buf, odb->path);
+ strbuf_addstr(buf, source->path);
strbuf_addch(buf, '/');
fill_loose_path(buf, oid);
return buf->buf;
@@ -88,27 +88,27 @@ int check_and_freshen_file(const char *fn, int freshen)
return 1;
}
-static int check_and_freshen_odb(struct object_directory *odb,
+static int check_and_freshen_odb(struct odb_source *source,
const struct object_id *oid,
int freshen)
{
static struct strbuf path = STRBUF_INIT;
- odb_loose_path(odb, &path, oid);
+ odb_loose_path(source, &path, oid);
return check_and_freshen_file(path.buf, freshen);
}
static int check_and_freshen_local(const struct object_id *oid, int freshen)
{
- return check_and_freshen_odb(the_repository->objects->odb, oid, freshen);
+ return check_and_freshen_odb(the_repository->objects->sources, oid, freshen);
}
static int check_and_freshen_nonlocal(const struct object_id *oid, int freshen)
{
- struct object_directory *odb;
+ struct odb_source *source;
- prepare_alt_odb(the_repository);
- for (odb = the_repository->objects->odb->next; odb; odb = odb->next) {
- if (check_and_freshen_odb(odb, oid, freshen))
+ odb_prepare_alternates(the_repository->objects);
+ for (source = the_repository->objects->sources->next; source; source = source->next) {
+ if (check_and_freshen_odb(source, oid, freshen))
return 1;
}
return 0;
@@ -202,12 +202,12 @@ int stream_object_signature(struct repository *r, const struct object_id *oid)
static int stat_loose_object(struct repository *r, const struct object_id *oid,
struct stat *st, const char **path)
{
- struct object_directory *odb;
+ struct odb_source *source;
static struct strbuf buf = STRBUF_INIT;
- prepare_alt_odb(r);
- for (odb = r->objects->odb; odb; odb = odb->next) {
- *path = odb_loose_path(odb, &buf, oid);
+ odb_prepare_alternates(r->objects);
+ for (source = r->objects->sources; source; source = source->next) {
+ *path = odb_loose_path(source, &buf, oid);
if (!lstat(*path, st))
return 0;
}
@@ -223,13 +223,13 @@ static int open_loose_object(struct repository *r,
const struct object_id *oid, const char **path)
{
int fd;
- struct object_directory *odb;
+ struct odb_source *source;
int most_interesting_errno = ENOENT;
static struct strbuf buf = STRBUF_INIT;
- prepare_alt_odb(r);
- for (odb = r->objects->odb; odb; odb = odb->next) {
- *path = odb_loose_path(odb, &buf, oid);
+ odb_prepare_alternates(r->objects);
+ for (source = r->objects->sources; source; source = source->next) {
+ *path = odb_loose_path(source, &buf, oid);
fd = git_open(*path);
if (fd >= 0)
return fd;
@@ -244,11 +244,11 @@ static int open_loose_object(struct repository *r,
static int quick_has_loose(struct repository *r,
const struct object_id *oid)
{
- struct object_directory *odb;
+ struct odb_source *source;
- prepare_alt_odb(r);
- for (odb = r->objects->odb; odb; odb = odb->next) {
- if (oidtree_contains(odb_loose_cache(odb, oid), oid))
+ odb_prepare_alternates(r->objects);
+ for (source = r->objects->sources; source; source = source->next) {
+ if (oidtree_contains(odb_loose_cache(source, oid), oid))
return 1;
}
return 0;
@@ -694,7 +694,7 @@ void hash_object_file(const struct git_hash_algo *algo, const void *buf,
/* Finalize a file on disk, and close it. */
static void close_loose_object(int fd, const char *filename)
{
- if (the_repository->objects->odb->will_destroy)
+ if (the_repository->objects->sources->will_destroy)
goto out;
if (batch_fsync_enabled(FSYNC_COMPONENT_LOOSE_OBJECT))
@@ -876,7 +876,7 @@ static int write_loose_object(const struct object_id *oid, char *hdr,
if (batch_fsync_enabled(FSYNC_COMPONENT_LOOSE_OBJECT))
prepare_loose_object_bulk_checkin();
- odb_loose_path(the_repository->objects->odb, &filename, oid);
+ odb_loose_path(the_repository->objects->sources, &filename, oid);
fd = start_loose_object_common(&tmp_file, filename.buf, flags,
&stream, compressed, sizeof(compressed),
@@ -1023,7 +1023,7 @@ int stream_loose_object(struct input_stream *in_stream, size_t len,
goto cleanup;
}
- odb_loose_path(the_repository->objects->odb, &filename, oid);
+ odb_loose_path(the_repository->objects->sources, &filename, oid);
/* We finally know the object path, and create the missing dir. */
dirlen = directory_size(filename.buf);
@@ -1108,7 +1108,7 @@ int force_object_loose(const struct object_id *oid, time_t mtime)
oi.typep = &type;
oi.sizep = &len;
oi.contentp = &buf;
- if (oid_object_info_extended(the_repository, oid, &oi, 0))
+ if (odb_read_object_info_extended(the_repository->objects, oid, &oi, 0))
return error(_("cannot read object for %s"), oid_to_hex(oid));
if (compat) {
if (repo_oid_to_algop(repo, oid, compat, &compat_oid))
@@ -1437,11 +1437,11 @@ int for_each_loose_file_in_objdir(const char *path,
int for_each_loose_object(each_loose_object_fn cb, void *data,
enum for_each_object_flags flags)
{
- struct object_directory *odb;
+ struct odb_source *source;
- prepare_alt_odb(the_repository);
- for (odb = the_repository->objects->odb; odb; odb = odb->next) {
- int r = for_each_loose_file_in_objdir(odb->path, cb, NULL,
+ odb_prepare_alternates(the_repository->objects);
+ for (source = the_repository->objects->sources; source; source = source->next) {
+ int r = for_each_loose_file_in_objdir(source->path, cb, NULL,
NULL, data);
if (r)
return r;
@@ -1461,43 +1461,43 @@ static int append_loose_object(const struct object_id *oid,
return 0;
}
-struct oidtree *odb_loose_cache(struct object_directory *odb,
- const struct object_id *oid)
+struct oidtree *odb_loose_cache(struct odb_source *source,
+ const struct object_id *oid)
{
int subdir_nr = oid->hash[0];
struct strbuf buf = STRBUF_INIT;
- size_t word_bits = bitsizeof(odb->loose_objects_subdir_seen[0]);
+ size_t word_bits = bitsizeof(source->loose_objects_subdir_seen[0]);
size_t word_index = subdir_nr / word_bits;
size_t mask = (size_t)1u << (subdir_nr % word_bits);
uint32_t *bitmap;
if (subdir_nr < 0 ||
- subdir_nr >= bitsizeof(odb->loose_objects_subdir_seen))
+ subdir_nr >= bitsizeof(source->loose_objects_subdir_seen))
BUG("subdir_nr out of range");
- bitmap = &odb->loose_objects_subdir_seen[word_index];
+ bitmap = &source->loose_objects_subdir_seen[word_index];
if (*bitmap & mask)
- return odb->loose_objects_cache;
- if (!odb->loose_objects_cache) {
- ALLOC_ARRAY(odb->loose_objects_cache, 1);
- oidtree_init(odb->loose_objects_cache);
+ return source->loose_objects_cache;
+ if (!source->loose_objects_cache) {
+ ALLOC_ARRAY(source->loose_objects_cache, 1);
+ oidtree_init(source->loose_objects_cache);
}
- strbuf_addstr(&buf, odb->path);
+ strbuf_addstr(&buf, source->path);
for_each_file_in_obj_subdir(subdir_nr, &buf,
append_loose_object,
NULL, NULL,
- odb->loose_objects_cache);
+ source->loose_objects_cache);
*bitmap |= mask;
strbuf_release(&buf);
- return odb->loose_objects_cache;
+ return source->loose_objects_cache;
}
-void odb_clear_loose_cache(struct object_directory *odb)
+void odb_clear_loose_cache(struct odb_source *source)
{
- oidtree_clear(odb->loose_objects_cache);
- FREE_AND_NULL(odb->loose_objects_cache);
- memset(&odb->loose_objects_subdir_seen, 0,
- sizeof(odb->loose_objects_subdir_seen));
+ oidtree_clear(source->loose_objects_cache);
+ FREE_AND_NULL(source->loose_objects_cache);
+ memset(&source->loose_objects_subdir_seen, 0,
+ sizeof(source->loose_objects_subdir_seen));
}
static int check_stream_oid(git_zstream *stream,
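odb_loose_cache() above memoizes which of the 256 objects/XX fanout directories have already been scanned using a 256-bit bitmap, and the rename keeps that arithmetic intact. A hedged sketch of just that bookkeeping (the real code derives word_bits via bitsizeof() rather than hardcoding 32):

	/* One bit per objects/XX subdirectory, packed into uint32_t[8]. */
	static int subdir_already_scanned(uint32_t seen[8], int subdir_nr)
	{
		size_t word_bits = 32;
		size_t word_index = subdir_nr / word_bits;
		uint32_t mask = (uint32_t)1u << (subdir_nr % word_bits);

		if (seen[word_index] & mask)
			return 1;
		seen[word_index] |= mask;	/* mark it scanned for next time */
		return 0;
	}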
diff --git a/object-file.h b/object-file.h
index 6f41142452..67b4ffc480 100644
--- a/object-file.h
+++ b/object-file.h
@@ -3,12 +3,12 @@
#include "git-zlib.h"
#include "object.h"
-#include "object-store.h"
+#include "odb.h"
struct index_state;
/*
- * Set this to 0 to prevent oid_object_info_extended() from fetching missing
+ * Set this to 0 to prevent odb_read_object_info_extended() from fetching missing
* blobs. This has a difference only if extensions.partialClone is set.
*
* Its default value is 1.
@@ -24,23 +24,23 @@ enum {
int index_fd(struct index_state *istate, struct object_id *oid, int fd, struct stat *st, enum object_type type, const char *path, unsigned flags);
int index_path(struct index_state *istate, struct object_id *oid, const char *path, struct stat *st, unsigned flags);
-struct object_directory;
+struct odb_source;
/*
* Populate and return the loose object cache array corresponding to the
* given object ID.
*/
-struct oidtree *odb_loose_cache(struct object_directory *odb,
+struct oidtree *odb_loose_cache(struct odb_source *source,
const struct object_id *oid);
/* Empty the loose object cache for the specified object directory. */
-void odb_clear_loose_cache(struct object_directory *odb);
+void odb_clear_loose_cache(struct odb_source *source);
/*
* Put in `buf` the name of the file in the local object database that
* would be used to store a loose object with the specified oid.
*/
-const char *odb_loose_path(struct object_directory *odb,
+const char *odb_loose_path(struct odb_source *source,
struct strbuf *buf,
const struct object_id *oid);
diff --git a/object-name.c b/object-name.c
index 9288b2dd24..41930609e3 100644
--- a/object-name.c
+++ b/object-name.c
@@ -28,6 +28,7 @@
#include "commit-reach.h"
#include "date.h"
#include "object-file-convert.h"
+#include "prio-queue.h"
static int get_oid_oneline(struct repository *r, const char *, struct object_id *,
const struct commit_list *);
@@ -112,10 +113,10 @@ static enum cb_next match_prefix(const struct object_id *oid, void *arg)
static void find_short_object_filename(struct disambiguate_state *ds)
{
- struct object_directory *odb;
+ struct odb_source *source;
- for (odb = ds->repo->objects->odb; odb && !ds->ambiguous; odb = odb->next)
- oidtree_each(odb_loose_cache(odb, &ds->bin_pfx),
+ for (source = ds->repo->objects->sources; source && !ds->ambiguous; source = source->next)
+ oidtree_each(odb_loose_cache(source, &ds->bin_pfx),
&ds->bin_pfx, ds->len, match_prefix, ds);
}
@@ -251,7 +252,7 @@ static int disambiguate_commit_only(struct repository *r,
const struct object_id *oid,
void *cb_data UNUSED)
{
- int kind = oid_object_info(r, oid, NULL);
+ int kind = odb_read_object_info(r->objects, oid, NULL);
return kind == OBJ_COMMIT;
}
@@ -262,7 +263,7 @@ static int disambiguate_committish_only(struct repository *r,
struct object *obj;
int kind;
- kind = oid_object_info(r, oid, NULL);
+ kind = odb_read_object_info(r->objects, oid, NULL);
if (kind == OBJ_COMMIT)
return 1;
if (kind != OBJ_TAG)
@@ -279,7 +280,7 @@ static int disambiguate_tree_only(struct repository *r,
const struct object_id *oid,
void *cb_data UNUSED)
{
- int kind = oid_object_info(r, oid, NULL);
+ int kind = odb_read_object_info(r->objects, oid, NULL);
return kind == OBJ_TREE;
}
@@ -290,7 +291,7 @@ static int disambiguate_treeish_only(struct repository *r,
struct object *obj;
int kind;
- kind = oid_object_info(r, oid, NULL);
+ kind = odb_read_object_info(r->objects, oid, NULL);
if (kind == OBJ_TREE || kind == OBJ_COMMIT)
return 1;
if (kind != OBJ_TAG)
@@ -307,7 +308,7 @@ static int disambiguate_blob_only(struct repository *r,
const struct object_id *oid,
void *cb_data UNUSED)
{
- int kind = oid_object_info(r, oid, NULL);
+ int kind = odb_read_object_info(r->objects, oid, NULL);
return kind == OBJ_BLOB;
}
@@ -376,7 +377,7 @@ static int init_object_disambiguation(struct repository *r,
ds->hex_pfx[len] = '\0';
ds->repo = r;
ds->bin_pfx.algo = algo ? hash_algo_by_ptr(algo) : GIT_HASH_UNKNOWN;
- prepare_alt_odb(r);
+ odb_prepare_alternates(r->objects);
return 0;
}
@@ -399,7 +400,7 @@ static int show_ambiguous_object(const struct object_id *oid, void *data)
return 0;
hash = repo_find_unique_abbrev(ds->repo, oid, DEFAULT_ABBREV);
- type = oid_object_info(ds->repo, oid, NULL);
+ type = odb_read_object_info(ds->repo->objects, oid, NULL);
if (type < 0) {
/*
@@ -514,8 +515,8 @@ static int sort_ambiguous(const void *va, const void *vb, void *ctx)
{
struct repository *sort_ambiguous_repo = ctx;
const struct object_id *a = va, *b = vb;
- int a_type = oid_object_info(sort_ambiguous_repo, a, NULL);
- int b_type = oid_object_info(sort_ambiguous_repo, b, NULL);
+ int a_type = odb_read_object_info(sort_ambiguous_repo->objects, a, NULL);
+ int b_type = odb_read_object_info(sort_ambiguous_repo->objects, b, NULL);
int a_type_sort;
int b_type_sort;
@@ -1081,13 +1082,17 @@ static int get_oid_basic(struct repository *r, const char *str, int len,
* still fill in the oid with the "old" value,
* which we can use.
*/
- } else {
+ } else if (!(flags & GET_OID_GENTLY)) {
if (flags & GET_OID_QUIETLY) {
exit(128);
}
die(_("log for '%.*s' only has %d entries"),
len, str, co_cnt);
}
+ if (flags & GET_OID_GENTLY) {
+ free(real_ref);
+ return -1;
+ }
}
}
@@ -1457,7 +1462,7 @@ static int get_oid_oneline(struct repository *r,
const char *prefix, struct object_id *oid,
const struct commit_list *list)
{
- struct commit_list *copy = NULL, **copy_tail = &copy;
+ struct prio_queue copy = { compare_commits_by_commit_date };
const struct commit_list *l;
int found = 0;
int negative = 0;
@@ -1479,9 +1484,9 @@ static int get_oid_oneline(struct repository *r,
for (l = list; l; l = l->next) {
l->item->object.flags |= ONELINE_SEEN;
- copy_tail = &commit_list_insert(l->item, copy_tail)->next;
+ prio_queue_put(&copy, l->item);
}
- while (copy) {
+ while (copy.nr) {
const char *p, *buf;
struct commit *commit;
int matches;
@@ -1503,7 +1508,7 @@ static int get_oid_oneline(struct repository *r,
regfree(&regex);
for (l = list; l; l = l->next)
clear_commit_marks(l->item, ONELINE_SEEN);
- free_commit_list(copy);
+ clear_prio_queue(&copy);
return found ? 0 : -1;
}
@@ -2057,7 +2062,6 @@ static enum get_oid_result get_oid_with_context_1(struct repository *repo,
cb.list = &list;
refs_for_each_ref(get_main_ref_store(repo), handle_one_ref, &cb);
refs_head_ref(get_main_ref_store(repo), handle_one_ref, &cb);
- commit_list_sort_by_date(&list);
ret = get_oid_oneline(repo, name + 2, oid, list);
free_commit_list(list);
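The ":/<text>" lookup above now feeds candidate commits into a prio_queue keyed by committer date instead of date-sorting the whole commit_list up front, which is why the commit_list_sort_by_date() call disappears. A hedged sketch of the queue usage (the helper is hypothetical; the comparator and queue API are the ones used in the hunk):

	/* Return the newest candidate, or NULL if the list is empty. */
	static struct commit *newest_candidate(const struct commit_list *list)
	{
		struct prio_queue queue = { compare_commits_by_commit_date };
		const struct commit_list *l;
		struct commit *commit;

		for (l = list; l; l = l->next)
			prio_queue_put(&queue, l->item);

		commit = prio_queue_get(&queue);	/* newest-first ordering */
		clear_prio_queue(&queue);
		return commit;
	}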
diff --git a/object-store.h b/object-store.h
deleted file mode 100644
index c589008535..0000000000
--- a/object-store.h
+++ /dev/null
@@ -1,338 +0,0 @@
-#ifndef OBJECT_STORE_H
-#define OBJECT_STORE_H
-
-#include "hashmap.h"
-#include "object.h"
-#include "list.h"
-#include "oidset.h"
-#include "oidmap.h"
-#include "thread-utils.h"
-
-struct oidmap;
-struct oidtree;
-struct strbuf;
-struct repository;
-
-struct object_directory {
- struct object_directory *next;
-
- /*
- * Used to store the results of readdir(3) calls when we are OK
- * sacrificing accuracy due to races for speed. That includes
- * object existence with OBJECT_INFO_QUICK, as well as
- * our search for unique abbreviated hashes. Don't use it for tasks
- * requiring greater accuracy!
- *
- * Be sure to call odb_load_loose_cache() before using.
- */
- uint32_t loose_objects_subdir_seen[8]; /* 256 bits */
- struct oidtree *loose_objects_cache;
-
- /* Map between object IDs for loose objects. */
- struct loose_object_map *loose_map;
-
- /*
- * This is a temporary object store created by the tmp_objdir
- * facility. Disable ref updates since the objects in the store
- * might be discarded on rollback.
- */
- int disable_ref_updates;
-
- /*
- * This object store is ephemeral, so there is no need to fsync.
- */
- int will_destroy;
-
- /*
- * Path to the alternative object store. If this is a relative path,
- * it is relative to the current working directory.
- */
- char *path;
-};
-
-void prepare_alt_odb(struct repository *r);
-int has_alt_odb(struct repository *r);
-char *compute_alternate_path(const char *path, struct strbuf *err);
-struct object_directory *find_odb(struct repository *r, const char *obj_dir);
-typedef int alt_odb_fn(struct object_directory *, void *);
-int foreach_alt_odb(alt_odb_fn, void*);
-typedef void alternate_ref_fn(const struct object_id *oid, void *);
-void for_each_alternate_ref(alternate_ref_fn, void *);
-
-/*
- * Add the directory to the on-disk alternates file; the new entry will also
- * take effect in the current process.
- */
-void add_to_alternates_file(const char *dir);
-
-/*
- * Add the directory to the in-memory list of alternates (along with any
- * recursive alternates it points to), but do not modify the on-disk alternates
- * file.
- */
-void add_to_alternates_memory(const char *dir);
-
-/*
- * Replace the current writable object directory with the specified temporary
- * object directory; returns the former primary object directory.
- */
-struct object_directory *set_temporary_primary_odb(const char *dir, int will_destroy);
-
-/*
- * Restore a previous ODB replaced by set_temporary_main_odb.
- */
-void restore_primary_odb(struct object_directory *restore_odb, const char *old_path);
-
-struct packed_git;
-struct multi_pack_index;
-struct cached_object_entry;
-
-struct raw_object_store {
- /*
- * Set of all object directories; the main directory is first (and
- * cannot be NULL after initialization). Subsequent directories are
- * alternates.
- */
- struct object_directory *odb;
- struct object_directory **odb_tail;
- struct kh_odb_path_map *odb_by_path;
-
- int loaded_alternates;
-
- /*
- * A list of alternate object directories loaded from the environment;
- * this should not generally need to be accessed directly, but will
- * populate the "odb" list when prepare_alt_odb() is run.
- */
- char *alternate_db;
-
- /*
- * Objects that should be substituted by other objects
- * (see git-replace(1)).
- */
- struct oidmap replace_map;
- unsigned replace_map_initialized : 1;
- pthread_mutex_t replace_mutex; /* protect object replace functions */
-
- struct commit_graph *commit_graph;
- unsigned commit_graph_attempted : 1; /* if loading has been attempted */
-
- /*
- * private data
- *
- * should only be accessed directly by packfile.c and midx.c
- */
- struct multi_pack_index *multi_pack_index;
-
- /*
- * private data
- *
- * should only be accessed directly by packfile.c
- */
-
- struct packed_git *packed_git;
- /* A most-recently-used ordered version of the packed_git list. */
- struct list_head packed_git_mru;
-
- struct {
- struct packed_git **packs;
- unsigned flags;
- } kept_pack_cache;
-
- /*
- * This is meant to hold a *small* number of objects that you would
- * want repo_read_object_file() to be able to return, but yet you do not want
- * to write them into the object store (e.g. a browse-only
- * application).
- */
- struct cached_object_entry *cached_objects;
- size_t cached_object_nr, cached_object_alloc;
-
- /*
- * A map of packfiles to packed_git structs for tracking which
- * packs have been loaded already.
- */
- struct hashmap pack_map;
-
- /*
- * A fast, rough count of the number of objects in the repository.
- * These two fields are not meant for direct access. Use
- * repo_approximate_object_count() instead.
- */
- unsigned long approximate_object_count;
- unsigned approximate_object_count_valid : 1;
-
- /*
- * Whether packed_git has already been populated with this repository's
- * packs.
- */
- unsigned packed_git_initialized : 1;
-};
-
-struct raw_object_store *raw_object_store_new(void);
-void raw_object_store_clear(struct raw_object_store *o);
-
-/*
- * Create a temporary file rooted in the object database directory, or
- * die on failure. The filename is taken from "pattern", which should have the
- * usual "XXXXXX" trailer, and the resulting filename is written into the
- * "template" buffer. Returns the open descriptor.
- */
-int odb_mkstemp(struct strbuf *temp_filename, const char *pattern);
-
-void *repo_read_object_file(struct repository *r,
- const struct object_id *oid,
- enum object_type *type,
- unsigned long *size);
-
-/* Read and unpack an object file into memory, write memory to an object file */
-int oid_object_info(struct repository *r, const struct object_id *, unsigned long *);
-
-/*
- * Add an object file to the in-memory object store, without writing it
- * to disk.
- *
- * Callers are responsible for calling write_object_file to record the
- * object in persistent storage before writing any other new objects
- * that reference it.
- */
-int pretend_object_file(struct repository *repo,
- void *buf, unsigned long len, enum object_type type,
- struct object_id *oid);
-
-struct object_info {
- /* Request */
- enum object_type *typep;
- unsigned long *sizep;
- off_t *disk_sizep;
- struct object_id *delta_base_oid;
- void **contentp;
-
- /* Response */
- enum {
- OI_CACHED,
- OI_LOOSE,
- OI_PACKED,
- OI_DBCACHED
- } whence;
- union {
- /*
- * struct {
- * ... Nothing to expose in this case
- * } cached;
- * struct {
- * ... Nothing to expose in this case
- * } loose;
- */
- struct {
- struct packed_git *pack;
- off_t offset;
- unsigned int is_delta;
- } packed;
- } u;
-};
-
-/*
- * Initializer for a "struct object_info" that wants no items. You may
- * also memset() the memory to all-zeroes.
- */
-#define OBJECT_INFO_INIT { 0 }
-
-/* Invoke lookup_replace_object() on the given hash */
-#define OBJECT_INFO_LOOKUP_REPLACE 1
-/* Do not retry packed storage after checking packed and loose storage */
-#define OBJECT_INFO_QUICK 8
-/*
- * Do not attempt to fetch the object if missing (even if fetch_is_missing is
- * nonzero).
- */
-#define OBJECT_INFO_SKIP_FETCH_OBJECT 16
-/*
- * This is meant for bulk prefetching of missing blobs in a partial
- * clone. Implies OBJECT_INFO_SKIP_FETCH_OBJECT and OBJECT_INFO_QUICK
- */
-#define OBJECT_INFO_FOR_PREFETCH (OBJECT_INFO_SKIP_FETCH_OBJECT | OBJECT_INFO_QUICK)
-
-/* Die if object corruption (not just an object being missing) was detected. */
-#define OBJECT_INFO_DIE_IF_CORRUPT 32
-
-int oid_object_info_extended(struct repository *r,
- const struct object_id *,
- struct object_info *, unsigned flags);
-
-enum {
- /* Retry packed storage after checking packed and loose storage */
- HAS_OBJECT_RECHECK_PACKED = (1 << 0),
- /* Allow fetching the object in case the repository has a promisor remote. */
- HAS_OBJECT_FETCH_PROMISOR = (1 << 1),
-};
-
-/*
- * Returns 1 if the object exists. This function will not lazily fetch objects
- * in a partial clone by default.
- */
-int has_object(struct repository *r, const struct object_id *oid,
- unsigned flags);
-
-void assert_oid_type(const struct object_id *oid, enum object_type expect);
-
-/*
- * Enabling the object read lock allows multiple threads to safely call the
- * following functions in parallel: repo_read_object_file(),
- * read_object_with_reference(), oid_object_info() and oid_object_info_extended().
- *
- * obj_read_lock() and obj_read_unlock() may also be used to protect other
- * section which cannot execute in parallel with object reading. Since the used
- * lock is a recursive mutex, these sections can even contain calls to object
- * reading functions. However, beware that in these cases zlib inflation won't
- * be performed in parallel, losing performance.
- *
- * TODO: oid_object_info_extended()'s call stack has a recursive behavior. If
- * any of its callees end up calling it, this recursive call won't benefit from
- * parallel inflation.
- */
-void enable_obj_read_lock(void);
-void disable_obj_read_lock(void);
-
-extern int obj_read_use_lock;
-extern pthread_mutex_t obj_read_mutex;
-
-static inline void obj_read_lock(void)
-{
- if(obj_read_use_lock)
- pthread_mutex_lock(&obj_read_mutex);
-}
-
-static inline void obj_read_unlock(void)
-{
- if(obj_read_use_lock)
- pthread_mutex_unlock(&obj_read_mutex);
-}
-/* Flags for for_each_*_object(). */
-enum for_each_object_flags {
- /* Iterate only over local objects, not alternates. */
- FOR_EACH_OBJECT_LOCAL_ONLY = (1<<0),
-
- /* Only iterate over packs obtained from the promisor remote. */
- FOR_EACH_OBJECT_PROMISOR_ONLY = (1<<1),
-
- /*
- * Visit objects within a pack in packfile order rather than .idx order
- */
- FOR_EACH_OBJECT_PACK_ORDER = (1<<2),
-
- /* Only iterate over packs that are not marked as kept in-core. */
- FOR_EACH_OBJECT_SKIP_IN_CORE_KEPT_PACKS = (1<<3),
-
- /* Only iterate over packs that do not have .keep files. */
- FOR_EACH_OBJECT_SKIP_ON_DISK_KEPT_PACKS = (1<<4),
-};
-
-
-void *read_object_with_reference(struct repository *r,
- const struct object_id *oid,
- enum object_type required_type,
- unsigned long *size,
- struct object_id *oid_ret);
-
-#endif /* OBJECT_STORE_H */
diff --git a/object.c b/object.c
index 3b15469139..c1553ee433 100644
--- a/object.c
+++ b/object.c
@@ -214,7 +214,7 @@ enum peel_status peel_object(struct repository *r,
struct object *o = lookup_unknown_object(r, name);
if (o->type == OBJ_NONE) {
- int type = oid_object_info(r, name, NULL);
+ int type = odb_read_object_info(r->objects, name, NULL);
if (type < 0 || !object_as_type(o, type, 0))
return PEEL_INVALID;
}
@@ -315,7 +315,7 @@ struct object *parse_object_with_flags(struct repository *r,
}
if ((!obj || obj->type == OBJ_BLOB) &&
- oid_object_info(r, oid, NULL) == OBJ_BLOB) {
+ odb_read_object_info(r->objects, oid, NULL) == OBJ_BLOB) {
if (!skip_hash && stream_object_signature(r, repl) < 0) {
error(_("hash mismatch %s"), oid_to_hex(oid));
return NULL;
@@ -331,11 +331,11 @@ struct object *parse_object_with_flags(struct repository *r,
*/
if (skip_hash && discard_tree &&
(!obj || obj->type == OBJ_TREE) &&
- oid_object_info(r, oid, NULL) == OBJ_TREE) {
+ odb_read_object_info(r->objects, oid, NULL) == OBJ_TREE) {
return &lookup_tree(r, oid)->object;
}
- buffer = repo_read_object_file(r, oid, &type, &size);
+ buffer = odb_read_object(r->objects, oid, &type, &size);
if (buffer) {
if (!skip_hash &&
check_object_signature(r, repl, buffer, size, type) < 0) {
diff --git a/object-store.c b/odb.c
index 58cde0313a..1f48a0448e 100644
--- a/object-store.c
+++ b/odb.c
@@ -1,5 +1,3 @@
-#define USE_THE_REPOSITORY_VARIABLE
-
#include "git-compat-util.h"
#include "abspath.h"
#include "commit-graph.h"
@@ -13,7 +11,7 @@
#include "loose.h"
#include "object-file-convert.h"
#include "object-file.h"
-#include "object-store.h"
+#include "odb.h"
#include "packfile.h"
#include "path.h"
#include "promisor-remote.h"
@@ -24,14 +22,15 @@
#include "strbuf.h"
#include "strvec.h"
#include "submodule.h"
+#include "trace2.h"
#include "write-or-die.h"
KHASH_INIT(odb_path_map, const char * /* key: odb_path */,
- struct object_directory *, 1, fspathhash, fspatheq)
+ struct odb_source *, 1, fspathhash, fspatheq)
/*
* This is meant to hold a *small* number of objects that you would
- * want repo_read_object_file() to be able to return, but yet you do not want
+ * want odb_read_object() to be able to return, but yet you do not want
* to write them into the object store (e.g. a browse-only
* application).
*/
@@ -44,7 +43,7 @@ struct cached_object_entry {
} value;
};
-static const struct cached_object *find_cached_object(struct raw_object_store *object_store,
+static const struct cached_object *find_cached_object(struct object_database *object_store,
const struct object_id *oid)
{
static const struct cached_object empty_tree = {
@@ -63,7 +62,8 @@ static const struct cached_object *find_cached_object(struct raw_object_store *o
return NULL;
}
-int odb_mkstemp(struct strbuf *temp_filename, const char *pattern)
+int odb_mkstemp(struct object_database *odb,
+ struct strbuf *temp_filename, const char *pattern)
{
int fd;
/*
@@ -71,22 +71,22 @@ int odb_mkstemp(struct strbuf *temp_filename, const char *pattern)
* restrictive except to remove write permission.
*/
int mode = 0444;
- repo_git_path_replace(the_repository, temp_filename, "objects/%s", pattern);
+ repo_git_path_replace(odb->repo, temp_filename, "objects/%s", pattern);
fd = git_mkstemp_mode(temp_filename->buf, mode);
if (0 <= fd)
return fd;
/* slow path */
/* some mkstemp implementations erase temp_filename on failure */
- repo_git_path_replace(the_repository, temp_filename, "objects/%s", pattern);
- safe_create_leading_directories(the_repository, temp_filename->buf);
+ repo_git_path_replace(odb->repo, temp_filename, "objects/%s", pattern);
+ safe_create_leading_directories(odb->repo, temp_filename->buf);
return xmkstemp_mode(temp_filename->buf, mode);
}
/*
* Return non-zero iff the path is usable as an alternate object database.
*/
-static int alt_odb_usable(struct raw_object_store *o,
+static int alt_odb_usable(struct object_database *o,
struct strbuf *path,
const char *normalized_objdir, khiter_t *pos)
{
@@ -104,18 +104,18 @@ static int alt_odb_usable(struct raw_object_store *o,
* Prevent the common mistake of listing the same
* thing twice, or object directory itself.
*/
- if (!o->odb_by_path) {
+ if (!o->source_by_path) {
khiter_t p;
- o->odb_by_path = kh_init_odb_path_map();
- assert(!o->odb->next);
- p = kh_put_odb_path_map(o->odb_by_path, o->odb->path, &r);
+ o->source_by_path = kh_init_odb_path_map();
+ assert(!o->sources->next);
+ p = kh_put_odb_path_map(o->source_by_path, o->sources->path, &r);
assert(r == 1); /* never used */
- kh_value(o->odb_by_path, p) = o->odb;
+ kh_value(o->source_by_path, p) = o->sources;
}
if (fspatheq(path->buf, normalized_objdir))
return 0;
- *pos = kh_put_odb_path_map(o->odb_by_path, path->buf, &r);
+ *pos = kh_put_odb_path_map(o->source_by_path, path->buf, &r);
/* r: 0 = exists, 1 = never used, 2 = deleted */
return r == 0 ? 0 : 1;
}
@@ -124,7 +124,7 @@ static int alt_odb_usable(struct raw_object_store *o,
* Prepare alternate object database registry.
*
* The variable alt_odb_list points at the list of struct
- * object_directory. The elements on this list come from
+ * odb_source. The elements on this list come from
* non-empty elements from colon separated ALTERNATE_DB_ENVIRONMENT
* environment variable, and $GIT_OBJECT_DIRECTORY/info/alternates,
* whose contents is similar to that environment variable but can be
@@ -135,13 +135,17 @@ static int alt_odb_usable(struct raw_object_store *o,
* of the object ID, an extra slash for the first level indirection, and
* the terminating NUL.
*/
-static void read_info_alternates(struct repository *r,
+static void read_info_alternates(struct object_database *odb,
const char *relative_base,
int depth);
-static int link_alt_odb_entry(struct repository *r, const struct strbuf *entry,
- const char *relative_base, int depth, const char *normalized_objdir)
+
+static int link_alt_odb_entry(struct object_database *odb,
+ const struct strbuf *entry,
+ const char *relative_base,
+ int depth,
+ const char *normalized_objdir)
{
- struct object_directory *ent;
+ struct odb_source *alternate;
struct strbuf pathbuf = STRBUF_INIT;
struct strbuf tmp = STRBUF_INIT;
khiter_t pos;
@@ -167,22 +171,23 @@ static int link_alt_odb_entry(struct repository *r, const struct strbuf *entry,
while (pathbuf.len && pathbuf.buf[pathbuf.len - 1] == '/')
strbuf_setlen(&pathbuf, pathbuf.len - 1);
- if (!alt_odb_usable(r->objects, &pathbuf, normalized_objdir, &pos))
+ if (!alt_odb_usable(odb, &pathbuf, normalized_objdir, &pos))
goto error;
- CALLOC_ARRAY(ent, 1);
- /* pathbuf.buf is already in r->objects->odb_by_path */
- ent->path = strbuf_detach(&pathbuf, NULL);
+ CALLOC_ARRAY(alternate, 1);
+ alternate->odb = odb;
+ /* pathbuf.buf is already in r->objects->source_by_path */
+ alternate->path = strbuf_detach(&pathbuf, NULL);
/* add the alternate entry */
- *r->objects->odb_tail = ent;
- r->objects->odb_tail = &(ent->next);
- ent->next = NULL;
- assert(r->objects->odb_by_path);
- kh_value(r->objects->odb_by_path, pos) = ent;
+ *odb->sources_tail = alternate;
+ odb->sources_tail = &(alternate->next);
+ alternate->next = NULL;
+ assert(odb->source_by_path);
+ kh_value(odb->source_by_path, pos) = alternate;
/* recursively add alternates */
- read_info_alternates(r, ent->path, depth + 1);
+ read_info_alternates(odb, alternate->path, depth + 1);
ret = 0;
error:
strbuf_release(&tmp);
@@ -219,7 +224,7 @@ static const char *parse_alt_odb_entry(const char *string,
return end;
}
-static void link_alt_odb_entries(struct repository *r, const char *alt,
+static void link_alt_odb_entries(struct object_database *odb, const char *alt,
int sep, const char *relative_base, int depth)
{
struct strbuf objdirbuf = STRBUF_INIT;
@@ -234,20 +239,20 @@ static void link_alt_odb_entries(struct repository *r, const char *alt,
return;
}
- strbuf_realpath(&objdirbuf, r->objects->odb->path, 1);
+ strbuf_realpath(&objdirbuf, odb->sources->path, 1);
while (*alt) {
alt = parse_alt_odb_entry(alt, sep, &entry);
if (!entry.len)
continue;
- link_alt_odb_entry(r, &entry,
+ link_alt_odb_entry(odb, &entry,
relative_base, depth, objdirbuf.buf);
}
strbuf_release(&entry);
strbuf_release(&objdirbuf);
}
-static void read_info_alternates(struct repository *r,
+static void read_info_alternates(struct object_database *odb,
const char *relative_base,
int depth)
{
@@ -261,15 +266,16 @@ static void read_info_alternates(struct repository *r,
return;
}
- link_alt_odb_entries(r, buf.buf, '\n', relative_base, depth);
+ link_alt_odb_entries(odb, buf.buf, '\n', relative_base, depth);
strbuf_release(&buf);
free(path);
}
-void add_to_alternates_file(const char *reference)
+void odb_add_to_alternates_file(struct object_database *odb,
+ const char *reference)
{
struct lock_file lock = LOCK_INIT;
- char *alts = repo_git_path(the_repository, "objects/info/alternates");
+ char *alts = repo_git_path(odb->repo, "objects/info/alternates");
FILE *in, *out;
int found = 0;
@@ -302,82 +308,81 @@ void add_to_alternates_file(const char *reference)
fprintf_or_die(out, "%s\n", reference);
if (commit_lock_file(&lock))
die_errno(_("unable to move new alternates file into place"));
- if (the_repository->objects->loaded_alternates)
- link_alt_odb_entries(the_repository, reference,
+ if (odb->loaded_alternates)
+ link_alt_odb_entries(odb, reference,
'\n', NULL, 0);
}
free(alts);
}
-void add_to_alternates_memory(const char *reference)
+void odb_add_to_alternates_memory(struct object_database *odb,
+ const char *reference)
{
/*
* Make sure alternates are initialized, or else our entry may be
* overwritten when they are.
*/
- prepare_alt_odb(the_repository);
+ odb_prepare_alternates(odb);
- link_alt_odb_entries(the_repository, reference,
+ link_alt_odb_entries(odb, reference,
'\n', NULL, 0);
}
-struct object_directory *set_temporary_primary_odb(const char *dir, int will_destroy)
+struct odb_source *odb_set_temporary_primary_source(struct object_database *odb,
+ const char *dir, int will_destroy)
{
- struct object_directory *new_odb;
+ struct odb_source *source;
/*
* Make sure alternates are initialized, or else our entry may be
* overwritten when they are.
*/
- prepare_alt_odb(the_repository);
+ odb_prepare_alternates(odb);
/*
* Make a new primary odb and link the old primary ODB in as an
* alternate
*/
- new_odb = xcalloc(1, sizeof(*new_odb));
- new_odb->path = xstrdup(dir);
+ source = xcalloc(1, sizeof(*source));
+ source->odb = odb;
+ source->path = xstrdup(dir);
/*
* Disable ref updates while a temporary odb is active, since
* the objects in the database may roll back.
*/
- new_odb->disable_ref_updates = 1;
- new_odb->will_destroy = will_destroy;
- new_odb->next = the_repository->objects->odb;
- the_repository->objects->odb = new_odb;
- return new_odb->next;
+ source->disable_ref_updates = 1;
+ source->will_destroy = will_destroy;
+ source->next = odb->sources;
+ odb->sources = source;
+ return source->next;
}
-static void free_object_directory(struct object_directory *odb)
+static void free_object_directory(struct odb_source *source)
{
- free(odb->path);
- odb_clear_loose_cache(odb);
- loose_object_map_clear(&odb->loose_map);
- free(odb);
+ free(source->path);
+ odb_clear_loose_cache(source);
+ loose_object_map_clear(&source->loose_map);
+ free(source);
}
-void restore_primary_odb(struct object_directory *restore_odb, const char *old_path)
+void odb_restore_primary_source(struct object_database *odb,
+ struct odb_source *restore_source,
+ const char *old_path)
{
- struct object_directory *cur_odb = the_repository->objects->odb;
+ struct odb_source *cur_source = odb->sources;
- if (strcmp(old_path, cur_odb->path))
+ if (strcmp(old_path, cur_source->path))
BUG("expected %s as primary object store; found %s",
- old_path, cur_odb->path);
+ old_path, cur_source->path);
- if (cur_odb->next != restore_odb)
+ if (cur_source->next != restore_source)
BUG("we expect the old primary object store to be the first alternate");
- the_repository->objects->odb = restore_odb;
- free_object_directory(cur_odb);
+ odb->sources = restore_source;
+ free_object_directory(cur_source);
}
-/*
- * Compute the exact path an alternate is at and returns it. In case of
- * error NULL is returned and the human readable error is added to `err`
- * `path` may be relative and should point to $GIT_DIR.
- * `err` must not be null.
- */
char *compute_alternate_path(const char *path, struct strbuf *err)
{
char *ref_git = NULL;
@@ -442,15 +447,15 @@ out:
return ref_git;
}
-struct object_directory *find_odb(struct repository *r, const char *obj_dir)
+struct odb_source *odb_find_source(struct object_database *odb, const char *obj_dir)
{
- struct object_directory *odb;
+ struct odb_source *source;
char *obj_dir_real = real_pathdup(obj_dir, 1);
struct strbuf odb_path_real = STRBUF_INIT;
- prepare_alt_odb(r);
- for (odb = r->objects->odb; odb; odb = odb->next) {
- strbuf_realpath(&odb_path_real, odb->path, 1);
+ odb_prepare_alternates(odb);
+ for (source = odb->sources; source; source = source->next) {
+ strbuf_realpath(&odb_path_real, source->path, 1);
if (!strcmp(obj_dir_real, odb_path_real.buf))
break;
}
@@ -458,17 +463,24 @@ struct object_directory *find_odb(struct repository *r, const char *obj_dir)
free(obj_dir_real);
strbuf_release(&odb_path_real);
- if (!odb)
+ if (!source)
die(_("could not find object directory matching %s"), obj_dir);
- return odb;
+ return source;
}
-static void fill_alternate_refs_command(struct child_process *cmd,
+void odb_add_submodule_source_by_path(struct object_database *odb,
+ const char *path)
+{
+ string_list_insert(&odb->submodule_source_paths, path);
+}
+
+static void fill_alternate_refs_command(struct repository *repo,
+ struct child_process *cmd,
const char *repo_path)
{
const char *value;
- if (!git_config_get_value("core.alternateRefsCommand", &value)) {
+ if (!repo_config_get_value(repo, "core.alternateRefsCommand", &value)) {
cmd->use_shell = 1;
strvec_push(&cmd->args, value);
@@ -480,7 +492,7 @@ static void fill_alternate_refs_command(struct child_process *cmd,
strvec_push(&cmd->args, "for-each-ref");
strvec_push(&cmd->args, "--format=%(objectname)");
- if (!git_config_get_value("core.alternateRefsPrefixes", &value)) {
+ if (!repo_config_get_value(repo, "core.alternateRefsPrefixes", &value)) {
strvec_push(&cmd->args, "--");
strvec_split(&cmd->args, value);
}
@@ -490,15 +502,16 @@ static void fill_alternate_refs_command(struct child_process *cmd,
cmd->out = -1;
}
-static void read_alternate_refs(const char *path,
- alternate_ref_fn *cb,
- void *data)
+static void read_alternate_refs(struct repository *repo,
+ const char *path,
+ odb_for_each_alternate_ref_fn *cb,
+ void *payload)
{
struct child_process cmd = CHILD_PROCESS_INIT;
struct strbuf line = STRBUF_INIT;
FILE *fh;
- fill_alternate_refs_command(&cmd, path);
+ fill_alternate_refs_command(repo, &cmd, path);
if (start_command(&cmd))
return;
@@ -508,13 +521,13 @@ static void read_alternate_refs(const char *path,
struct object_id oid;
const char *p;
- if (parse_oid_hex(line.buf, &oid, &p) || *p) {
+ if (parse_oid_hex_algop(line.buf, &oid, &p, repo->hash_algo) || *p) {
warning(_("invalid line while parsing alternate refs: %s"),
line.buf);
break;
}
- cb(&oid, data);
+ cb(&oid, payload);
}
fclose(fh);
@@ -523,18 +536,18 @@ static void read_alternate_refs(const char *path,
}
struct alternate_refs_data {
- alternate_ref_fn *fn;
- void *data;
+ odb_for_each_alternate_ref_fn *fn;
+ void *payload;
};
-static int refs_from_alternate_cb(struct object_directory *e,
- void *data)
+static int refs_from_alternate_cb(struct odb_source *alternate,
+ void *payload)
{
struct strbuf path = STRBUF_INIT;
size_t base_len;
- struct alternate_refs_data *cb = data;
+ struct alternate_refs_data *cb = payload;
- if (!strbuf_realpath(&path, e->path, 0))
+ if (!strbuf_realpath(&path, alternate->path, 0))
goto out;
if (!strbuf_strip_suffix(&path, "/objects"))
goto out;
@@ -546,50 +559,52 @@ static int refs_from_alternate_cb(struct object_directory *e,
goto out;
strbuf_setlen(&path, base_len);
- read_alternate_refs(path.buf, cb->fn, cb->data);
+ read_alternate_refs(alternate->odb->repo, path.buf, cb->fn, cb->payload);
out:
strbuf_release(&path);
return 0;
}
-void for_each_alternate_ref(alternate_ref_fn fn, void *data)
+void odb_for_each_alternate_ref(struct object_database *odb,
+ odb_for_each_alternate_ref_fn cb, void *payload)
{
- struct alternate_refs_data cb;
- cb.fn = fn;
- cb.data = data;
- foreach_alt_odb(refs_from_alternate_cb, &cb);
+ struct alternate_refs_data data;
+ data.fn = cb;
+ data.payload = payload;
+ odb_for_each_alternate(odb, refs_from_alternate_cb, &data);
}
-int foreach_alt_odb(alt_odb_fn fn, void *cb)
+int odb_for_each_alternate(struct object_database *odb,
+ odb_for_each_alternate_fn cb, void *payload)
{
- struct object_directory *ent;
+ struct odb_source *alternate;
int r = 0;
- prepare_alt_odb(the_repository);
- for (ent = the_repository->objects->odb->next; ent; ent = ent->next) {
- r = fn(ent, cb);
+ odb_prepare_alternates(odb);
+ for (alternate = odb->sources->next; alternate; alternate = alternate->next) {
+ r = cb(alternate, payload);
if (r)
break;
}
return r;
}
-void prepare_alt_odb(struct repository *r)
+void odb_prepare_alternates(struct object_database *odb)
{
- if (r->objects->loaded_alternates)
+ if (odb->loaded_alternates)
return;
- link_alt_odb_entries(r, r->objects->alternate_db, PATH_SEP, NULL, 0);
+ link_alt_odb_entries(odb, odb->alternate_db, PATH_SEP, NULL, 0);
- read_info_alternates(r, r->objects->odb->path, 0);
- r->objects->loaded_alternates = 1;
+ read_info_alternates(odb, odb->sources->path, 0);
+ odb->loaded_alternates = 1;
}
-int has_alt_odb(struct repository *r)
+int odb_has_alternates(struct object_database *odb)
{
- prepare_alt_odb(r);
- return !!r->objects->odb->next;
+ odb_prepare_alternates(odb);
+ return !!odb->sources->next;
}
int obj_read_use_lock = 0;
@@ -615,7 +630,24 @@ void disable_obj_read_lock(void)
int fetch_if_missing = 1;
-static int do_oid_object_info_extended(struct repository *r,
+static int register_all_submodule_sources(struct object_database *odb)
+{
+ int ret = odb->submodule_source_paths.nr;
+
+ for (size_t i = 0; i < odb->submodule_source_paths.nr; i++)
+ odb_add_to_alternates_memory(odb,
+ odb->submodule_source_paths.items[i].string);
+ if (ret) {
+ string_list_clear(&odb->submodule_source_paths, 0);
+ trace2_data_intmax("submodule", odb->repo,
+ "register_all_submodule_sources/registered", ret);
+ if (git_env_bool("GIT_TEST_FATAL_REGISTER_SUBMODULE_ODB", 0))
+ BUG("register_all_submodule_sources() called");
+ }
+ return ret;
+}
+
+static int do_oid_object_info_extended(struct object_database *odb,
const struct object_id *oid,
struct object_info *oi, unsigned flags)
{
@@ -628,7 +660,7 @@ static int do_oid_object_info_extended(struct repository *r,
if (flags & OBJECT_INFO_LOOKUP_REPLACE)
- real = lookup_replace_object(r, oid);
+ real = lookup_replace_object(odb->repo, oid);
if (is_null_oid(real))
return -1;
@@ -636,7 +668,7 @@ static int do_oid_object_info_extended(struct repository *r,
if (!oi)
oi = &blank_oi;
- co = find_cached_object(r->objects, real);
+ co = find_cached_object(odb, real);
if (co) {
if (oi->typep)
*(oi->typep) = co->type;
@@ -645,7 +677,7 @@ static int do_oid_object_info_extended(struct repository *r,
if (oi->disk_sizep)
*(oi->disk_sizep) = 0;
if (oi->delta_base_oid)
- oidclr(oi->delta_base_oid, the_repository->hash_algo);
+ oidclr(oi->delta_base_oid, odb->repo->hash_algo);
if (oi->contentp)
*oi->contentp = xmemdupz(co->buf, co->size);
oi->whence = OI_CACHED;
@@ -653,36 +685,35 @@ static int do_oid_object_info_extended(struct repository *r,
}
while (1) {
- if (find_pack_entry(r, real, &e))
+ if (find_pack_entry(odb->repo, real, &e))
break;
/* Most likely it's a loose object. */
- if (!loose_object_info(r, real, oi, flags))
+ if (!loose_object_info(odb->repo, real, oi, flags))
return 0;
/* Not a loose object; someone else may have just packed it. */
if (!(flags & OBJECT_INFO_QUICK)) {
- reprepare_packed_git(r);
- if (find_pack_entry(r, real, &e))
+ reprepare_packed_git(odb->repo);
+ if (find_pack_entry(odb->repo, real, &e))
break;
}
/*
- * If r is the_repository, this might be an attempt at
- * accessing a submodule object as if it were in the_repository
- * (having called add_submodule_odb() on that submodule's ODB).
- * If any such ODBs exist, register them and try again.
+ * This might be an attempt at accessing a submodule object as
+ * if it were in main object store (having called
+ * `odb_add_submodule_source_by_path()` on that submodule's
+ * ODB). If any such ODBs exist, register them and try again.
*/
- if (r == the_repository &&
- register_all_submodule_odb_as_alternates())
+ if (register_all_submodule_sources(odb))
/* We added some alternates; retry */
continue;
/* Check if it is a missing object */
- if (fetch_if_missing && repo_has_promisor_remote(r) &&
+ if (fetch_if_missing && repo_has_promisor_remote(odb->repo) &&
!already_retried &&
!(flags & OBJECT_INFO_SKIP_FETCH_OBJECT)) {
- promisor_remote_get_direct(r, real, 1);
+ promisor_remote_get_direct(odb->repo, real, 1);
already_retried = 1;
continue;
}
@@ -692,7 +723,7 @@ static int do_oid_object_info_extended(struct repository *r,
if ((flags & OBJECT_INFO_LOOKUP_REPLACE) && !oideq(real, oid))
die(_("replacement %s not found for %s"),
oid_to_hex(real), oid_to_hex(oid));
- if ((p = has_packed_and_bad(r, real)))
+ if ((p = has_packed_and_bad(odb->repo, real)))
die(_("packed object %s (stored in %s) is corrupt"),
oid_to_hex(real), p->pack_name);
}
@@ -705,10 +736,10 @@ static int do_oid_object_info_extended(struct repository *r,
* information below, so return early.
*/
return 0;
- rtype = packed_object_info(r, e.p, e.offset, oi);
+ rtype = packed_object_info(odb->repo, e.p, e.offset, oi);
if (rtype < 0) {
mark_bad_packed_object(e.p, real);
- return do_oid_object_info_extended(r, real, oi, 0);
+ return do_oid_object_info_extended(odb, real, oi, 0);
} else if (oi->whence == OI_PACKED) {
oi->u.packed.offset = e.offset;
oi->u.packed.pack = e.p;
@@ -732,10 +763,10 @@ static int oid_object_info_convert(struct repository *r,
void *content;
int ret;
- if (repo_oid_to_algop(r, input_oid, the_hash_algo, &oid)) {
+ if (repo_oid_to_algop(r, input_oid, r->hash_algo, &oid)) {
if (do_die)
die(_("missing mapping of %s to %s"),
- oid_to_hex(input_oid), the_hash_algo->name);
+ oid_to_hex(input_oid), r->hash_algo->name);
return -1;
}
@@ -756,7 +787,7 @@ static int oid_object_info_convert(struct repository *r,
oi = &new_oi;
}
- ret = oid_object_info_extended(r, &oid, oi, flags);
+ ret = odb_read_object_info_extended(r->objects, &oid, oi, flags);
if (ret)
return -1;
if (oi == input_oi)
@@ -766,8 +797,8 @@ static int oid_object_info_convert(struct repository *r,
struct strbuf outbuf = STRBUF_INIT;
if (type != OBJ_BLOB) {
- ret = convert_object_file(the_repository, &outbuf,
- the_hash_algo, input_algo,
+ ret = convert_object_file(r, &outbuf,
+ r->hash_algo, input_algo,
content, size, type, !do_die);
free(content);
if (ret == -1)
@@ -799,52 +830,54 @@ static int oid_object_info_convert(struct repository *r,
return ret;
}
-int oid_object_info_extended(struct repository *r, const struct object_id *oid,
- struct object_info *oi, unsigned flags)
+int odb_read_object_info_extended(struct object_database *odb,
+ const struct object_id *oid,
+ struct object_info *oi,
+ unsigned flags)
{
int ret;
- if (oid->algo && (hash_algo_by_ptr(r->hash_algo) != oid->algo))
- return oid_object_info_convert(r, oid, oi, flags);
+ if (oid->algo && (hash_algo_by_ptr(odb->repo->hash_algo) != oid->algo))
+ return oid_object_info_convert(odb->repo, oid, oi, flags);
obj_read_lock();
- ret = do_oid_object_info_extended(r, oid, oi, flags);
+ ret = do_oid_object_info_extended(odb, oid, oi, flags);
obj_read_unlock();
return ret;
}
/* returns enum object_type or negative */
-int oid_object_info(struct repository *r,
- const struct object_id *oid,
- unsigned long *sizep)
+int odb_read_object_info(struct object_database *odb,
+ const struct object_id *oid,
+ unsigned long *sizep)
{
enum object_type type;
struct object_info oi = OBJECT_INFO_INIT;
oi.typep = &type;
oi.sizep = sizep;
- if (oid_object_info_extended(r, oid, &oi,
- OBJECT_INFO_LOOKUP_REPLACE) < 0)
+ if (odb_read_object_info_extended(odb, oid, &oi,
+ OBJECT_INFO_LOOKUP_REPLACE) < 0)
return -1;
return type;
}
-int pretend_object_file(struct repository *repo,
- void *buf, unsigned long len, enum object_type type,
- struct object_id *oid)
+int odb_pretend_object(struct object_database *odb,
+ void *buf, unsigned long len, enum object_type type,
+ struct object_id *oid)
{
struct cached_object_entry *co;
char *co_buf;
- hash_object_file(repo->hash_algo, buf, len, type, oid);
- if (has_object(repo, oid, 0) ||
- find_cached_object(repo->objects, oid))
+ hash_object_file(odb->repo->hash_algo, buf, len, type, oid);
+ if (odb_has_object(odb, oid, 0) ||
+ find_cached_object(odb, oid))
return 0;
- ALLOC_GROW(repo->objects->cached_objects,
- repo->objects->cached_object_nr + 1, repo->objects->cached_object_alloc);
- co = &repo->objects->cached_objects[repo->objects->cached_object_nr++];
+ ALLOC_GROW(odb->cached_objects,
+ odb->cached_object_nr + 1, odb->cached_object_alloc);
+ co = &odb->cached_objects[odb->cached_object_nr++];
co->value.size = len;
co->value.type = type;
co_buf = xmalloc(len);
@@ -854,15 +887,10 @@ int pretend_object_file(struct repository *repo,
return 0;
}
-/*
- * This function dies on corrupt objects; the callers who want to
- * deal with them should arrange to call oid_object_info_extended() and give
- * error messages themselves.
- */
-void *repo_read_object_file(struct repository *r,
- const struct object_id *oid,
- enum object_type *type,
- unsigned long *size)
+void *odb_read_object(struct object_database *odb,
+ const struct object_id *oid,
+ enum object_type *type,
+ unsigned long *size)
{
struct object_info oi = OBJECT_INFO_INIT;
unsigned flags = OBJECT_INFO_DIE_IF_CORRUPT | OBJECT_INFO_LOOKUP_REPLACE;
@@ -871,17 +899,17 @@ void *repo_read_object_file(struct repository *r,
oi.typep = type;
oi.sizep = size;
oi.contentp = &data;
- if (oid_object_info_extended(r, oid, &oi, flags))
+ if (odb_read_object_info_extended(odb, oid, &oi, flags))
return NULL;
return data;
}
-void *read_object_with_reference(struct repository *r,
- const struct object_id *oid,
- enum object_type required_type,
- unsigned long *size,
- struct object_id *actual_oid_return)
+void *odb_read_object_peeled(struct object_database *odb,
+ const struct object_id *oid,
+ enum object_type required_type,
+ unsigned long *size,
+ struct object_id *actual_oid_return)
{
enum object_type type;
void *buffer;
@@ -893,7 +921,7 @@ void *read_object_with_reference(struct repository *r,
int ref_length = -1;
const char *ref_type = NULL;
- buffer = repo_read_object_file(r, &actual_oid, &type, &isize);
+ buffer = odb_read_object(odb, &actual_oid, &type, &isize);
if (!buffer)
return NULL;
if (type == required_type) {
@@ -913,9 +941,10 @@ void *read_object_with_reference(struct repository *r,
}
ref_length = strlen(ref_type);
- if (ref_length + the_hash_algo->hexsz > isize ||
+ if (ref_length + odb->repo->hash_algo->hexsz > isize ||
memcmp(buffer, ref_type, ref_length) ||
- get_oid_hex((char *) buffer + ref_length, &actual_oid)) {
+ get_oid_hex_algop((char *) buffer + ref_length, &actual_oid,
+ odb->repo->hash_algo)) {
free(buffer);
return NULL;
}
@@ -925,7 +954,7 @@ void *read_object_with_reference(struct repository *r,
}
}
-int has_object(struct repository *r, const struct object_id *oid,
+int odb_has_object(struct object_database *odb, const struct object_id *oid,
unsigned flags)
{
unsigned object_info_flags = 0;
@@ -937,12 +966,13 @@ int has_object(struct repository *r, const struct object_id *oid,
if (!(flags & HAS_OBJECT_FETCH_PROMISOR))
object_info_flags |= OBJECT_INFO_SKIP_FETCH_OBJECT;
- return oid_object_info_extended(r, oid, NULL, object_info_flags) >= 0;
+ return odb_read_object_info_extended(odb, oid, NULL, object_info_flags) >= 0;
}
-void assert_oid_type(const struct object_id *oid, enum object_type expect)
+void odb_assert_oid_type(struct object_database *odb,
+ const struct object_id *oid, enum object_type expect)
{
- enum object_type type = oid_object_info(the_repository, oid, NULL);
+ enum object_type type = odb_read_object_info(odb, oid, NULL);
if (type < 0)
die(_("%s is not a valid object"), oid_to_hex(oid));
if (type != expect)
@@ -950,31 +980,33 @@ void assert_oid_type(const struct object_id *oid, enum object_type expect)
type_name(expect));
}
-struct raw_object_store *raw_object_store_new(void)
+struct object_database *odb_new(struct repository *repo)
{
- struct raw_object_store *o = xmalloc(sizeof(*o));
+ struct object_database *o = xmalloc(sizeof(*o));
memset(o, 0, sizeof(*o));
+ o->repo = repo;
INIT_LIST_HEAD(&o->packed_git_mru);
hashmap_init(&o->pack_map, pack_map_entry_cmp, NULL, 0);
pthread_mutex_init(&o->replace_mutex, NULL);
+ string_list_init_dup(&o->submodule_source_paths);
return o;
}
-static void free_object_directories(struct raw_object_store *o)
+static void free_object_directories(struct object_database *o)
{
- while (o->odb) {
- struct object_directory *next;
+ while (o->sources) {
+ struct odb_source *next;
- next = o->odb->next;
- free_object_directory(o->odb);
- o->odb = next;
+ next = o->sources->next;
+ free_object_directory(o->sources);
+ o->sources = next;
}
- kh_destroy_odb_path_map(o->odb_by_path);
- o->odb_by_path = NULL;
+ kh_destroy_odb_path_map(o->source_by_path);
+ o->source_by_path = NULL;
}
-void raw_object_store_clear(struct raw_object_store *o)
+void odb_clear(struct object_database *o)
{
FREE_AND_NULL(o->alternate_db);
@@ -986,7 +1018,7 @@ void raw_object_store_clear(struct raw_object_store *o)
o->commit_graph_attempted = 0;
free_object_directories(o);
- o->odb_tail = NULL;
+ o->sources_tail = NULL;
o->loaded_alternates = 0;
for (size_t i = 0; i < o->cached_object_nr; i++)
@@ -1007,4 +1039,5 @@ void raw_object_store_clear(struct raw_object_store *o)
o->packed_git = NULL;
hashmap_clear(&o->pack_map);
+ string_list_clear(&o->submodule_source_paths, 0);
}
diff --git a/odb.h b/odb.h
new file mode 100644
index 0000000000..e922f25680
--- /dev/null
+++ b/odb.h
@@ -0,0 +1,473 @@
+#ifndef ODB_H
+#define ODB_H
+
+#include "hashmap.h"
+#include "object.h"
+#include "list.h"
+#include "oidset.h"
+#include "oidmap.h"
+#include "string-list.h"
+#include "thread-utils.h"
+
+struct oidmap;
+struct oidtree;
+struct strbuf;
+struct repository;
+
+/*
+ * Compute the exact path an alternate is at and return it. In case of
+ * error, NULL is returned and a human-readable error is added to `err`.
+ * `path` may be relative and should point to $GIT_DIR.
+ * `err` must not be null.
+ */
+char *compute_alternate_path(const char *path, struct strbuf *err);
+
+/*
+ * The source is the part of the object database that stores the actual
+ * objects. It thus encapsulates the logic to read and write the specific
+ * on-disk format. An object database can have multiple sources:
+ *
+ * - The primary source, which is typically located in "$GIT_DIR/objects".
+ * This is where new objects are usually written to.
+ *
+ * - Alternate sources, which are configured via "objects/info/alternates" or
+ * via the GIT_ALTERNATE_OBJECT_DIRECTORIES environment variable. These
+ * alternate sources are only used to read objects.
+ */
+struct odb_source {
+ struct odb_source *next;
+
+ /* Object database that owns this object source. */
+ struct object_database *odb;
+
+ /*
+ * Used to store the results of readdir(3) calls when we are OK
+ * sacrificing accuracy due to races for speed. That includes
+ * object existence with OBJECT_INFO_QUICK, as well as
+ * our search for unique abbreviated hashes. Don't use it for tasks
+ * requiring greater accuracy!
+ *
+ * Be sure to call odb_load_loose_cache() before using.
+ */
+ uint32_t loose_objects_subdir_seen[8]; /* 256 bits */
+ struct oidtree *loose_objects_cache;
+
+ /* Map between object IDs for loose objects. */
+ struct loose_object_map *loose_map;
+
+ /*
+ * This is a temporary object store created by the tmp_objdir
+ * facility. Disable ref updates since the objects in the store
+ * might be discarded on rollback.
+ */
+ int disable_ref_updates;
+
+ /*
+ * This object store is ephemeral, so there is no need to fsync.
+ */
+ int will_destroy;
+
+ /*
+ * Path to the source. If this is a relative path, it is relative to
+ * the current working directory.
+ */
+ char *path;
+};
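+
+/*
+ * A minimal sketch of walking the list of sources once the alternates have
+ * been prepared; the first entry is the primary source, every later entry an
+ * alternate (the variable names here are purely illustrative):
+ *
+ *	struct odb_source *source;
+ *
+ *	odb_prepare_alternates(repo->objects);
+ *	printf("primary: %s\n", repo->objects->sources->path);
+ *	for (source = repo->objects->sources->next; source; source = source->next)
+ *		printf("alternate: %s\n", source->path);
+ */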
+
+struct packed_git;
+struct multi_pack_index;
+struct cached_object_entry;
+
+/*
+ * The object database encapsulates access to objects in a repository. It
+ * manages one or more sources that store the actual objects which are
+ * configured via alternates.
+ */
+struct object_database {
+ /* Repository that owns this database. */
+ struct repository *repo;
+
+ /*
+ * Set of all object directories; the main directory is first (and
+ * cannot be NULL after initialization). Subsequent directories are
+ * alternates.
+ */
+ struct odb_source *sources;
+ struct odb_source **sources_tail;
+ struct kh_odb_path_map *source_by_path;
+
+ int loaded_alternates;
+
+ /*
+ * A list of alternate object directories loaded from the environment;
+ * this should not generally need to be accessed directly, but will
+ * populate the "sources" list when odb_prepare_alternates() is run.
+ */
+ char *alternate_db;
+
+ /*
+ * Objects that should be substituted by other objects
+ * (see git-replace(1)).
+ */
+ struct oidmap replace_map;
+ unsigned replace_map_initialized : 1;
+ pthread_mutex_t replace_mutex; /* protect object replace functions */
+
+ struct commit_graph *commit_graph;
+ unsigned commit_graph_attempted : 1; /* if loading has been attempted */
+
+ /*
+ * private data
+ *
+ * should only be accessed directly by packfile.c and midx.c
+ */
+ struct multi_pack_index *multi_pack_index;
+
+ /*
+ * private data
+ *
+ * should only be accessed directly by packfile.c
+ */
+
+ struct packed_git *packed_git;
+ /* A most-recently-used ordered version of the packed_git list. */
+ struct list_head packed_git_mru;
+
+ struct {
+ struct packed_git **packs;
+ unsigned flags;
+ } kept_pack_cache;
+
+ /*
+ * This is meant to hold a *small* number of objects that you would
+ * want odb_read_object() to be able to return, but yet you do not want
+ * to write them into the object store (e.g. a browse-only
+ * application).
+ */
+ struct cached_object_entry *cached_objects;
+ size_t cached_object_nr, cached_object_alloc;
+
+ /*
+ * A map of packfiles to packed_git structs for tracking which
+ * packs have been loaded already.
+ */
+ struct hashmap pack_map;
+
+ /*
+ * A fast, rough count of the number of objects in the repository.
+ * These two fields are not meant for direct access. Use
+ * repo_approximate_object_count() instead.
+ */
+ unsigned long approximate_object_count;
+ unsigned approximate_object_count_valid : 1;
+
+ /*
+ * Whether packed_git has already been populated with this repository's
+ * packs.
+ */
+ unsigned packed_git_initialized : 1;
+
+ /*
+ * Submodule source paths that will be added as additional sources to
+ * allow lookup of submodule objects via the main object database.
+ */
+ struct string_list submodule_source_paths;
+};
+
+struct object_database *odb_new(struct repository *repo);
+void odb_clear(struct object_database *o);
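+
+/*
+ * A rough lifecycle sketch (repository setup and teardown normally do this on
+ * the caller's behalf):
+ *
+ *	struct object_database *odb = odb_new(repo);
+ *	... look up and read objects ...
+ *	odb_clear(odb);
+ *	free(odb);
+ */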
+
+/*
+ * Find source by its object directory path. Dies in case the source couldn't
+ * be found.
+ */
+struct odb_source *odb_find_source(struct object_database *odb, const char *obj_dir);
+
+/*
+ * Replace the current writable object directory with the specified temporary
+ * object directory; returns the former primary source.
+ */
+struct odb_source *odb_set_temporary_primary_source(struct object_database *odb,
+ const char *dir, int will_destroy);
+
+/*
+ * Restore the primary source that was previously replaced by
+ * `odb_set_temporary_primary_source()`.
+ */
+void odb_restore_primary_source(struct object_database *odb,
+ struct odb_source *restore_source,
+ const char *old_path);
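+
+/*
+ * A minimal sketch of pairing the two calls; "tmp_dir" stands in for the path
+ * of a throw-away object directory and is purely illustrative:
+ *
+ *	struct odb_source *prev =
+ *		odb_set_temporary_primary_source(repo->objects, tmp_dir, 1);
+ *	... write temporary objects into tmp_dir ...
+ *	odb_restore_primary_source(repo->objects, prev, tmp_dir);
+ */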
+
+/*
+ * Call odb_add_submodule_source_by_path() to add the submodule at the given
+ * path to a list. The object databases of all submodules in that list will be
+ * added as additional sources when looking up objects.
+ */
+void odb_add_submodule_source_by_path(struct object_database *odb,
+ const char *path);
+
+/*
+ * Iterate through all alternates of the database and execute the provided
+ * callback function for each of them. Stop iterating once the callback
+ * function returns a non-zero value, in which case the value is bubbled up
+ * from the callback.
+ */
+typedef int odb_for_each_alternate_fn(struct odb_source *, void *);
+int odb_for_each_alternate(struct object_database *odb,
+ odb_for_each_alternate_fn cb, void *payload);
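+
+/*
+ * A minimal usage sketch; "report_alternate" is a made-up callback name:
+ *
+ *	static int report_alternate(struct odb_source *alternate, void *payload)
+ *	{
+ *		printf("alternate: %s\n", alternate->path);
+ *		return 0;
+ *	}
+ *
+ *	odb_for_each_alternate(repo->objects, report_alternate, NULL);
+ */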
+
+/*
+ * Iterate through all alternates of the database and yield their respective
+ * references.
+ */
+typedef void odb_for_each_alternate_ref_fn(const struct object_id *oid, void *);
+void odb_for_each_alternate_ref(struct object_database *odb,
+ odb_for_each_alternate_ref_fn cb, void *payload);
+
+/*
+ * Create a temporary file rooted in the primary source's directory, or die
+ * on failure. The filename is taken from "pattern", which should have the
+ * usual "XXXXXX" trailer, and the resulting filename is written into the
+ * "temp_filename" buffer. Returns the open descriptor.
+ */
+int odb_mkstemp(struct object_database *odb,
+ struct strbuf *temp_filename, const char *pattern);
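+
+/*
+ * A minimal usage sketch, mirroring how pack-write.c creates temporary pack
+ * files:
+ *
+ *	struct strbuf tmp_file = STRBUF_INIT;
+ *	int fd = odb_mkstemp(repo->objects, &tmp_file, "pack/tmp_pack_XXXXXX");
+ *	... write to fd, then move tmp_file.buf into place ...
+ *	strbuf_release(&tmp_file);
+ */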
+
+/*
+ * Prepare alternate object sources for the given database by reading
+ * "objects/info/alternates" and opening the respective sources.
+ */
+void odb_prepare_alternates(struct object_database *odb);
+
+/*
+ * Check whether the object database has any alternates. The primary object
+ * source does not count as an alternate.
+ */
+int odb_has_alternates(struct object_database *odb);
+
+/*
+ * Add the directory to the on-disk alternates file; the new entry will also
+ * take effect in the current process.
+ */
+void odb_add_to_alternates_file(struct object_database *odb,
+ const char *dir);
+
+/*
+ * Add the directory to the in-memory list of alternate sources (along with any
+ * recursive alternates it points to), but do not modify the on-disk alternates
+ * file.
+ */
+void odb_add_to_alternates_memory(struct object_database *odb,
+ const char *dir);
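+
+/*
+ * A minimal usage sketch; the path is purely illustrative:
+ *
+ *	odb_add_to_alternates_memory(repo->objects, "/srv/shared.git/objects");
+ */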
+
+/*
+ * Read an object from the database. Returns the object data and assigns object
+ * type and size to the `type` and `size` pointers, if these pointers are
+ * non-NULL. Returns a `NULL` pointer in case the object does not exist.
+ *
+ * This function dies on corrupt objects; the callers who want to deal with
+ * them should arrange to call odb_read_object_info_extended() and give error
+ * messages themselves.
+ */
+void *odb_read_object(struct object_database *odb,
+ const struct object_id *oid,
+ enum object_type *type,
+ unsigned long *size);
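+
+/*
+ * A minimal usage sketch:
+ *
+ *	enum object_type type;
+ *	unsigned long size;
+ *	void *buf = odb_read_object(repo->objects, oid, &type, &size);
+ *
+ *	if (!buf)
+ *		return error(_("unable to read %s"), oid_to_hex(oid));
+ *	... use the buffer ...
+ *	free(buf);
+ */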
+
+void *odb_read_object_peeled(struct object_database *odb,
+ const struct object_id *oid,
+ enum object_type required_type,
+ unsigned long *size,
+ struct object_id *oid_ret);
+
+/*
+ * Add an object file to the in-memory object store, without writing it
+ * to disk.
+ *
+ * Callers are responsible for calling write_object_file to record the
+ * object in persistent storage before writing any other new objects
+ * that reference it.
+ */
+int odb_pretend_object(struct object_database *odb,
+ void *buf, unsigned long len, enum object_type type,
+ struct object_id *oid);
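+
+/*
+ * A minimal usage sketch; the buffer contents are purely illustrative:
+ *
+ *	struct object_id pretend_oid;
+ *	char buf[] = "pretend blob contents";
+ *
+ *	odb_pretend_object(repo->objects, buf, strlen(buf), OBJ_BLOB,
+ *			   &pretend_oid);
+ *	... the object can now be read back via odb_read_object() ...
+ */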
+
+struct object_info {
+ /* Request */
+ enum object_type *typep;
+ unsigned long *sizep;
+ off_t *disk_sizep;
+ struct object_id *delta_base_oid;
+ void **contentp;
+
+ /* Response */
+ enum {
+ OI_CACHED,
+ OI_LOOSE,
+ OI_PACKED,
+ OI_DBCACHED
+ } whence;
+ union {
+ /*
+ * struct {
+ * ... Nothing to expose in this case
+ * } cached;
+ * struct {
+ * ... Nothing to expose in this case
+ * } loose;
+ */
+ struct {
+ struct packed_git *pack;
+ off_t offset;
+ unsigned int is_delta;
+ } packed;
+ } u;
+};
+
+/*
+ * Initializer for a "struct object_info" that wants no items. You may
+ * also memset() the memory to all-zeroes.
+ */
+#define OBJECT_INFO_INIT { 0 }
+
+/* Invoke lookup_replace_object() on the given hash */
+#define OBJECT_INFO_LOOKUP_REPLACE 1
+/* Do not retry packed storage after checking packed and loose storage */
+#define OBJECT_INFO_QUICK 8
+/*
+ * Do not attempt to fetch the object if missing (even if fetch_if_missing is
+ * nonzero).
+ */
+#define OBJECT_INFO_SKIP_FETCH_OBJECT 16
+/*
+ * This is meant for bulk prefetching of missing blobs in a partial
+ * clone. Implies OBJECT_INFO_SKIP_FETCH_OBJECT and OBJECT_INFO_QUICK
+ */
+#define OBJECT_INFO_FOR_PREFETCH (OBJECT_INFO_SKIP_FETCH_OBJECT | OBJECT_INFO_QUICK)
+
+/* Die if object corruption (not just an object being missing) was detected. */
+#define OBJECT_INFO_DIE_IF_CORRUPT 32
+
+/*
+ * Read object info from the object database and populate the `object_info`
+ * structure. Returns 0 on success, a negative error code otherwise.
+ */
+int odb_read_object_info_extended(struct object_database *odb,
+ const struct object_id *oid,
+ struct object_info *oi,
+ unsigned flags);
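+
+/*
+ * A minimal usage sketch that queries only the type and on-disk size:
+ *
+ *	struct object_info oi = OBJECT_INFO_INIT;
+ *	enum object_type type;
+ *	off_t disk_size;
+ *
+ *	oi.typep = &type;
+ *	oi.disk_sizep = &disk_size;
+ *	if (odb_read_object_info_extended(repo->objects, oid, &oi,
+ *					  OBJECT_INFO_LOOKUP_REPLACE) < 0)
+ *		return error(_("unable to get info for %s"), oid_to_hex(oid));
+ */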
+
+/*
+ * Read a subset of object info for the given object ID. Returns an `enum
+ * object_type` on success, a negative error code otherwise. If successful and
+ * `sizep` is non-NULL, then the size of the object will be written to the
+ * pointer.
+ */
+int odb_read_object_info(struct object_database *odb,
+ const struct object_id *oid,
+ unsigned long *sizep);
+
+enum {
+ /* Retry packed storage after checking packed and loose storage */
+ HAS_OBJECT_RECHECK_PACKED = (1 << 0),
+ /* Allow fetching the object in case the repository has a promisor remote. */
+ HAS_OBJECT_FETCH_PROMISOR = (1 << 1),
+};
+
+/*
+ * Returns 1 if the object exists. This function will not lazily fetch objects
+ * in a partial clone by default.
+ */
+int odb_has_object(struct object_database *odb,
+ const struct object_id *oid,
+ unsigned flags);
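+
+/*
+ * A minimal usage sketch that also allows fetching from a promisor remote:
+ *
+ *	if (!odb_has_object(repo->objects, oid,
+ *			    HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR))
+ *		die(_("unable to find %s"), oid_to_hex(oid));
+ */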
+
+void odb_assert_oid_type(struct object_database *odb,
+ const struct object_id *oid, enum object_type expect);
+
+/*
+ * Enabling the object read lock allows multiple threads to safely call the
+ * following functions in parallel: odb_read_object(), odb_read_object_peeled(),
+ * odb_read_object_info() and odb_read_object_info_extended().
+ *
+ * obj_read_lock() and obj_read_unlock() may also be used to protect other
+ * sections which cannot execute in parallel with object reading. Since the used
+ * lock is a recursive mutex, these sections can even contain calls to object
+ * reading functions. However, beware that in these cases zlib inflation won't
+ * be performed in parallel, losing performance.
+ *
+ * TODO: odb_read_object_info_extended()'s call stack has a recursive behavior. If
+ * any of its callees end up calling it, this recursive call won't benefit from
+ * parallel inflation.
+ */
+void enable_obj_read_lock(void);
+void disable_obj_read_lock(void);
+
+extern int obj_read_use_lock;
+extern pthread_mutex_t obj_read_mutex;
+
+static inline void obj_read_lock(void)
+{
+ if (obj_read_use_lock)
+ pthread_mutex_lock(&obj_read_mutex);
+}
+
+static inline void obj_read_unlock(void)
+{
+ if (obj_read_use_lock)
+ pthread_mutex_unlock(&obj_read_mutex);
+}
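+
+/*
+ * A minimal sketch of guarding a section that must not run concurrently with
+ * object reads in other threads (enable_obj_read_lock() must already have
+ * been called):
+ *
+ *	obj_read_lock();
+ *	... touch state shared with object-reading threads ...
+ *	obj_read_unlock();
+ */
+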
+/* Flags for for_each_*_object(). */
+enum for_each_object_flags {
+ /* Iterate only over local objects, not alternates. */
+ FOR_EACH_OBJECT_LOCAL_ONLY = (1<<0),
+
+ /* Only iterate over packs obtained from the promisor remote. */
+ FOR_EACH_OBJECT_PROMISOR_ONLY = (1<<1),
+
+ /*
+ * Visit objects within a pack in packfile order rather than .idx order
+ */
+ FOR_EACH_OBJECT_PACK_ORDER = (1<<2),
+
+ /* Only iterate over packs that are not marked as kept in-core. */
+ FOR_EACH_OBJECT_SKIP_IN_CORE_KEPT_PACKS = (1<<3),
+
+ /* Only iterate over packs that do not have .keep files. */
+ FOR_EACH_OBJECT_SKIP_ON_DISK_KEPT_PACKS = (1<<4),
+};
+
+/* Compatibility wrappers, to be removed once Git 2.51 has been released. */
+#include "repository.h"
+
+static inline int oid_object_info_extended(struct repository *r,
+ const struct object_id *oid,
+ struct object_info *oi,
+ unsigned flags)
+{
+ return odb_read_object_info_extended(r->objects, oid, oi, flags);
+}
+
+static inline int oid_object_info(struct repository *r,
+ const struct object_id *oid,
+ unsigned long *sizep)
+{
+ return odb_read_object_info(r->objects, oid, sizep);
+}
+
+static inline void *repo_read_object_file(struct repository *r,
+ const struct object_id *oid,
+ enum object_type *type,
+ unsigned long *size)
+{
+ return odb_read_object(r->objects, oid, type, size);
+}
+
+static inline int has_object(struct repository *r,
+ const struct object_id *oid,
+ unsigned flags)
+{
+ return odb_has_object(r->objects, oid, flags);
+}
+
+#endif /* ODB_H */
diff --git a/oss-fuzz/fuzz-pack-idx.c b/oss-fuzz/fuzz-pack-idx.c
index 609a343ee3..d2a92f34d9 100644
--- a/oss-fuzz/fuzz-pack-idx.c
+++ b/oss-fuzz/fuzz-pack-idx.c
@@ -1,5 +1,5 @@
#include "git-compat-util.h"
-#include "object-store.h"
+#include "odb.h"
#include "packfile.h"
int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size);
diff --git a/pack-bitmap-write.c b/pack-bitmap-write.c
index 7f400ee012..4404921521 100644
--- a/pack-bitmap-write.c
+++ b/pack-bitmap-write.c
@@ -4,7 +4,7 @@
#include "environment.h"
#include "gettext.h"
#include "hex.h"
-#include "object-store.h"
+#include "odb.h"
#include "commit.h"
#include "diff.h"
#include "revision.h"
@@ -144,8 +144,8 @@ void bitmap_writer_build_type_index(struct bitmap_writer *writer,
break;
default:
- real_type = oid_object_info(writer->to_pack->repo,
- &entry->idx.oid, NULL);
+ real_type = odb_read_object_info(writer->to_pack->repo->objects,
+ &entry->idx.oid, NULL);
break;
}
@@ -1052,7 +1052,8 @@ void bitmap_writer_finish(struct bitmap_writer *writer,
struct bitmap_disk_header header;
- int fd = odb_mkstemp(&tmp_file, "pack/tmp_bitmap_XXXXXX");
+ int fd = odb_mkstemp(writer->repo->objects, &tmp_file,
+ "pack/tmp_bitmap_XXXXXX");
if (writer->pseudo_merges_nr)
options |= BITMAP_OPT_PSEUDO_MERGES;
@@ -1087,7 +1088,7 @@ void bitmap_writer_finish(struct bitmap_writer *writer,
oid_access);
if (commit_pos < 0)
- BUG(_("trying to write commit not in index"));
+ BUG("trying to write commit not in index");
stored->commit_pos = commit_pos + base_objects;
}
diff --git a/pack-bitmap.c b/pack-bitmap.c
index ac6d62b980..15cf51166e 100644
--- a/pack-bitmap.c
+++ b/pack-bitmap.c
@@ -17,7 +17,7 @@
#include "packfile.h"
#include "repository.h"
#include "trace2.h"
-#include "object-store.h"
+#include "odb.h"
#include "list-objects-filter-options.h"
#include "midx.h"
#include "config.h"
@@ -31,6 +31,7 @@ struct stored_bitmap {
struct object_id oid;
struct ewah_bitmap *root;
struct stored_bitmap *xor;
+ size_t map_pos;
int flags;
};
@@ -314,13 +315,14 @@ static struct stored_bitmap *store_bitmap(struct bitmap_index *index,
struct ewah_bitmap *root,
const struct object_id *oid,
struct stored_bitmap *xor_with,
- int flags)
+ int flags, size_t map_pos)
{
struct stored_bitmap *stored;
khiter_t hash_pos;
int ret;
stored = xmalloc(sizeof(struct stored_bitmap));
+ stored->map_pos = map_pos;
stored->root = root;
stored->xor = xor_with;
stored->flags = flags;
@@ -376,10 +378,12 @@ static int load_bitmap_entries_v1(struct bitmap_index *index)
struct stored_bitmap *xor_bitmap = NULL;
uint32_t commit_idx_pos;
struct object_id oid;
+ size_t entry_map_pos;
if (index->map_size - index->map_pos < 6)
return error(_("corrupt ewah bitmap: truncated header for entry %d"), i);
+ entry_map_pos = index->map_pos;
commit_idx_pos = read_be32(index->map, &index->map_pos);
xor_offset = read_u8(index->map, &index->map_pos);
flags = read_u8(index->map, &index->map_pos);
@@ -402,8 +406,9 @@ static int load_bitmap_entries_v1(struct bitmap_index *index)
if (!bitmap)
return -1;
- recent_bitmaps[i % MAX_XOR_OFFSET] = store_bitmap(
- index, bitmap, &oid, xor_bitmap, flags);
+ recent_bitmaps[i % MAX_XOR_OFFSET] =
+ store_bitmap(index, bitmap, &oid, xor_bitmap, flags,
+ entry_map_pos);
}
return 0;
@@ -630,41 +635,28 @@ static int load_bitmap(struct repository *r, struct bitmap_index *bitmap_git,
bitmap_git->ext_index.positions = kh_init_oid_pos();
if (load_reverse_index(r, bitmap_git))
- goto failed;
+ return -1;
if (!(bitmap_git->commits = read_bitmap_1(bitmap_git)) ||
!(bitmap_git->trees = read_bitmap_1(bitmap_git)) ||
!(bitmap_git->blobs = read_bitmap_1(bitmap_git)) ||
!(bitmap_git->tags = read_bitmap_1(bitmap_git)))
- goto failed;
+ return -1;
if (!bitmap_git->table_lookup && load_bitmap_entries_v1(bitmap_git) < 0)
- goto failed;
+ return -1;
if (bitmap_git->base) {
if (!bitmap_is_midx(bitmap_git))
BUG("non-MIDX bitmap has non-NULL base bitmap index");
if (load_bitmap(r, bitmap_git->base, 1) < 0)
- goto failed;
+ return -1;
}
if (!recursing)
load_all_type_bitmaps(bitmap_git);
return 0;
-
-failed:
- munmap(bitmap_git->map, bitmap_git->map_size);
- bitmap_git->map = NULL;
- bitmap_git->map_size = 0;
-
- kh_destroy_oid_map(bitmap_git->bitmaps);
- bitmap_git->bitmaps = NULL;
-
- kh_destroy_oid_pos(bitmap_git->ext_index.positions);
- bitmap_git->ext_index.positions = NULL;
-
- return -1;
}
static int open_pack_bitmap(struct repository *r,
@@ -882,6 +874,7 @@ static struct stored_bitmap *lazy_bitmap_for_commit(struct bitmap_index *bitmap_
int xor_flags;
khiter_t hash_pos;
struct bitmap_lookup_table_xor_item *xor_item;
+ size_t entry_map_pos;
if (is_corrupt)
return NULL;
@@ -941,6 +934,7 @@ static struct stored_bitmap *lazy_bitmap_for_commit(struct bitmap_index *bitmap_
goto corrupt;
}
+ entry_map_pos = bitmap_git->map_pos;
bitmap_git->map_pos += sizeof(uint32_t) + sizeof(uint8_t);
xor_flags = read_u8(bitmap_git->map, &bitmap_git->map_pos);
bitmap = read_bitmap_1(bitmap_git);
@@ -948,7 +942,8 @@ static struct stored_bitmap *lazy_bitmap_for_commit(struct bitmap_index *bitmap_
if (!bitmap)
goto corrupt;
- xor_bitmap = store_bitmap(bitmap_git, bitmap, &xor_item->oid, xor_bitmap, xor_flags);
+ xor_bitmap = store_bitmap(bitmap_git, bitmap, &xor_item->oid,
+ xor_bitmap, xor_flags, entry_map_pos);
xor_items_nr--;
}
@@ -982,6 +977,7 @@ static struct stored_bitmap *lazy_bitmap_for_commit(struct bitmap_index *bitmap_
* Instead, we can skip ahead and immediately read the flags and
* ewah bitmap.
*/
+ entry_map_pos = bitmap_git->map_pos;
bitmap_git->map_pos += sizeof(uint32_t) + sizeof(uint8_t);
flags = read_u8(bitmap_git->map, &bitmap_git->map_pos);
bitmap = read_bitmap_1(bitmap_git);
@@ -989,7 +985,8 @@ static struct stored_bitmap *lazy_bitmap_for_commit(struct bitmap_index *bitmap_
if (!bitmap)
goto corrupt;
- return store_bitmap(bitmap_git, bitmap, oid, xor_bitmap, flags);
+ return store_bitmap(bitmap_git, bitmap, oid, xor_bitmap, flags,
+ entry_map_pos);
corrupt:
free(xor_items);
@@ -1363,8 +1360,8 @@ static struct bitmap *find_boundary_objects(struct bitmap_index *bitmap_git,
bitmap_set(roots_bitmap, pos);
}
- if (!cascade_pseudo_merges_1(bitmap_git, cb.base, roots_bitmap))
- bitmap_free(roots_bitmap);
+ cascade_pseudo_merges_1(bitmap_git, cb.base, roots_bitmap);
+ bitmap_free(roots_bitmap);
}
/*
@@ -1868,8 +1865,8 @@ static unsigned long get_size_by_pos(struct bitmap_index *bitmap_git,
size_t eindex_pos = pos - bitmap_num_objects_total(bitmap_git);
struct eindex *eindex = &bitmap_git->ext_index;
struct object *obj = eindex->objects[eindex_pos];
- if (oid_object_info_extended(bitmap_repo(bitmap_git), &obj->oid,
- &oi, 0) < 0)
+ if (odb_read_object_info_extended(bitmap_repo(bitmap_git)->objects, &obj->oid,
+ &oi, 0) < 0)
die(_("unable to get size of %s"), oid_to_hex(&obj->oid));
}
@@ -2852,8 +2849,9 @@ int test_bitmap_commits(struct repository *r)
die(_("failed to load bitmap indexes"));
/*
- * As this function is only used to print bitmap selected
- * commits, we don't have to read the commit table.
+ * Since this function needs to print the bitmapped
+ * commits, bypass the commit lookup table (if one exists)
+ * by forcing the bitmap to eagerly load its entries.
*/
if (bitmap_git->table_lookup) {
if (load_bitmap_entries_v1(bitmap_git) < 0)
@@ -2869,6 +2867,48 @@ int test_bitmap_commits(struct repository *r)
return 0;
}
+int test_bitmap_commits_with_offset(struct repository *r)
+{
+ struct object_id oid;
+ struct stored_bitmap *stored;
+ struct bitmap_index *bitmap_git;
+ size_t commit_idx_pos_map_pos, xor_offset_map_pos, flag_map_pos,
+ ewah_bitmap_map_pos;
+
+ bitmap_git = prepare_bitmap_git(r);
+ if (!bitmap_git)
+ die(_("failed to load bitmap indexes"));
+
+ /*
+ * Since this function needs to know the position of each individual
+ * bitmap, bypass the commit lookup table (if one exists) by forcing
+ * the bitmap to eagerly load its entries.
+ */
+ if (bitmap_git->table_lookup) {
+ if (load_bitmap_entries_v1(bitmap_git) < 0)
+ die(_("failed to load bitmap indexes"));
+ }
+
+ kh_foreach (bitmap_git->bitmaps, oid, stored, {
+ commit_idx_pos_map_pos = stored->map_pos;
+ xor_offset_map_pos = stored->map_pos + sizeof(uint32_t);
+ flag_map_pos = xor_offset_map_pos + sizeof(uint8_t);
+ ewah_bitmap_map_pos = flag_map_pos + sizeof(uint8_t);
+
+ printf_ln("%s %"PRIuMAX" %"PRIuMAX" %"PRIuMAX" %"PRIuMAX,
+ oid_to_hex(&oid),
+ (uintmax_t)commit_idx_pos_map_pos,
+ (uintmax_t)xor_offset_map_pos,
+ (uintmax_t)flag_map_pos,
+ (uintmax_t)ewah_bitmap_map_pos);
+ })
+ ;
+
+ free_bitmap_index(bitmap_git);
+
+ return 0;
+}
+
int test_bitmap_hashes(struct repository *r)
{
struct bitmap_index *bitmap_git = prepare_bitmap_git(r);
@@ -3220,8 +3260,8 @@ static off_t get_disk_usage_for_extended(struct bitmap_index *bitmap_git)
i)))
continue;
- if (oid_object_info_extended(bitmap_repo(bitmap_git), &obj->oid,
- &oi, 0) < 0)
+ if (odb_read_object_info_extended(bitmap_repo(bitmap_git)->objects,
+ &obj->oid, &oi, 0) < 0)
die(_("unable to get disk usage of '%s'"),
oid_to_hex(&obj->oid));
diff --git a/pack-bitmap.h b/pack-bitmap.h
index 382d39499a..1bd7a791e2 100644
--- a/pack-bitmap.h
+++ b/pack-bitmap.h
@@ -81,6 +81,7 @@ void traverse_bitmap_commit_list(struct bitmap_index *,
show_reachable_fn show_reachable);
void test_bitmap_walk(struct rev_info *revs);
int test_bitmap_commits(struct repository *r);
+int test_bitmap_commits_with_offset(struct repository *r);
int test_bitmap_hashes(struct repository *r);
int test_bitmap_pseudo_merges(struct repository *r);
int test_bitmap_pseudo_merge_commits(struct repository *r, uint32_t n);
diff --git a/pack-check.c b/pack-check.c
index 874897d6cb..67cb2cf72f 100644
--- a/pack-check.c
+++ b/pack-check.c
@@ -8,7 +8,7 @@
#include "progress.h"
#include "packfile.h"
#include "object-file.h"
-#include "object-store.h"
+#include "odb.h"
struct idx_entry {
off_t offset;
diff --git a/pack-mtimes.c b/pack-mtimes.c
index 20900ca88d..8e1f2dec0e 100644
--- a/pack-mtimes.c
+++ b/pack-mtimes.c
@@ -1,7 +1,7 @@
#include "git-compat-util.h"
#include "gettext.h"
#include "pack-mtimes.h"
-#include "object-store.h"
+#include "odb.h"
#include "packfile.h"
#include "strbuf.h"
diff --git a/pack-objects.h b/pack-objects.h
index 475a2d67ce..83299d4732 100644
--- a/pack-objects.h
+++ b/pack-objects.h
@@ -1,7 +1,7 @@
#ifndef PACK_OBJECTS_H
#define PACK_OBJECTS_H
-#include "object-store.h"
+#include "odb.h"
#include "thread-utils.h"
#include "pack.h"
#include "packfile.h"
@@ -120,11 +120,23 @@ struct object_entry {
unsigned ext_base:1; /* delta_idx points outside packlist */
};
+/**
+ * A packing region is a section of the packing_data.objects array
+ * as given by a starting index and a number of elements.
+ */
+struct packing_region {
+ size_t start;
+ size_t nr;
+};
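+
+/*
+ * For example, a region with .start = 10 and .nr = 4 covers
+ * packing_data.objects[10] through objects[13].
+ */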
+
struct packing_data {
struct repository *repo;
struct object_entry *objects;
uint32_t nr_objects, nr_alloc;
+ struct packing_region *regions;
+ size_t nr_regions, nr_regions_alloc;
+
int32_t *index;
uint32_t index_size;
diff --git a/pack-revindex.c b/pack-revindex.c
index ffcde48870..0cc422a1e6 100644
--- a/pack-revindex.c
+++ b/pack-revindex.c
@@ -1,7 +1,7 @@
#include "git-compat-util.h"
#include "gettext.h"
#include "pack-revindex.h"
-#include "object-store.h"
+#include "odb.h"
#include "packfile.h"
#include "strbuf.h"
#include "trace2.h"
diff --git a/pack-write.c b/pack-write.c
index 6b06315f80..eccdc798e3 100644
--- a/pack-write.c
+++ b/pack-write.c
@@ -84,7 +84,8 @@ const char *write_idx_file(struct repository *repo,
} else {
if (!index_name) {
struct strbuf tmp_file = STRBUF_INIT;
- fd = odb_mkstemp(&tmp_file, "pack/tmp_idx_XXXXXX");
+ fd = odb_mkstemp(repo->objects, &tmp_file,
+ "pack/tmp_idx_XXXXXX");
index_name = strbuf_detach(&tmp_file, NULL);
} else {
unlink(index_name);
@@ -259,7 +260,8 @@ char *write_rev_file_order(struct repository *repo,
if (flags & WRITE_REV) {
if (!rev_name) {
struct strbuf tmp_file = STRBUF_INIT;
- fd = odb_mkstemp(&tmp_file, "pack/tmp_rev_XXXXXX");
+ fd = odb_mkstemp(repo->objects, &tmp_file,
+ "pack/tmp_rev_XXXXXX");
path = strbuf_detach(&tmp_file, NULL);
} else {
unlink(rev_name);
@@ -342,7 +344,7 @@ static char *write_mtimes_file(struct repository *repo,
if (!to_pack)
BUG("cannot call write_mtimes_file with NULL packing_data");
- fd = odb_mkstemp(&tmp_file, "pack/tmp_mtimes_XXXXXX");
+ fd = odb_mkstemp(repo->objects, &tmp_file, "pack/tmp_mtimes_XXXXXX");
mtimes_name = strbuf_detach(&tmp_file, NULL);
f = hashfd(repo->hash_algo, fd, mtimes_name);
@@ -531,7 +533,7 @@ struct hashfile *create_tmp_packfile(struct repository *repo,
struct strbuf tmpname = STRBUF_INIT;
int fd;
- fd = odb_mkstemp(&tmpname, "pack/tmp_pack_XXXXXX");
+ fd = odb_mkstemp(repo->objects, &tmpname, "pack/tmp_pack_XXXXXX");
*pack_tmp_name = strbuf_detach(&tmpname, NULL);
return hashfd(repo->hash_algo, fd, *pack_tmp_name);
}
diff --git a/packfile.c b/packfile.c
index 70c7208f02..af9ccfdba6 100644
--- a/packfile.c
+++ b/packfile.c
@@ -19,7 +19,7 @@
#include "tree-walk.h"
#include "tree.h"
#include "object-file.h"
-#include "object-store.h"
+#include "odb.h"
#include "midx.h"
#include "commit-graph.h"
#include "pack-revindex.h"
@@ -359,7 +359,7 @@ void close_pack(struct packed_git *p)
oidset_clear(&p->bad_objects);
}
-void close_object_store(struct raw_object_store *o)
+void close_object_store(struct object_database *o)
{
struct packed_git *p;
@@ -1029,16 +1029,16 @@ static void prepare_packed_git_mru(struct repository *r)
static void prepare_packed_git(struct repository *r)
{
- struct object_directory *odb;
+ struct odb_source *source;
if (r->objects->packed_git_initialized)
return;
- prepare_alt_odb(r);
- for (odb = r->objects->odb; odb; odb = odb->next) {
- int local = (odb == r->objects->odb);
- prepare_multi_pack_index_one(r, odb->path, local);
- prepare_packed_git_one(r, odb->path, local);
+ odb_prepare_alternates(r->objects);
+ for (source = r->objects->sources; source; source = source->next) {
+ int local = (source == r->objects->sources);
+ prepare_multi_pack_index_one(r, source->path, local);
+ prepare_packed_git_one(r, source->path, local);
}
rearrange_packed_git(r);
@@ -1048,7 +1048,7 @@ static void prepare_packed_git(struct repository *r)
void reprepare_packed_git(struct repository *r)
{
- struct object_directory *odb;
+ struct odb_source *source;
obj_read_lock();
@@ -1059,10 +1059,10 @@ void reprepare_packed_git(struct repository *r)
* the lifetime of the process.
*/
r->objects->loaded_alternates = 0;
- prepare_alt_odb(r);
+ odb_prepare_alternates(r->objects);
- for (odb = r->objects->odb; odb; odb = odb->next)
- odb_clear_loose_cache(odb);
+ for (source = r->objects->sources; source; source = source->next)
+ odb_clear_loose_cache(source);
r->objects->approximate_object_count_valid = 0;
r->objects->packed_git_initialized = 0;
@@ -1321,7 +1321,7 @@ static int retry_bad_packed_offset(struct repository *r,
return OBJ_BAD;
nth_packed_object_id(&oid, p, pack_pos_to_index(p, pos));
mark_bad_packed_object(p, &oid);
- type = oid_object_info(r, &oid, NULL);
+ type = odb_read_object_info(r->objects, &oid, NULL);
if (type <= OBJ_NONE)
return OBJ_BAD;
return type;
@@ -1849,7 +1849,8 @@ void *unpack_entry(struct repository *r, struct packed_git *p, off_t obj_offset,
oi.typep = &type;
oi.sizep = &base_size;
oi.contentp = &base;
- if (oid_object_info_extended(r, &base_oid, &oi, 0) < 0)
+ if (odb_read_object_info_extended(r->objects, &base_oid,
+ &oi, 0) < 0)
base = NULL;
external_base = base;
diff --git a/packfile.h b/packfile.h
index 3a3c77cf05..53c3b7d3b4 100644
--- a/packfile.h
+++ b/packfile.h
@@ -3,10 +3,10 @@
#include "list.h"
#include "object.h"
-#include "object-store.h"
+#include "odb.h"
#include "oidset.h"
-/* in object-store.h */
+/* in odb.h */
struct object_info;
struct packed_git {
@@ -183,12 +183,12 @@ int close_pack_fd(struct packed_git *p);
uint32_t get_pack_fanout(struct packed_git *p, uint32_t value);
-struct raw_object_store;
+struct object_database;
unsigned char *use_pack(struct packed_git *, struct pack_window **, off_t, unsigned long *);
void close_pack_windows(struct packed_git *);
void close_pack(struct packed_git *);
-void close_object_store(struct raw_object_store *o);
+void close_object_store(struct object_database *o);
void unuse_pack(struct pack_window **);
void clear_delta_base_cache(void);
struct packed_git *add_packed_git(struct repository *r, const char *path,
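
The object-store.h to odb.h switch that runs through packfile.c and the rest of this series follows a single pattern: callers stop passing a struct repository to the object-access helpers and pass its object database handle instead. A hedged sketch of the mapping, using only calls that appear in these hunks (surrounding includes beyond the two named headers are elided):

    #include "git-compat-util.h"
    #include "odb.h"

    static void report_object(struct repository *r, const struct object_id *oid)
    {
            unsigned long size;
            enum object_type type;
            void *buf;

            /* was: oid_object_info(r, oid, &size) */
            if (odb_read_object_info(r->objects, oid, &size) < 0)
                    return;

            /* was: has_object(r, oid, ...) */
            if (!odb_has_object(r->objects, oid, HAS_OBJECT_RECHECK_PACKED))
                    return;

            /* was: repo_read_object_file(r, oid, &type, &size) */
            buf = odb_read_object(r->objects, oid, &type, &size);
            free(buf);
    }
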
diff --git a/parse-options.c b/parse-options.c
index a9a39ecaef..5224203ffe 100644
--- a/parse-options.c
+++ b/parse-options.c
@@ -68,6 +68,64 @@ static char *fix_filename(const char *prefix, const char *file)
return prefix_filename_except_for_dash(prefix, file);
}
+static int do_get_int_value(const void *value, size_t precision, intmax_t *ret)
+{
+ switch (precision) {
+ case sizeof(int8_t):
+ *ret = *(int8_t *)value;
+ return 0;
+ case sizeof(int16_t):
+ *ret = *(int16_t *)value;
+ return 0;
+ case sizeof(int32_t):
+ *ret = *(int32_t *)value;
+ return 0;
+ case sizeof(int64_t):
+ *ret = *(int64_t *)value;
+ return 0;
+ default:
+ return -1;
+ }
+}
+
+static intmax_t get_int_value(const struct option *opt, enum opt_parsed flags)
+{
+ intmax_t ret;
+ if (do_get_int_value(opt->value, opt->precision, &ret))
+ BUG("invalid precision for option %s", optname(opt, flags));
+ return ret;
+}
+
+static enum parse_opt_result set_int_value(const struct option *opt,
+ enum opt_parsed flags,
+ intmax_t value)
+{
+ switch (opt->precision) {
+ case sizeof(int8_t):
+ *(int8_t *)opt->value = value;
+ return 0;
+ case sizeof(int16_t):
+ *(int16_t *)opt->value = value;
+ return 0;
+ case sizeof(int32_t):
+ *(int32_t *)opt->value = value;
+ return 0;
+ case sizeof(int64_t):
+ *(int64_t *)opt->value = value;
+ return 0;
+ default:
+ BUG("invalid precision for option %s", optname(opt, flags));
+ }
+}
+
+static int signed_int_fits(intmax_t value, size_t precision)
+{
+ size_t bits = precision * CHAR_BIT;
+ intmax_t upper_bound = INTMAX_MAX >> (bitsizeof(intmax_t) - bits);
+ intmax_t lower_bound = -upper_bound - 1;
+ return lower_bound <= value && value <= upper_bound;
+}
+
static enum parse_opt_result do_get_value(struct parse_opt_ctx_t *p,
const struct option *opt,
enum opt_parsed flags,
@@ -89,35 +147,55 @@ static enum parse_opt_result do_get_value(struct parse_opt_ctx_t *p,
return opt->ll_callback(p, opt, NULL, unset);
case OPTION_BIT:
+ {
+ intmax_t value = get_int_value(opt, flags);
if (unset)
- *(int *)opt->value &= ~opt->defval;
+ value &= ~opt->defval;
else
- *(int *)opt->value |= opt->defval;
- return 0;
+ value |= opt->defval;
+ return set_int_value(opt, flags, value);
+ }
case OPTION_NEGBIT:
+ {
+ intmax_t value = get_int_value(opt, flags);
if (unset)
- *(int *)opt->value |= opt->defval;
+ value |= opt->defval;
else
- *(int *)opt->value &= ~opt->defval;
- return 0;
+ value &= ~opt->defval;
+ return set_int_value(opt, flags, value);
+ }
case OPTION_BITOP:
+ {
+ intmax_t value = get_int_value(opt, flags);
if (unset)
BUG("BITOP can't have unset form");
- *(int *)opt->value &= ~opt->extra;
- *(int *)opt->value |= opt->defval;
- return 0;
+ value &= ~opt->extra;
+ value |= opt->defval;
+ return set_int_value(opt, flags, value);
+ }
case OPTION_COUNTUP:
- if (*(int *)opt->value < 0)
- *(int *)opt->value = 0;
- *(int *)opt->value = unset ? 0 : *(int *)opt->value + 1;
- return 0;
+ {
+ size_t bits = CHAR_BIT * opt->precision;
+ intmax_t upper_bound = INTMAX_MAX >> (bitsizeof(intmax_t) - bits);
+ intmax_t value = get_int_value(opt, flags);
+
+ if (value < 0)
+ value = 0;
+ if (unset)
+ value = 0;
+ else if (value < upper_bound)
+ value++;
+ else
+ return error(_("value for %s exceeds %"PRIdMAX),
+ optname(opt, flags), upper_bound);
+ return set_int_value(opt, flags, value);
+ }
case OPTION_SET_INT:
- *(int *)opt->value = unset ? 0 : opt->defval;
- return 0;
+ return set_int_value(opt, flags, unset ? 0 : opt->defval);
case OPTION_STRING:
if (unset)
@@ -199,23 +277,7 @@ static enum parse_opt_result do_get_value(struct parse_opt_ctx_t *p,
return error(_("value %s for %s not in range [%"PRIdMAX",%"PRIdMAX"]"),
arg, optname(opt, flags), (intmax_t)lower_bound, (intmax_t)upper_bound);
- switch (opt->precision) {
- case 1:
- *(int8_t *)opt->value = value;
- return 0;
- case 2:
- *(int16_t *)opt->value = value;
- return 0;
- case 4:
- *(int32_t *)opt->value = value;
- return 0;
- case 8:
- *(int64_t *)opt->value = value;
- return 0;
- default:
- BUG("invalid precision for option %s",
- optname(opt, flags));
- }
+ return set_int_value(opt, flags, value);
}
case OPTION_UNSIGNED:
{
@@ -266,7 +328,9 @@ static enum parse_opt_result do_get_value(struct parse_opt_ctx_t *p,
}
struct parse_opt_cmdmode_list {
- int value, *value_ptr;
+ intmax_t value;
+ void *value_ptr;
+ size_t precision;
const struct option *opt;
const char *arg;
enum opt_parsed flags;
@@ -280,7 +344,7 @@ static void build_cmdmode_list(struct parse_opt_ctx_t *ctx,
for (; opts->type != OPTION_END; opts++) {
struct parse_opt_cmdmode_list *elem = ctx->cmdmode_list;
- int *value_ptr = opts->value;
+ void *value_ptr = opts->value;
if (!(opts->flags & PARSE_OPT_CMDMODE) || !value_ptr)
continue;
@@ -292,10 +356,13 @@ static void build_cmdmode_list(struct parse_opt_ctx_t *ctx,
CALLOC_ARRAY(elem, 1);
elem->value_ptr = value_ptr;
- elem->value = *value_ptr;
+ elem->precision = opts->precision;
+ if (do_get_int_value(value_ptr, opts->precision, &elem->value))
+ optbug(opts, "has invalid precision");
elem->next = ctx->cmdmode_list;
ctx->cmdmode_list = elem;
}
+ BUG_if_bug("invalid 'struct option'");
}
static char *optnamearg(const struct option *opt, const char *arg,
@@ -317,7 +384,13 @@ static enum parse_opt_result get_value(struct parse_opt_ctx_t *p,
char *opt_name, *other_opt_name;
for (; elem; elem = elem->next) {
- if (*elem->value_ptr == elem->value)
+ intmax_t new_value;
+
+ if (do_get_int_value(elem->value_ptr, elem->precision,
+ &new_value))
+ BUG("impossible: invalid precision");
+
+ if (new_value == elem->value)
continue;
if (elem->opt &&
@@ -327,7 +400,7 @@ static enum parse_opt_result get_value(struct parse_opt_ctx_t *p,
elem->opt = opt;
elem->arg = arg;
elem->flags = flags;
- elem->value = *elem->value_ptr;
+ elem->value = new_value;
}
if (result || !elem)
@@ -586,10 +659,14 @@ static void parse_options_check(const struct option *opts)
opts->long_name && !(opts->flags & PARSE_OPT_NONEG))
optbug(opts, "OPTION_SET_INT 0 should not be negatable");
switch (opts->type) {
- case OPTION_COUNTUP:
+ case OPTION_SET_INT:
case OPTION_BIT:
case OPTION_NEGBIT:
- case OPTION_SET_INT:
+ case OPTION_BITOP:
+ case OPTION_COUNTUP:
+ if (!signed_int_fits(opts->defval, opts->precision))
+ optbug(opts, "has invalid defval");
+ /* fallthru */
case OPTION_NUMBER:
if ((opts->flags & PARSE_OPT_OPTARG) ||
!(opts->flags & PARSE_OPT_NOARG))
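
The new helpers key everything off opt->precision, i.e. the byte size of the variable behind the option, and signed_int_fits() derives the representable range from that size alone. A standalone sketch of the same arithmetic for a one-byte value:

    #include <limits.h>
    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
            size_t precision = sizeof(int8_t);
            size_t bits = precision * CHAR_BIT;                /* 8 */
            intmax_t upper = INTMAX_MAX >> (sizeof(intmax_t) * CHAR_BIT - bits);
            intmax_t lower = -upper - 1;

            /* prints "int8_t options accept [-128, 127]" */
            printf("int8_t options accept [%jd, %jd]\n", lower, upper);
            return 0;
    }
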
diff --git a/parse-options.h b/parse-options.h
index 91c3e3c29b..312045604d 100644
--- a/parse-options.h
+++ b/parse-options.h
@@ -172,6 +172,7 @@ struct option {
.short_name = (s), \
.long_name = (l), \
.value = (v), \
+ .precision = sizeof(*v), \
.help = (h), \
.flags = PARSE_OPT_NOARG|(f), \
.callback = NULL, \
@@ -182,6 +183,7 @@ struct option {
.short_name = (s), \
.long_name = (l), \
.value = (v), \
+ .precision = sizeof(*v), \
.help = (h), \
.flags = PARSE_OPT_NOARG|(f), \
}
@@ -190,6 +192,7 @@ struct option {
.short_name = (s), \
.long_name = (l), \
.value = (v), \
+ .precision = sizeof(*v), \
.help = (h), \
.flags = PARSE_OPT_NOARG | (f), \
.defval = (i), \
@@ -238,6 +241,7 @@ struct option {
.short_name = (s), \
.long_name = (l), \
.value = (v), \
+ .precision = sizeof(*v), \
.help = (h), \
.flags = PARSE_OPT_NOARG|PARSE_OPT_NONEG, \
.defval = (set), \
@@ -248,6 +252,7 @@ struct option {
.short_name = (s), \
.long_name = (l), \
.value = (v), \
+ .precision = sizeof(*v), \
.help = (h), \
.flags = PARSE_OPT_NOARG, \
.defval = (b), \
@@ -260,6 +265,7 @@ struct option {
.short_name = (s), \
.long_name = (l), \
.value = (v), \
+ .precision = sizeof(*v), \
.help = (h), \
.flags = PARSE_OPT_NOARG | PARSE_OPT_HIDDEN, \
.defval = 1, \
@@ -269,6 +275,7 @@ struct option {
.short_name = (s), \
.long_name = (l), \
.value = (v), \
+ .precision = sizeof(*v), \
.help = (h), \
.flags = PARSE_OPT_CMDMODE|PARSE_OPT_NOARG|PARSE_OPT_NONEG | (f), \
.defval = (i), \
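
With every OPT_* initializer now recording .precision = sizeof(*v), the variable a flag points at no longer has to be a plain int. A hedged sketch; the variable and option names are illustrative, and whether a given option type tolerates a narrow field depends on the range checks added in parse-options.c above:

    static int8_t show_progress = 1;    /* one byte is enough for a boolean */

    static struct option builtin_example_options[] = {
            OPT_BOOL(0, "progress", &show_progress, "force progress reporting"),
            OPT_END(),
    };
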
diff --git a/path-walk.c b/path-walk.c
index 341bdd2ba4..2d4ddbadd5 100644
--- a/path-walk.c
+++ b/path-walk.c
@@ -503,7 +503,11 @@ int walk_objects_by_path(struct path_walk_info *info)
if (prepare_revision_walk(info->revs))
die(_("failed to setup revision walk"));
- /* Walk trees to mark them as UNINTERESTING. */
+ /*
+ * Walk trees to mark them as UNINTERESTING.
+ * This is particularly important when 'edge_aggressive' is set.
+ */
+ info->revs->edge_hint_aggressive = info->edge_aggressive;
edge_repo = info->revs->repo;
edge_tree_list = root_tree_list;
mark_edges_uninteresting(info->revs, show_edge,
diff --git a/path-walk.h b/path-walk.h
index 473ee9d361..5ef5a8440e 100644
--- a/path-walk.h
+++ b/path-walk.h
@@ -51,6 +51,13 @@ struct path_walk_info {
int prune_all_uninteresting;
/**
+ * When 'edge_aggressive' is set, the revision walk will use the
+ * '--objects-edge-aggressive' option to mark even more objects
+ * as uninteresting.
+ */
+ int edge_aggressive;
+
+ /**
* Specify a sparse-checkout definition to match our paths to. Do not
* walk outside of this sparse definition. If the patterns are in
* cone mode, then the search may prune directories that are outside
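
A hedged sketch of how a caller opts in to the more aggressive edge marking; everything except the two fields used here (revs and edge_aggressive) and walk_objects_by_path() is elided or illustrative:

    struct path_walk_info info = { 0 };

    info.revs = revs;              /* an already-prepared struct rev_info * */
    info.edge_aggressive = 1;      /* forwarded to revs->edge_hint_aggressive */

    if (walk_objects_by_path(&info))
            die(_("failed to walk objects"));
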
diff --git a/path.c b/path.c
index 3b598b2847..7f56eaf993 100644
--- a/path.c
+++ b/path.c
@@ -15,7 +15,7 @@
#include "submodule-config.h"
#include "path.h"
#include "packfile.h"
-#include "object-store.h"
+#include "odb.h"
#include "lockfile.h"
#include "exec-cmd.h"
@@ -397,7 +397,7 @@ static void adjust_git_path(struct repository *repo,
strbuf_splice(buf, 0, buf->len,
repo->index_file, strlen(repo->index_file));
else if (dir_prefix(base, "objects"))
- replace_dir(buf, git_dir_len + 7, repo->objects->odb->path);
+ replace_dir(buf, git_dir_len + 7, repo->objects->sources->path);
else if (repo_settings_get_hooks_path(repo) && dir_prefix(base, "hooks"))
replace_dir(buf, git_dir_len + 5, repo_settings_get_hooks_path(repo));
else if (repo->different_commondir)
diff --git a/pathspec.c b/pathspec.c
index 2b4e434bc0..a3ddd701c7 100644
--- a/pathspec.c
+++ b/pathspec.c
@@ -492,7 +492,7 @@ static void init_pathspec_item(struct pathspec_item *item, unsigned flags,
if (!match) {
const char *hint_path;
- if (!have_git_dir())
+ if ((flags & PATHSPEC_NO_REPOSITORY) || !have_git_dir())
die(_("'%s' is outside the directory tree"),
copyfrom);
hint_path = repo_get_work_tree(the_repository);
@@ -614,6 +614,10 @@ void parse_pathspec(struct pathspec *pathspec,
(flags & PATHSPEC_PREFER_FULL))
BUG("PATHSPEC_PREFER_CWD and PATHSPEC_PREFER_FULL are incompatible");
+ if ((flags & PATHSPEC_NO_REPOSITORY) &&
+ (~magic_mask & (PATHSPEC_ATTR | PATHSPEC_FROMTOP)))
+ BUG("PATHSPEC_NO_REPOSITORY is incompatible with PATHSPEC_ATTR and PATHSPEC_FROMTOP");
+
/* No arguments with prefix -> prefix pathspec */
if (!entry) {
if (flags & PATHSPEC_PREFER_FULL)
diff --git a/pathspec.h b/pathspec.h
index de537cff3c..5e3a6f1fe7 100644
--- a/pathspec.h
+++ b/pathspec.h
@@ -76,6 +76,11 @@ struct pathspec {
* allowed, then it will automatically set for every pathspec.
*/
#define PATHSPEC_LITERAL_PATH (1<<6)
+/*
+ * For git diff --no-index, indicate that we are operating without
+ * a repository or index.
+ */
+#define PATHSPEC_NO_REPOSITORY (1<<7)
/**
* Given command line arguments and a prefix, convert the input to
@@ -184,6 +189,12 @@ int match_pathspec(struct index_state *istate,
const char *name, int namelen,
int prefix, char *seen, int is_dir);
+/* Set both DO_MATCH_DIRECTORY and DO_MATCH_LEADING_PATHSPEC if is_dir is true */
+int match_leading_pathspec(struct index_state *istate,
+ const struct pathspec *ps,
+ const char *name, int namelen,
+ int prefix, char *seen, int is_dir);
+
/*
* Determine whether a pathspec will match only entire index entries (non-sparse
* files and/or entire sparse directories). If the pathspec has the potential to
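
The new flag lets "git diff --no-index" build a pathspec without a repository; the BUG() added in pathspec.c requires attribute and from-top magic to be masked out in that mode. A minimal sketch, assuming parse_pathspec()'s usual (magic_mask, flags, prefix, argv) calling convention shown elsewhere in this patch:

    struct pathspec ps;

    parse_pathspec(&ps,
                   PATHSPEC_ATTR | PATHSPEC_FROMTOP,   /* magic we must refuse */
                   PATHSPEC_PREFER_FULL | PATHSPEC_NO_REPOSITORY,
                   "", argv);
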
diff --git a/perl/Git.pm b/perl/Git.pm
index 6f47d653ab..090cf77dab 100644
--- a/perl/Git.pm
+++ b/perl/Git.pm
@@ -1061,6 +1061,19 @@ sub _close_cat_blob {
delete @$self{@vars};
}
+# Given PORT, a port number or service name, return its numerical
+# value, or undef if it cannot be resolved.
+sub port_num {
+ my ($port) = @_;
+
+ # Port can be either a positive integer within the 16-bit range...
+ if ($port =~ /^\d+$/ && $port > 0 && $port <= (2**16 - 1)) {
+ return $port;
+ }
+
+ # ... or a symbolic port (service name).
+ return scalar getservbyname($port, '');
+}
=item credential_read( FILEHANDLE )
diff --git a/pkt-line.c b/pkt-line.c
index a5bcbc96fb..fc583feb26 100644
--- a/pkt-line.c
+++ b/pkt-line.c
@@ -617,7 +617,7 @@ void packet_reader_init(struct packet_reader *reader, int fd,
reader->buffer_size = sizeof(packet_buffer);
reader->options = options;
reader->me = "git";
- reader->hash_algo = &hash_algos[GIT_HASH_SHA1];
+ reader->hash_algo = &hash_algos[GIT_HASH_SHA1_LEGACY];
strbuf_init(&reader->scratch, 0);
}
diff --git a/po/meson.build b/po/meson.build
index d7154b6395..de3b4e2c38 100644
--- a/po/meson.build
+++ b/po/meson.build
@@ -8,6 +8,7 @@ translations = i18n.gettext('git',
'el',
'es',
'fr',
+ 'ga',
'id',
'is',
'it',
diff --git a/preload-index.c b/preload-index.c
index 40ab2abafb..b222821b44 100644
--- a/preload-index.c
+++ b/preload-index.c
@@ -2,7 +2,6 @@
* Copyright (C) 2008 Linus Torvalds
*/
-#define USE_THE_REPOSITORY_VARIABLE
#define DISABLE_SIGN_COMPARE_WARNINGS
#include "git-compat-util.h"
@@ -19,6 +18,7 @@
#include "repository.h"
#include "symlinks.h"
#include "trace2.h"
+#include "config.h"
/*
* Mostly randomly chosen maximum thread counts: we
@@ -111,6 +111,9 @@ void preload_index(struct index_state *index,
struct thread_data data[MAX_PARALLEL];
struct progress_data pd;
int t2_sum_lstat = 0;
+ int core_preload_index = 1;
+
+ repo_config_get_bool(index->repo, "core.preloadindex", &core_preload_index);
if (!HAVE_THREADS || !core_preload_index)
return;
@@ -132,7 +135,7 @@ void preload_index(struct index_state *index,
memset(&pd, 0, sizeof(pd));
if (refresh_flags & REFRESH_PROGRESS && isatty(2)) {
- pd.progress = start_delayed_progress(the_repository,
+ pd.progress = start_delayed_progress(index->repo,
_("Refreshing index"),
index->cache_nr);
pthread_mutex_init(&pd.mutex, NULL);
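
Dropping USE_THE_REPOSITORY_VARIABLE here relies on a small idiom: seed the variable with the built-in default, then let repo_config_get_bool() overwrite it only when the key is actually set. The same pattern spelled out, with the guard made explicit:

    int preload = 1;                                   /* built-in default */

    if (repo_config_get_bool(index->repo, "core.preloadindex", &preload))
            ; /* key not set: the default above stays in effect */
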
diff --git a/prio-queue.c b/prio-queue.c
index ec33ac27db..9748528ce6 100644
--- a/prio-queue.c
+++ b/prio-queue.c
@@ -58,22 +58,10 @@ void prio_queue_put(struct prio_queue *queue, void *thing)
}
}
-void *prio_queue_get(struct prio_queue *queue)
+static void sift_down_root(struct prio_queue *queue)
{
- void *result;
size_t ix, child;
- if (!queue->nr)
- return NULL;
- if (!queue->compare)
- return queue->array[--queue->nr].data; /* LIFO */
-
- result = queue->array[0].data;
- if (!--queue->nr)
- return result;
-
- queue->array[0] = queue->array[queue->nr];
-
/* Push down the one at the root */
for (ix = 0; ix * 2 + 1 < queue->nr; ix = child) {
child = ix * 2 + 1; /* left */
@@ -86,6 +74,23 @@ void *prio_queue_get(struct prio_queue *queue)
swap(queue, child, ix);
}
+}
+
+void *prio_queue_get(struct prio_queue *queue)
+{
+ void *result;
+
+ if (!queue->nr)
+ return NULL;
+ if (!queue->compare)
+ return queue->array[--queue->nr].data; /* LIFO */
+
+ result = queue->array[0].data;
+ if (!--queue->nr)
+ return result;
+
+ queue->array[0] = queue->array[queue->nr];
+ sift_down_root(queue);
return result;
}
@@ -97,3 +102,17 @@ void *prio_queue_peek(struct prio_queue *queue)
return queue->array[queue->nr - 1].data;
return queue->array[0].data;
}
+
+void prio_queue_replace(struct prio_queue *queue, void *thing)
+{
+ if (!queue->nr) {
+ prio_queue_put(queue, thing);
+ } else if (!queue->compare) {
+ queue->array[queue->nr - 1].ctr = queue->insertion_ctr++;
+ queue->array[queue->nr - 1].data = thing;
+ } else {
+ queue->array[0].ctr = queue->insertion_ctr++;
+ queue->array[0].data = thing;
+ sift_down_root(queue);
+ }
+}
diff --git a/prio-queue.h b/prio-queue.h
index 38d032636d..da7fad2f1f 100644
--- a/prio-queue.h
+++ b/prio-queue.h
@@ -52,6 +52,14 @@ void *prio_queue_get(struct prio_queue *);
*/
void *prio_queue_peek(struct prio_queue *);
+/*
+ * Replace the "thing" that compares as the smallest with a new "thing",
+ * as a more efficient equivalent of prio_queue_get() followed by
+ * prio_queue_put(). Behaves like prio_queue_put() if the queue is
+ * empty.
+ */
+void prio_queue_replace(struct prio_queue *queue, void *thing);
+
void clear_prio_queue(struct prio_queue *);
/* Reverse the LIFO elements */
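
A hedged usage sketch for the new helper: compared with prio_queue_get() followed by prio_queue_put(), prio_queue_replace() sifts the heap only once. The integer payload and the comparison callback are illustrative; the callback follows the existing prio-queue convention of taking a cb_data pointer.

    static int cmp_int(const void *a, const void *b, void *cb_data UNUSED)
    {
            return *(const int *)a - *(const int *)b;
    }

    static void example(void)
    {
            int one = 1, two = 2, three = 3;
            struct prio_queue q = { cmp_int };

            prio_queue_put(&q, &one);
            prio_queue_put(&q, &three);

            /* drop the current minimum (&one) and insert &two in one step */
            prio_queue_replace(&q, &two);

            /* prio_queue_peek(&q) now yields &two */
            clear_prio_queue(&q);
    }
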
diff --git a/promisor-remote.c b/promisor-remote.c
index 9d058586df..be6f82d12f 100644
--- a/promisor-remote.c
+++ b/promisor-remote.c
@@ -3,7 +3,7 @@
#include "git-compat-util.h"
#include "gettext.h"
#include "hex.h"
-#include "object-store.h"
+#include "odb.h"
#include "promisor-remote.h"
#include "config.h"
#include "trace2.h"
@@ -245,8 +245,8 @@ static int remove_fetched_oids(struct repository *repo,
struct object_id *new_oids;
for (i = 0; i < oid_nr; i++)
- if (oid_object_info_extended(repo, &old_oids[i], NULL,
- OBJECT_INFO_SKIP_FETCH_OBJECT)) {
+ if (odb_read_object_info_extended(repo->objects, &old_oids[i], NULL,
+ OBJECT_INFO_SKIP_FETCH_OBJECT)) {
remaining[i] = 1;
remaining_nr++;
}
diff --git a/protocol-caps.c b/protocol-caps.c
index 9b8db37a21..ecdd0dc58d 100644
--- a/protocol-caps.c
+++ b/protocol-caps.c
@@ -6,7 +6,7 @@
#include "hash.h"
#include "hex.h"
#include "object.h"
-#include "object-store.h"
+#include "odb.h"
#include "repository.h"
#include "string-list.h"
#include "strbuf.h"
@@ -64,7 +64,7 @@ static void send_info(struct repository *r, struct packet_writer *writer,
strbuf_addstr(&send_buffer, oid_str);
if (info->size) {
- if (oid_object_info(r, &oid, &object_size) < 0) {
+ if (odb_read_object_info(r->objects, &oid, &object_size) < 0) {
strbuf_addstr(&send_buffer, " ");
} else {
strbuf_addf(&send_buffer, " %lu", object_size);
diff --git a/reachable.c b/reachable.c
index 9dc748f0b9..e984b68a0c 100644
--- a/reachable.c
+++ b/reachable.c
@@ -211,7 +211,7 @@ static void add_recent_object(const struct object_id *oid,
* later processing, and the revision machinery expects
* commits and tags to have been parsed.
*/
- type = oid_object_info(the_repository, oid, NULL);
+ type = odb_read_object_info(the_repository->objects, oid, NULL);
if (type < 0)
die("unable to get object info for %s", oid_to_hex(oid));
diff --git a/read-cache.c b/read-cache.c
index c0bb760ad4..5cf41b81f1 100644
--- a/read-cache.c
+++ b/read-cache.c
@@ -20,7 +20,7 @@
#include "refs.h"
#include "dir.h"
#include "object-file.h"
-#include "object-store.h"
+#include "odb.h"
#include "oid-array.h"
#include "tree.h"
#include "commit.h"
@@ -254,7 +254,7 @@ static int ce_compare_link(const struct cache_entry *ce, size_t expected_size)
if (strbuf_readlink(&sb, ce->name, expected_size))
return -1;
- buffer = repo_read_object_file(the_repository, &ce->oid, &type, &size);
+ buffer = odb_read_object(the_repository->objects, &ce->oid, &type, &size);
if (buffer) {
if (size == sb.len)
match = memcmp(buffer, sb.buf, size);
@@ -1456,7 +1456,8 @@ int repo_refresh_and_write_index(struct repository *repo,
struct lock_file lock_file = LOCK_INIT;
int fd, ret = 0;
- fd = repo_hold_locked_index(repo, &lock_file, 0);
+ fd = repo_hold_locked_index(repo, &lock_file,
+ gentle ? 0 : LOCK_REPORT_ON_ERROR);
if (!gentle && fd < 0)
return -1;
if (refresh_index(repo->index, refresh_flags, pathspec, seen, header_msg))
@@ -3485,8 +3486,8 @@ void *read_blob_data_from_index(struct index_state *istate,
}
if (pos < 0)
return NULL;
- data = repo_read_object_file(the_repository, &istate->cache[pos]->oid,
- &type, &sz);
+ data = odb_read_object(the_repository->objects, &istate->cache[pos]->oid,
+ &type, &sz);
if (!data || type != OBJ_BLOB) {
free(data);
return NULL;
@@ -3729,9 +3730,9 @@ void prefetch_cache_entries(const struct index_state *istate,
if (S_ISGITLINK(ce->ce_mode) || !must_prefetch(ce))
continue;
- if (!oid_object_info_extended(the_repository, &ce->oid,
- NULL,
- OBJECT_INFO_FOR_PREFETCH))
+ if (!odb_read_object_info_extended(the_repository->objects,
+ &ce->oid, NULL,
+ OBJECT_INFO_FOR_PREFETCH))
continue;
oid_array_append(&to_fetch, &ce->oid);
}
diff --git a/ref-filter.c b/ref-filter.c
index 7a274633cf..f9f2c512a8 100644
--- a/ref-filter.c
+++ b/ref-filter.c
@@ -12,7 +12,7 @@
#include "refs.h"
#include "wildmatch.h"
#include "object-name.h"
-#include "object-store.h"
+#include "odb.h"
#include "oid-array.h"
#include "repo-settings.h"
#include "repository.h"
@@ -2302,8 +2302,8 @@ static int get_object(struct ref_array_item *ref, int deref, struct object **obj
oi->info.sizep = &oi->size;
oi->info.typep = &oi->type;
}
- if (oid_object_info_extended(the_repository, &oi->oid, &oi->info,
- OBJECT_INFO_LOOKUP_REPLACE))
+ if (odb_read_object_info_extended(the_repository->objects, &oi->oid, &oi->info,
+ OBJECT_INFO_LOOKUP_REPLACE))
return strbuf_addf_ret(err, -1, _("missing object %s for %s"),
oid_to_hex(&oi->oid), ref->refname);
if (oi->info.disk_sizep && oi->disk_size < 0)
diff --git a/reflog.c b/reflog.c
index 15d81ebea9..9a68524df8 100644
--- a/reflog.c
+++ b/reflog.c
@@ -5,7 +5,7 @@
#include "config.h"
#include "gettext.h"
#include "parse-options.h"
-#include "object-store.h"
+#include "odb.h"
#include "reflog.h"
#include "refs.h"
#include "revision.h"
@@ -81,6 +81,20 @@ int reflog_expire_config(const char *var, const char *value,
return 0;
}
+void reflog_clear_expire_config(struct reflog_expire_options *opts)
+{
+ struct reflog_expire_entry_option *ent = opts->entries, *tmp;
+
+ while (ent) {
+ tmp = ent;
+ ent = ent->next;
+ free(tmp);
+ }
+
+ opts->entries = NULL;
+ opts->entries_tail = NULL;
+}
+
void reflog_expire_options_set_refname(struct reflog_expire_options *cb,
const char *ref)
{
@@ -140,8 +154,8 @@ static int tree_is_complete(const struct object_id *oid)
if (!tree->buffer) {
enum object_type type;
unsigned long size;
- void *data = repo_read_object_file(the_repository, oid, &type,
- &size);
+ void *data = odb_read_object(the_repository->objects, oid,
+ &type, &size);
if (!data) {
tree->object.flags |= INCOMPLETE;
return 0;
@@ -152,7 +166,7 @@ static int tree_is_complete(const struct object_id *oid)
init_tree_desc(&desc, &tree->object.oid, tree->buffer, tree->size);
complete = 1;
while (tree_entry(&desc, &entry)) {
- if (!has_object(the_repository, &entry.oid,
+ if (!odb_has_object(the_repository->objects, &entry.oid,
HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR) ||
(S_ISDIR(entry.mode) && !tree_is_complete(&entry.oid))) {
tree->object.flags |= INCOMPLETE;
diff --git a/reflog.h b/reflog.h
index 63bb56280f..74b3f3c4f0 100644
--- a/reflog.h
+++ b/reflog.h
@@ -34,6 +34,8 @@ struct reflog_expire_options {
int reflog_expire_config(const char *var, const char *value,
const struct config_context *ctx, void *cb);
+void reflog_clear_expire_config(struct reflog_expire_options *opts);
+
/*
* Adapt the options so that they apply to the given refname. This applies any
* per-reference reflog expiry configuration that may exist to the options.
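
A hedged sketch of how the new teardown helper pairs with the existing config callback; initialization of reflog_expire_options beyond zeroing, and the expiry itself, are elided:

    struct reflog_expire_options opts = { 0 };

    repo_config(repo, reflog_expire_config, &opts);  /* builds opts.entries */
    /* ... expire the selected reflogs ... */
    reflog_clear_expire_config(&opts);               /* frees the per-pattern list */
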
diff --git a/refs.c b/refs.c
index dce5c49ca2..73913b6627 100644
--- a/refs.c
+++ b/refs.c
@@ -19,7 +19,7 @@
#include "run-command.h"
#include "hook.h"
#include "object-name.h"
-#include "object-store.h"
+#include "odb.h"
#include "object.h"
#include "path.h"
#include "submodule.h"
@@ -376,7 +376,8 @@ int ref_resolves_to_object(const char *refname,
{
if (flags & REF_ISBROKEN)
return 0;
- if (!has_object(repo, oid, HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR)) {
+ if (!odb_has_object(repo->objects, oid,
+ HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR)) {
error(_("%s does not point to a valid object!"), refname);
return 0;
}
@@ -438,9 +439,9 @@ static int for_each_filter_refs(const char *refname, const char *referent,
struct warn_if_dangling_data {
struct ref_store *refs;
FILE *fp;
- const char *refname;
const struct string_list *refnames;
- const char *msg_fmt;
+ const char *indent;
+ int dry_run;
};
static int warn_if_dangling_symref(const char *refname, const char *referent UNUSED,
@@ -448,44 +449,34 @@ static int warn_if_dangling_symref(const char *refname, const char *referent UNU
int flags, void *cb_data)
{
struct warn_if_dangling_data *d = cb_data;
- const char *resolves_to;
+ const char *resolves_to, *msg;
if (!(flags & REF_ISSYMREF))
return 0;
resolves_to = refs_resolve_ref_unsafe(d->refs, refname, 0, NULL, NULL);
if (!resolves_to
- || (d->refname
- ? strcmp(resolves_to, d->refname)
- : !string_list_has_string(d->refnames, resolves_to))) {
+ || !string_list_has_string(d->refnames, resolves_to)) {
return 0;
}
- fprintf(d->fp, d->msg_fmt, refname);
- fputc('\n', d->fp);
+ msg = d->dry_run
+ ? _("%s%s will become dangling after %s is deleted\n")
+ : _("%s%s has become dangling after %s was deleted\n");
+ fprintf(d->fp, msg, d->indent, refname, resolves_to);
return 0;
}
-void refs_warn_dangling_symref(struct ref_store *refs, FILE *fp,
- const char *msg_fmt, const char *refname)
-{
- struct warn_if_dangling_data data = {
- .refs = refs,
- .fp = fp,
- .refname = refname,
- .msg_fmt = msg_fmt,
- };
- refs_for_each_rawref(refs, warn_if_dangling_symref, &data);
-}
-
void refs_warn_dangling_symrefs(struct ref_store *refs, FILE *fp,
- const char *msg_fmt, const struct string_list *refnames)
+ const char *indent, int dry_run,
+ const struct string_list *refnames)
{
struct warn_if_dangling_data data = {
.refs = refs,
.fp = fp,
.refnames = refnames,
- .msg_fmt = msg_fmt,
+ .indent = indent,
+ .dry_run = dry_run,
};
refs_for_each_rawref(refs, warn_if_dangling_symref, &data);
}
@@ -2477,7 +2468,7 @@ int ref_transaction_prepare(struct ref_transaction *transaction,
break;
}
- if (refs->repo->objects->odb->disable_ref_updates) {
+ if (refs->repo->objects->sources->disable_ref_updates) {
strbuf_addstr(err,
_("ref updates forbidden inside quarantine environment"));
return -1;
@@ -3314,3 +3305,23 @@ int ref_update_expects_existing_old_ref(struct ref_update *update)
return (update->flags & REF_HAVE_OLD) &&
(!is_null_oid(&update->old_oid) || update->old_target);
}
+
+const char *ref_transaction_error_msg(enum ref_transaction_error err)
+{
+ switch (err) {
+ case REF_TRANSACTION_ERROR_NAME_CONFLICT:
+ return "refname conflict";
+ case REF_TRANSACTION_ERROR_CREATE_EXISTS:
+ return "reference already exists";
+ case REF_TRANSACTION_ERROR_NONEXISTENT_REF:
+ return "reference does not exist";
+ case REF_TRANSACTION_ERROR_INCORRECT_OLD_VALUE:
+ return "incorrect old value provided";
+ case REF_TRANSACTION_ERROR_INVALID_NEW_VALUE:
+ return "invalid new value provided";
+ case REF_TRANSACTION_ERROR_EXPECTED_SYMREF:
+ return "expected symref but found regular ref";
+ default:
+ return "unknown failure";
+ }
+}
diff --git a/refs.h b/refs.h
index 46a6008e07..efa182c6a1 100644
--- a/refs.h
+++ b/refs.h
@@ -452,10 +452,9 @@ static inline const char *has_glob_specials(const char *pattern)
return strpbrk(pattern, "?*[");
}
-void refs_warn_dangling_symref(struct ref_store *refs, FILE *fp,
- const char *msg_fmt, const char *refname);
void refs_warn_dangling_symrefs(struct ref_store *refs, FILE *fp,
- const char *msg_fmt, const struct string_list *refnames);
+ const char *indent, int dry_run,
+ const struct string_list *refnames);
/*
* Flags for controlling behaviour of pack_refs()
@@ -908,6 +907,11 @@ void ref_transaction_for_each_rejected_update(struct ref_transaction *transactio
void *cb_data);
/*
+ * Translate errors to human readable error messages.
+ */
+const char *ref_transaction_error_msg(enum ref_transaction_error err);
+
+/*
* Free `*transaction` and all associated data.
*/
void ref_transaction_free(struct ref_transaction *transaction);
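
A hedged sketch of the intended use of the new translation helper: code that learns about a rejected update can turn the enum into a printable reason. The surrounding reporting function is illustrative:

    static void report_rejection(const char *refname,
                                 enum ref_transaction_error err)
    {
            error(_("cannot process ref '%s': %s"),
                  refname, ref_transaction_error_msg(err));
    }
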
diff --git a/refs/files-backend.c b/refs/files-backend.c
index bf6f89b1d1..89ae4517a9 100644
--- a/refs/files-backend.c
+++ b/refs/files-backend.c
@@ -2760,6 +2760,8 @@ static void files_transaction_cleanup(struct files_ref_store *refs,
if (lock) {
unlock_ref(lock);
+ try_remove_empty_parents(refs, update->refname,
+ REMOVE_EMPTY_PARENTS_REF);
update->backend_data = NULL;
}
}
@@ -3208,6 +3210,10 @@ static int files_transaction_finish(struct ref_store *ref_store,
*/
for (i = 0; i < transaction->nr; i++) {
struct ref_update *update = transaction->updates[i];
+
+ if (update->rejection_err)
+ continue;
+
if (update->flags & REF_DELETING &&
!(update->flags & REF_LOG_ONLY) &&
!(update->flags & REF_IS_PRUNING)) {
@@ -3239,6 +3245,9 @@ static int files_transaction_finish(struct ref_store *ref_store,
struct ref_update *update = transaction->updates[i];
struct ref_lock *lock = update->backend_data;
+ if (update->rejection_err)
+ continue;
+
if (update->flags & REF_DELETING &&
!(update->flags & REF_LOG_ONLY)) {
update->flags |= REF_DELETED_RMDIR;
diff --git a/remote-curl.c b/remote-curl.c
index b8bc3a80cf..84f4694780 100644
--- a/remote-curl.c
+++ b/remote-curl.c
@@ -285,7 +285,7 @@ static const struct git_hash_algo *detect_hash_algo(struct discovery *heads)
* back to SHA1, which may or may not be correct.
*/
if (!p)
- return &hash_algos[GIT_HASH_SHA1];
+ return &hash_algos[GIT_HASH_SHA1_LEGACY];
algo = hash_algo_by_length((p - heads->buf) / 2);
if (algo == GIT_HASH_UNKNOWN)
diff --git a/remote.c b/remote.c
index 4099183cac..e965f022f1 100644
--- a/remote.c
+++ b/remote.c
@@ -12,7 +12,7 @@
#include "refs.h"
#include "refspec.h"
#include "object-name.h"
-#include "object-store.h"
+#include "odb.h"
#include "path.h"
#include "commit.h"
#include "diff.h"
@@ -165,6 +165,9 @@ static void remote_clear(struct remote *remote)
strvec_clear(&remote->url);
strvec_clear(&remote->pushurl);
+ refspec_clear(&remote->push);
+ refspec_clear(&remote->fetch);
+
free((char *)remote->receivepack);
free((char *)remote->uploadpack);
FREE_AND_NULL(remote->http_proxy);
@@ -174,9 +177,15 @@ static void remote_clear(struct remote *remote)
static void add_merge(struct branch *branch, const char *name)
{
- ALLOC_GROW(branch->merge_name, branch->merge_nr + 1,
+ struct refspec_item *merge;
+
+ ALLOC_GROW(branch->merge, branch->merge_nr + 1,
branch->merge_alloc);
- branch->merge_name[branch->merge_nr++] = name;
+
+ merge = xcalloc(1, sizeof(*merge));
+ merge->src = xstrdup(name);
+
+ branch->merge[branch->merge_nr++] = merge;
}
struct branches_hash_key {
@@ -247,15 +256,23 @@ static struct branch *make_branch(struct remote_state *remote_state,
return ret;
}
+static void merge_clear(struct branch *branch)
+{
+ for (int i = 0; i < branch->merge_nr; i++) {
+ refspec_item_clear(branch->merge[i]);
+ free(branch->merge[i]);
+ }
+ FREE_AND_NULL(branch->merge);
+ branch->merge_nr = 0;
+}
+
static void branch_release(struct branch *branch)
{
free((char *)branch->name);
free((char *)branch->refname);
free(branch->remote_name);
free(branch->pushremote_name);
- for (int i = 0; i < branch->merge_nr; i++)
- refspec_item_clear(branch->merge[i]);
- free(branch->merge);
+ merge_clear(branch);
}
static struct rewrite *make_rewrite(struct rewrites *r,
@@ -317,11 +334,10 @@ static void warn_about_deprecated_remote_type(const char *type,
type, remote->name, remote->name, remote->name);
}
-static void read_remotes_file(struct remote_state *remote_state,
- struct remote *remote)
+static void read_remotes_file(struct repository *repo, struct remote *remote)
{
struct strbuf buf = STRBUF_INIT;
- FILE *f = fopen_or_warn(repo_git_path_append(the_repository, &buf,
+ FILE *f = fopen_or_warn(repo_git_path_append(repo, &buf,
"remotes/%s", remote->name), "r");
if (!f)
@@ -337,7 +353,7 @@ static void read_remotes_file(struct remote_state *remote_state,
strbuf_rtrim(&buf);
if (skip_prefix(buf.buf, "URL:", &v))
- add_url_alias(remote_state, remote,
+ add_url_alias(repo->remote_state, remote,
skip_spaces(v));
else if (skip_prefix(buf.buf, "Push:", &v))
refspec_append(&remote->push, skip_spaces(v));
@@ -350,12 +366,11 @@ out:
strbuf_release(&buf);
}
-static void read_branches_file(struct remote_state *remote_state,
- struct remote *remote)
+static void read_branches_file(struct repository *repo, struct remote *remote)
{
char *frag, *to_free = NULL;
struct strbuf buf = STRBUF_INIT;
- FILE *f = fopen_or_warn(repo_git_path_append(the_repository, &buf,
+ FILE *f = fopen_or_warn(repo_git_path_append(repo, &buf,
"branches/%s", remote->name), "r");
if (!f)
@@ -382,9 +397,9 @@ static void read_branches_file(struct remote_state *remote_state,
if (frag)
*(frag++) = '\0';
else
- frag = to_free = repo_default_branch_name(the_repository, 0);
+ frag = to_free = repo_default_branch_name(repo, 0);
- add_url_alias(remote_state, remote, buf.buf);
+ add_url_alias(repo->remote_state, remote, buf.buf);
refspec_appendf(&remote->fetch, "refs/heads/%s:refs/heads/%s",
frag, remote->name);
@@ -429,7 +444,7 @@ static int handle_config(const char *key, const char *value,
} else if (!strcmp(subkey, "merge")) {
if (!value)
return config_error_nonbool(key);
- add_merge(branch, xstrdup(value));
+ add_merge(branch, value);
}
return 0;
}
@@ -681,7 +696,7 @@ const char *pushremote_for_branch(struct branch *branch, int *explicit)
branch, explicit);
}
-static struct remote *remotes_remote_get(struct remote_state *remote_state,
+static struct remote *remotes_remote_get(struct repository *repo,
const char *name);
char *remote_ref_for_branch(struct branch *branch, int for_push)
@@ -692,7 +707,7 @@ char *remote_ref_for_branch(struct branch *branch, int for_push)
if (branch) {
if (!for_push) {
if (branch->merge_nr) {
- return xstrdup(branch->merge_name[0]);
+ return xstrdup(branch->merge[0]->src);
}
} else {
char *dst;
@@ -700,7 +715,7 @@ char *remote_ref_for_branch(struct branch *branch, int for_push)
the_repository->remote_state, branch,
NULL);
struct remote *remote = remotes_remote_get(
- the_repository->remote_state, remote_name);
+ the_repository, remote_name);
if (remote && remote->push.nr &&
(dst = apply_refspecs(&remote->push,
@@ -757,10 +772,11 @@ loop_cleanup:
}
static struct remote *
-remotes_remote_get_1(struct remote_state *remote_state, const char *name,
+remotes_remote_get_1(struct repository *repo, const char *name,
const char *(*get_default)(struct remote_state *,
struct branch *, int *))
{
+ struct remote_state *remote_state = repo->remote_state;
struct remote *ret;
int name_given = 0;
@@ -774,9 +790,9 @@ remotes_remote_get_1(struct remote_state *remote_state, const char *name,
#ifndef WITH_BREAKING_CHANGES
if (valid_remote_nick(name) && have_git_dir()) {
if (!valid_remote(ret))
- read_remotes_file(remote_state, ret);
+ read_remotes_file(repo, ret);
if (!valid_remote(ret))
- read_branches_file(remote_state, ret);
+ read_branches_file(repo, ret);
}
#endif /* WITH_BREAKING_CHANGES */
if (name_given && !valid_remote(ret))
@@ -790,35 +806,33 @@ remotes_remote_get_1(struct remote_state *remote_state, const char *name,
}
static inline struct remote *
-remotes_remote_get(struct remote_state *remote_state, const char *name)
+remotes_remote_get(struct repository *repo, const char *name)
{
- return remotes_remote_get_1(remote_state, name,
- remotes_remote_for_branch);
+ return remotes_remote_get_1(repo, name, remotes_remote_for_branch);
}
struct remote *remote_get(const char *name)
{
read_config(the_repository, 0);
- return remotes_remote_get(the_repository->remote_state, name);
+ return remotes_remote_get(the_repository, name);
}
struct remote *remote_get_early(const char *name)
{
read_config(the_repository, 1);
- return remotes_remote_get(the_repository->remote_state, name);
+ return remotes_remote_get(the_repository, name);
}
static inline struct remote *
-remotes_pushremote_get(struct remote_state *remote_state, const char *name)
+remotes_pushremote_get(struct repository *repo, const char *name)
{
- return remotes_remote_get_1(remote_state, name,
- remotes_pushremote_for_branch);
+ return remotes_remote_get_1(repo, name, remotes_pushremote_for_branch);
}
struct remote *pushremote_get(const char *name)
{
read_config(the_repository, 0);
- return remotes_pushremote_get(the_repository->remote_state, name);
+ return remotes_pushremote_get(the_repository, name);
}
int remote_is_configured(struct remote *remote, int in_repo)
@@ -1182,7 +1196,7 @@ static void show_push_unqualified_ref_name_error(const char *dst_value,
BUG("'%s' is not a valid object, "
"match_explicit_lhs() should catch this!",
matched_src_name);
- type = oid_object_info(the_repository, &oid, NULL);
+ type = odb_read_object_info(the_repository->objects, &oid, NULL);
if (type == OBJ_COMMIT) {
advise(_("The <src> part of the refspec is a commit object.\n"
"Did you mean to create a new branch by pushing to\n"
@@ -1412,7 +1426,8 @@ static void add_missing_tags(struct ref *src, struct ref **dst, struct ref ***ds
continue; /* not a tag */
if (string_list_has_string(&dst_tag, ref->name))
continue; /* they already have it */
- if (oid_object_info(the_repository, &ref->new_oid, NULL) != OBJ_TAG)
+ if (odb_read_object_info(the_repository->objects,
+ &ref->new_oid, NULL) != OBJ_TAG)
continue; /* be conservative */
item = string_list_append(&src_tag, ref->name);
item->util = ref;
@@ -1702,7 +1717,7 @@ void set_ref_status_for_push(struct ref *remote_refs, int send_mirror,
if (!reject_reason && !ref->deletion && !is_null_oid(&ref->old_oid)) {
if (starts_with(ref->name, "refs/tags/"))
reject_reason = REF_STATUS_REJECT_ALREADY_EXISTS;
- else if (!has_object(the_repository, &ref->old_oid, HAS_OBJECT_RECHECK_PACKED))
+ else if (!odb_has_object(the_repository->objects, &ref->old_oid, HAS_OBJECT_RECHECK_PACKED))
reject_reason = REF_STATUS_REJECT_FETCH_FIRST;
else if (!lookup_commit_reference_gently(the_repository, &ref->old_oid, 1) ||
!lookup_commit_reference_gently(the_repository, &ref->new_oid, 1))
@@ -1722,7 +1737,7 @@ void set_ref_status_for_push(struct ref *remote_refs, int send_mirror,
}
}
-static void set_merge(struct remote_state *remote_state, struct branch *ret)
+static void set_merge(struct repository *repo, struct branch *ret)
{
struct remote *remote;
char *ref;
@@ -1731,52 +1746,80 @@ static void set_merge(struct remote_state *remote_state, struct branch *ret)
if (!ret)
return; /* no branch */
- if (ret->merge)
+ if (ret->set_merge)
return; /* already run */
if (!ret->remote_name || !ret->merge_nr) {
/*
* no merge config; let's make sure we don't confuse callers
* with a non-zero merge_nr but a NULL merge
*/
- ret->merge_nr = 0;
+ merge_clear(ret);
return;
}
+ ret->set_merge = 1;
- remote = remotes_remote_get(remote_state, ret->remote_name);
+ remote = remotes_remote_get(repo, ret->remote_name);
- CALLOC_ARRAY(ret->merge, ret->merge_nr);
for (i = 0; i < ret->merge_nr; i++) {
- ret->merge[i] = xcalloc(1, sizeof(**ret->merge));
- ret->merge[i]->src = xstrdup(ret->merge_name[i]);
if (!remote_find_tracking(remote, ret->merge[i]) ||
strcmp(ret->remote_name, "."))
continue;
- if (repo_dwim_ref(the_repository, ret->merge_name[i],
- strlen(ret->merge_name[i]), &oid, &ref,
+ if (repo_dwim_ref(repo, ret->merge[i]->src,
+ strlen(ret->merge[i]->src), &oid, &ref,
0) == 1)
ret->merge[i]->dst = ref;
else
- ret->merge[i]->dst = xstrdup(ret->merge_name[i]);
+ ret->merge[i]->dst = xstrdup(ret->merge[i]->src);
}
}
-struct branch *branch_get(const char *name)
+static struct branch *repo_branch_get(struct repository *repo, const char *name)
{
struct branch *ret;
- read_config(the_repository, 0);
+ read_config(repo, 0);
if (!name || !*name || !strcmp(name, "HEAD"))
- ret = the_repository->remote_state->current_branch;
+ ret = repo->remote_state->current_branch;
else
- ret = make_branch(the_repository->remote_state, name,
+ ret = make_branch(repo->remote_state, name,
strlen(name));
- set_merge(the_repository->remote_state, ret);
+ set_merge(repo, ret);
return ret;
}
+struct branch *branch_get(const char *name)
+{
+ return repo_branch_get(the_repository, name);
+}
+
+const char *repo_default_remote(struct repository *repo)
+{
+ struct branch *branch;
+
+ read_config(repo, 0);
+ branch = repo_branch_get(repo, "HEAD");
+
+ return remotes_remote_for_branch(repo->remote_state, branch, NULL);
+}
+
+const char *repo_remote_from_url(struct repository *repo, const char *url)
+{
+ read_config(repo, 0);
+
+ for (int i = 0; i < repo->remote_state->remotes_nr; i++) {
+ struct remote *remote = repo->remote_state->remotes[i];
+ if (!remote)
+ continue;
+
+ if (remote_has_url(remote, url))
+ return remote->name;
+ }
+ return NULL;
+}
+
int branch_has_merge_config(struct branch *branch)
{
- return branch && !!branch->merge;
+ return branch && branch->set_merge;
}
int branch_merge_matches(struct branch *branch,
@@ -1841,13 +1884,14 @@ static const char *tracking_for_push_dest(struct remote *remote,
return ret;
}
-static const char *branch_get_push_1(struct remote_state *remote_state,
+static const char *branch_get_push_1(struct repository *repo,
struct branch *branch, struct strbuf *err)
{
+ struct remote_state *remote_state = repo->remote_state;
struct remote *remote;
remote = remotes_remote_get(
- remote_state,
+ repo,
remotes_pushremote_for_branch(remote_state, branch, NULL));
if (!remote)
return error_buf(err,
@@ -1914,7 +1958,7 @@ const char *branch_get_push(struct branch *branch, struct strbuf *err)
if (!branch->push_tracking_ref)
branch->push_tracking_ref = branch_get_push_1(
- the_repository->remote_state, branch, err);
+ the_repository, branch, err);
return branch->push_tracking_ref;
}
diff --git a/remote.h b/remote.h
index 7e4943ae3a..0ca399e183 100644
--- a/remote.h
+++ b/remote.h
@@ -9,6 +9,7 @@
struct option;
struct transport_ls_refs_options;
+struct repository;
/**
* The API gives access to the configuration related to remotes. It handles
@@ -315,8 +316,8 @@ struct branch {
char *pushremote_name;
- /* An array of the "merge" lines in the configuration. */
- const char **merge_name;
+ /* True if set_merge() has been called to finalize the merge array */
+ int set_merge;
/**
* An array of the struct refspecs used for the merge lines. That is,
@@ -338,6 +339,9 @@ const char *remote_for_branch(struct branch *branch, int *explicit);
const char *pushremote_for_branch(struct branch *branch, int *explicit);
char *remote_ref_for_branch(struct branch *branch, int for_push);
+const char *repo_default_remote(struct repository *repo);
+const char *repo_remote_from_url(struct repository *repo, const char *url);
+
/* returns true if the given branch has merge configuration given. */
int branch_has_merge_config(struct branch *branch);
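
A hedged sketch of the two new repository-scoped lookups; both return strings owned by the remote subsystem (or NULL), so the caller does not free them. The URL and the wrapper function are illustrative:

    static void show_remote_info(struct repository *repo)
    {
            const char *def = repo_default_remote(repo);
            const char *owner = repo_remote_from_url(repo,
                                                     "https://example.com/repo.git");

            if (owner)
                    printf("URL already configured as remote '%s'\n", owner);
            else if (def)
                    printf("no remote has that URL; default is '%s'\n", def);
    }
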
diff --git a/replace-object.c b/replace-object.c
index f8c5f68837..3eae051074 100644
--- a/replace-object.c
+++ b/replace-object.c
@@ -2,7 +2,7 @@
#include "gettext.h"
#include "hex.h"
#include "oidmap.h"
-#include "object-store.h"
+#include "odb.h"
#include "replace-object.h"
#include "refs.h"
#include "repository.h"
diff --git a/replace-object.h b/replace-object.h
index 3052e96a62..4c9f2a2383 100644
--- a/replace-object.h
+++ b/replace-object.h
@@ -3,7 +3,7 @@
#include "oidmap.h"
#include "repository.h"
-#include "object-store.h"
+#include "odb.h"
struct replace_object {
struct oidmap_entry original;
diff --git a/repo-settings.c b/repo-settings.c
index 4129f8fb2b..195c24e9c0 100644
--- a/repo-settings.c
+++ b/repo-settings.c
@@ -54,11 +54,13 @@ void prepare_repo_settings(struct repository *r)
r->settings.fetch_negotiation_algorithm = FETCH_NEGOTIATION_SKIPPING;
r->settings.pack_use_bitmap_boundary_traversal = 1;
r->settings.pack_use_multi_pack_reuse = 1;
+ r->settings.pack_use_path_walk = 1;
}
if (manyfiles) {
r->settings.index_version = 4;
r->settings.index_skip_hash = 1;
r->settings.core_untracked_cache = UNTRACKED_CACHE_WRITE;
+ r->settings.pack_use_path_walk = 1;
}
/* Commit graph config or default, does not cascade (simple) */
@@ -73,6 +75,7 @@ void prepare_repo_settings(struct repository *r)
/* Boolean config or default, does not cascade (simple) */
repo_cfg_bool(r, "pack.usesparse", &r->settings.pack_use_sparse, 1);
+ repo_cfg_bool(r, "pack.usepathwalk", &r->settings.pack_use_path_walk, 0);
repo_cfg_bool(r, "core.multipackindex", &r->settings.core_multi_pack_index, 1);
repo_cfg_bool(r, "index.sparse", &r->settings.sparse_index, 0);
repo_cfg_bool(r, "index.skiphash", &r->settings.index_skip_hash, r->settings.index_skip_hash);
diff --git a/repo-settings.h b/repo-settings.h
index 2bf24b2597..d477885561 100644
--- a/repo-settings.h
+++ b/repo-settings.h
@@ -56,6 +56,7 @@ struct repo_settings {
enum untracked_cache_setting core_untracked_cache;
int pack_use_sparse;
+ int pack_use_path_walk;
enum fetch_negotiation_setting fetch_negotiation_algorithm;
int core_multi_pack_index;
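
A hedged sketch of consuming the new setting; prepare_repo_settings() must have populated the struct first, and the callee is illustrative (the real consumers live in the pack-objects machinery):

    prepare_repo_settings(repo);

    if (repo->settings.pack_use_path_walk)
            use_path_walk_strategy();   /* illustrative callee */
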
diff --git a/repository.c b/repository.c
index 9b3d6665fc..ecd691181f 100644
--- a/repository.c
+++ b/repository.c
@@ -1,7 +1,7 @@
#include "git-compat-util.h"
#include "abspath.h"
#include "repository.h"
-#include "object-store.h"
+#include "odb.h"
#include "config.h"
#include "object.h"
#include "lockfile.h"
@@ -52,7 +52,7 @@ static void set_default_hash_algo(struct repository *repo)
void initialize_repository(struct repository *repo)
{
- repo->objects = raw_object_store_new();
+ repo->objects = odb_new(repo);
repo->remote_state = remote_state_new();
repo->parsed_objects = parsed_object_pool_new(repo);
ALLOC_ARRAY(repo->index, 1);
@@ -107,9 +107,9 @@ const char *repo_get_common_dir(struct repository *repo)
const char *repo_get_object_directory(struct repository *repo)
{
- if (!repo->objects->odb)
+ if (!repo->objects->sources)
BUG("repository hasn't been set up");
- return repo->objects->odb->path;
+ return repo->objects->sources->path;
}
const char *repo_get_index_file(struct repository *repo)
@@ -165,14 +165,15 @@ void repo_set_gitdir(struct repository *repo,
repo_set_commondir(repo, o->commondir);
- if (!repo->objects->odb) {
- CALLOC_ARRAY(repo->objects->odb, 1);
- repo->objects->odb_tail = &repo->objects->odb->next;
+ if (!repo->objects->sources) {
+ CALLOC_ARRAY(repo->objects->sources, 1);
+ repo->objects->sources->odb = repo->objects;
+ repo->objects->sources_tail = &repo->objects->sources->next;
}
- expand_base_dir(&repo->objects->odb->path, o->object_dir,
+ expand_base_dir(&repo->objects->sources->path, o->object_dir,
repo->commondir, "objects");
- repo->objects->odb->disable_ref_updates = o->disable_ref_updates;
+ repo->objects->sources->disable_ref_updates = o->disable_ref_updates;
free(repo->objects->alternate_db);
repo->objects->alternate_db = xstrdup_or_null(o->alternate_db);
@@ -284,6 +285,7 @@ int repo_init(struct repository *repo,
repo_set_ref_storage_format(repo, format.ref_storage_format);
repo->repository_format_worktree_config = format.worktree_config;
repo->repository_format_relative_worktrees = format.relative_worktrees;
+ repo->repository_format_precious_objects = format.precious_objects;
/* take ownership of format.partial_clone */
repo->repository_format_partial_clone = format.partial_clone;
@@ -374,7 +376,7 @@ void repo_clear(struct repository *repo)
FREE_AND_NULL(repo->worktree);
FREE_AND_NULL(repo->submodule_prefix);
- raw_object_store_clear(repo->objects);
+ odb_clear(repo->objects);
FREE_AND_NULL(repo->objects);
parsed_object_pool_clear(repo->parsed_objects);
diff --git a/repository.h b/repository.h
index c4c92b2ab9..042dc93f0f 100644
--- a/repository.h
+++ b/repository.h
@@ -9,7 +9,7 @@ struct git_hash_algo;
struct index_state;
struct lock_file;
struct pathspec;
-struct raw_object_store;
+struct object_database;
struct submodule_cache;
struct promisor_remote_config;
struct remote_state;
@@ -20,6 +20,12 @@ enum ref_storage_format {
REF_STORAGE_FORMAT_REFTABLE,
};
+#ifdef WITH_BREAKING_CHANGES /* Git 3.0 */
+# define REF_STORAGE_FORMAT_DEFAULT REF_STORAGE_FORMAT_REFTABLE
+#else
+# define REF_STORAGE_FORMAT_DEFAULT REF_STORAGE_FORMAT_FILES
+#endif
+
struct repo_path_cache {
char *squash_msg;
char *merge_msg;
@@ -47,7 +53,7 @@ struct repository {
/*
* Holds any information related to accessing the raw object content.
*/
- struct raw_object_store *objects;
+ struct object_database *objects;
/*
* All objects in this repository that have been parsed. This structure
@@ -151,6 +157,7 @@ struct repository {
/* Configurations */
int repository_format_worktree_config;
int repository_format_relative_worktrees;
+ int repository_format_precious_objects;
/* Indicate if a repository has a different 'commondir' from 'gitdir' */
unsigned different_commondir:1;
diff --git a/rerere.c b/rerere.c
index 3cd37c5f0a..8bb97c9822 100644
--- a/rerere.c
+++ b/rerere.c
@@ -18,7 +18,7 @@
#include "path.h"
#include "pathspec.h"
#include "object-file.h"
-#include "object-store.h"
+#include "odb.h"
#include "strmap.h"
#define RESOLVED 0
@@ -1000,9 +1000,8 @@ static int handle_cache(struct index_state *istate,
break;
i = ce_stage(ce) - 1;
if (!mmfile[i].ptr) {
- mmfile[i].ptr = repo_read_object_file(the_repository,
- &ce->oid, &type,
- &size);
+ mmfile[i].ptr = odb_read_object(the_repository->objects,
+ &ce->oid, &type, &size);
if (!mmfile[i].ptr)
die(_("unable to read %s"),
oid_to_hex(&ce->oid));
diff --git a/revision.c b/revision.c
index 2c36a9c179..c2e17205a8 100644
--- a/revision.c
+++ b/revision.c
@@ -8,7 +8,7 @@
#include "hex.h"
#include "object-name.h"
#include "object-file.h"
-#include "object-store.h"
+#include "odb.h"
#include "oidset.h"
#include "tag.h"
#include "blob.h"
@@ -50,8 +50,6 @@
#include "parse-options.h"
#include "wildmatch.h"
-volatile show_early_output_fn_t show_early_output;
-
static char *term_bad;
static char *term_good;
@@ -675,24 +673,48 @@ static int forbid_bloom_filters(struct pathspec *spec)
{
if (spec->has_wildcard)
return 1;
- if (spec->nr > 1)
- return 1;
if (spec->magic & ~PATHSPEC_LITERAL)
return 1;
- if (spec->nr && (spec->items[0].magic & ~PATHSPEC_LITERAL))
- return 1;
+ for (size_t nr = 0; nr < spec->nr; nr++)
+ if (spec->items[nr].magic & ~PATHSPEC_LITERAL)
+ return 1;
return 0;
}
-static void prepare_to_use_bloom_filter(struct rev_info *revs)
+static void release_revisions_bloom_keyvecs(struct rev_info *revs);
+
+static int convert_pathspec_to_bloom_keyvec(struct bloom_keyvec **out,
+ const struct pathspec_item *pi,
+ const struct bloom_filter_settings *settings)
{
- struct pathspec_item *pi;
char *path_alloc = NULL;
- const char *path, *p;
+ const char *path;
size_t len;
- int path_component_nr = 1;
+ int res = 0;
+
+ /* remove single trailing slash from path, if needed */
+ if (pi->len > 0 && pi->match[pi->len - 1] == '/') {
+ path_alloc = xmemdupz(pi->match, pi->len - 1);
+ path = path_alloc;
+ } else
+ path = pi->match;
+ len = strlen(path);
+ if (!len) {
+ res = -1;
+ goto cleanup;
+ }
+
+ *out = bloom_keyvec_new(path, len, settings);
+
+cleanup:
+ free(path_alloc);
+ return res;
+}
+
+static void prepare_to_use_bloom_filter(struct rev_info *revs)
+{
if (!revs->commits)
return;
@@ -708,48 +730,14 @@ static void prepare_to_use_bloom_filter(struct rev_info *revs)
if (!revs->pruning.pathspec.nr)
return;
- pi = &revs->pruning.pathspec.items[0];
-
- /* remove single trailing slash from path, if needed */
- if (pi->len > 0 && pi->match[pi->len - 1] == '/') {
- path_alloc = xmemdupz(pi->match, pi->len - 1);
- path = path_alloc;
- } else
- path = pi->match;
-
- len = strlen(path);
- if (!len) {
- revs->bloom_filter_settings = NULL;
- free(path_alloc);
- return;
- }
-
- p = path;
- while (*p) {
- /*
- * At this point, the path is normalized to use Unix-style
- * path separators. This is required due to how the
- * changed-path Bloom filters store the paths.
- */
- if (*p == '/')
- path_component_nr++;
- p++;
- }
-
- revs->bloom_keys_nr = path_component_nr;
- ALLOC_ARRAY(revs->bloom_keys, revs->bloom_keys_nr);
-
- fill_bloom_key(path, len, &revs->bloom_keys[0],
- revs->bloom_filter_settings);
- path_component_nr = 1;
+ revs->bloom_keyvecs_nr = revs->pruning.pathspec.nr;
+ CALLOC_ARRAY(revs->bloom_keyvecs, revs->bloom_keyvecs_nr);
- p = path + len - 1;
- while (p > path) {
- if (*p == '/')
- fill_bloom_key(path, p - path,
- &revs->bloom_keys[path_component_nr++],
- revs->bloom_filter_settings);
- p--;
+ for (int i = 0; i < revs->pruning.pathspec.nr; i++) {
+ if (convert_pathspec_to_bloom_keyvec(&revs->bloom_keyvecs[i],
+ &revs->pruning.pathspec.items[i],
+ revs->bloom_filter_settings))
+ goto fail;
}
if (trace2_is_enabled() && !bloom_filter_atexit_registered) {
@@ -757,14 +745,18 @@ static void prepare_to_use_bloom_filter(struct rev_info *revs)
bloom_filter_atexit_registered = 1;
}
- free(path_alloc);
+ return;
+
+fail:
+ revs->bloom_filter_settings = NULL;
+ release_revisions_bloom_keyvecs(revs);
}
static int check_maybe_different_in_bloom_filter(struct rev_info *revs,
struct commit *commit)
{
struct bloom_filter *filter;
- int result = 1, j;
+ int result = 0;
if (!revs->repo->objects->commit_graph)
return -1;
@@ -779,10 +771,10 @@ static int check_maybe_different_in_bloom_filter(struct rev_info *revs,
return -1;
}
- for (j = 0; result && j < revs->bloom_keys_nr; j++) {
- result = bloom_filter_contains(filter,
- &revs->bloom_keys[j],
- revs->bloom_filter_settings);
+ for (size_t nr = 0; !result && nr < revs->bloom_keyvecs_nr; nr++) {
+ result = bloom_filter_contains_vec(filter,
+ revs->bloom_keyvecs[nr],
+ revs->bloom_filter_settings);
}
if (result)
@@ -823,7 +815,7 @@ static int rev_compare_tree(struct rev_info *revs,
return REV_TREE_SAME;
}
- if (revs->bloom_keys_nr && !nth_parent) {
+ if (revs->bloom_keyvecs_nr && !nth_parent) {
bloom_ret = check_maybe_different_in_bloom_filter(revs, commit);
if (bloom_ret == 0)
@@ -850,7 +842,7 @@ static int rev_same_tree_as_empty(struct rev_info *revs, struct commit *commit,
if (!t1)
return 0;
- if (!nth_parent && revs->bloom_keys_nr) {
+ if (!nth_parent && revs->bloom_keyvecs_nr) {
bloom_ret = check_maybe_different_in_bloom_filter(revs, commit);
if (!bloom_ret)
return 1;
@@ -1479,7 +1471,6 @@ static int limit_list(struct rev_info *revs)
while (original_list) {
struct commit *commit = pop_commit(&original_list);
struct object *obj = &commit->object;
- show_early_output_fn_t show;
if (commit == interesting_cache)
interesting_cache = NULL;
@@ -1503,13 +1494,6 @@ static int limit_list(struct rev_info *revs)
continue;
date = commit->date;
p = &commit_list_insert(commit, p)->next;
-
- show = show_early_output;
- if (!show)
- continue;
-
- show(revs, newlist);
- show_early_output = NULL;
}
if (revs->cherry_pick || revs->cherry_mark)
cherry_pick_list(newlist, revs);
@@ -1907,7 +1891,8 @@ static void add_alternate_refs_to_pending(struct rev_info *revs,
struct add_alternate_refs_data data;
data.revs = revs;
data.flags = flags;
- for_each_alternate_ref(add_one_alternate_ref, &data);
+ odb_for_each_alternate_ref(the_repository->objects,
+ add_one_alternate_ref, &data);
}
static int add_parents_only(struct rev_info *revs, const char *arg_, int flags,
@@ -2060,6 +2045,7 @@ static void prepare_show_merge(struct rev_info *revs)
parse_pathspec(&revs->prune_data, PATHSPEC_ALL_MAGIC & ~PATHSPEC_LITERAL,
PATHSPEC_PREFER_FULL | PATHSPEC_LITERAL_PATH, "", prune);
revs->limited = 1;
+ free(prune);
}
static int dotdot_missing(const char *arg, char *dotdot,
@@ -2441,13 +2427,6 @@ static int handle_revision_opt(struct rev_info *revs, int argc, const char **arg
} else if (!strcmp(arg, "--author-date-order")) {
revs->sort_order = REV_SORT_BY_AUTHOR_DATE;
revs->topo_order = 1;
- } else if (!strcmp(arg, "--early-output")) {
- revs->early_output = 100;
- revs->topo_order = 1;
- } else if (skip_prefix(arg, "--early-output=", &optarg)) {
- if (strtoul_ui(optarg, 10, &revs->early_output) < 0)
- die("'%s': not a non-negative integer", optarg);
- revs->topo_order = 1;
} else if (!strcmp(arg, "--parents")) {
revs->rewrite_parents = 1;
revs->print_parents = 1;
@@ -3111,7 +3090,7 @@ int setup_revisions(int argc, const char **argv, struct rev_info *revs, struct s
/* Pickaxe, diff-filter and rename following need diffs */
if ((revs->diffopt.pickaxe_opts & DIFF_PICKAXE_KINDS_MASK) ||
- revs->diffopt.filter ||
+ revs->diffopt.filter || revs->diffopt.filter_not ||
revs->diffopt.flags.follow_renames)
revs->diff = 1;
@@ -3200,6 +3179,14 @@ static void release_revisions_mailmap(struct string_list *mailmap)
static void release_revisions_topo_walk_info(struct topo_walk_info *info);
+static void release_revisions_bloom_keyvecs(struct rev_info *revs)
+{
+ for (size_t nr = 0; nr < revs->bloom_keyvecs_nr; nr++)
+ bloom_keyvec_free(revs->bloom_keyvecs[nr]);
+ FREE_AND_NULL(revs->bloom_keyvecs);
+ revs->bloom_keyvecs_nr = 0;
+}
+
static void free_void_commit_list(void *list)
{
free_commit_list(list);
@@ -3228,11 +3215,7 @@ void release_revisions(struct rev_info *revs)
clear_decoration(&revs->treesame, free);
line_log_free(revs);
oidset_clear(&revs->missing_commits);
-
- for (int i = 0; i < revs->bloom_keys_nr; i++)
- clear_bloom_key(&revs->bloom_keys[i]);
- FREE_AND_NULL(revs->bloom_keys);
- revs->bloom_keys_nr = 0;
+ release_revisions_bloom_keyvecs(revs);
}
static void add_child(struct rev_info *revs, struct commit *parent, struct commit *child)
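Earlier in this file's hunks, the aggregation in check_maybe_different_in_bloom_filter() flips: result now starts at 0 and the loop runs while !result, so the code ORs across one key vector per pathspec instead of ANDing the per-component keys of a single pathspec. A small standalone toy of that shape follows; it assumes (the hunk itself does not say so) that bloom_filter_contains_vec() reports whether every key of one pathspec's vector may be present, and all names below are invented for illustration.

#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>

/* Toy model: each pathspec contributes one vector of key hits. */
static bool vec_maybe_present(const bool *keys_present, size_t nr)
{
	for (size_t i = 0; i < nr; i++)
		if (!keys_present[i])
			return false;	/* AND within one pathspec's vector */
	return true;
}

static bool maybe_different(const bool **vecs, const size_t *vec_nr, size_t nvecs)
{
	bool result = false;

	/* OR across pathspecs, mirroring the rewritten loop condition */
	for (size_t nr = 0; !result && nr < nvecs; nr++)
		result = vec_maybe_present(vecs[nr], vec_nr[nr]);
	return result;
}

int main(void)
{
	bool a[] = { true, true };	/* pathspec 1: every key may be present */
	bool b[] = { true, false };	/* pathspec 2: one key definitely absent */
	const bool *vecs[] = { a, b };
	size_t vec_nr[] = { 2, 2 };

	printf("%d\n", maybe_different(vecs, vec_nr, 2));	/* prints 1 */
	return 0;
}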
diff --git a/revision.h b/revision.h
index 6d369cdad6..21e288c5ba 100644
--- a/revision.h
+++ b/revision.h
@@ -62,7 +62,7 @@ struct repository;
struct rev_info;
struct string_list;
struct saved_parents;
-struct bloom_key;
+struct bloom_keyvec;
struct bloom_filter_settings;
struct option;
struct parse_opt_ctx_t;
@@ -160,8 +160,6 @@ struct rev_info {
/* topo-sort */
enum rev_sort_order sort_order;
- unsigned int early_output;
-
unsigned int ignore_missing:1,
ignore_missing_links:1;
@@ -360,8 +358,8 @@ struct rev_info {
/* Commit graph bloom filter fields */
/* The bloom filter key(s) for the pathspec */
- struct bloom_key *bloom_keys;
- int bloom_keys_nr;
+ struct bloom_keyvec **bloom_keyvecs;
+ int bloom_keyvecs_nr;
/*
* The bloom filter settings used to generate the key.
@@ -553,10 +551,4 @@ int rewrite_parents(struct rev_info *revs,
*/
struct commit_list *get_saved_parents(struct rev_info *revs, const struct commit *commit);
-/**
- * Global for the (undocumented) "--early-output" flag for "git log".
- */
-typedef void (*show_early_output_fn_t)(struct rev_info *, struct commit_list *);
-extern volatile show_early_output_fn_t show_early_output;
-
#endif
diff --git a/sane-ctype.h b/sane-ctype.h
index cbea1b299b..4f476c4381 100644
--- a/sane-ctype.h
+++ b/sane-ctype.h
@@ -1,6 +1,15 @@
#ifndef SANE_CTYPE_H
#define SANE_CTYPE_H
+/*
+ * Explicitly include <ctype.h> so that its header guards kick in from here on.
+ * This ensures that <ctype.h> won't be pulled in again after "sane-ctype.h", as that
+ * would otherwise lead to a compiler error because the function declarations
+ * for `int isascii(int c)` et al would be mangled by our macros with the same
+ * name.
+ */
+#include <ctype.h>
+
/* Sane ctype - no locale, and works with signed chars */
#undef isascii
#undef isspace
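To make the ordering pitfall described in the new comment concrete, here is a self-contained toy; isodd is an invented stand-in and has nothing to do with sane-ctype.h itself.

#include <stdio.h>

/* Define a function-like macro that shares its name with a function. */
#define isodd(x) ((x) & 1)

/*
 * If a header declaring "int isodd(int c);" were included below this
 * point, the preprocessor would rewrite the prototype to
 * "int ((int c) & 1);", which is a syntax error. Including the real
 * <ctype.h> before the #undef/#define block avoids the same trap for
 * isspace() and friends.
 */

int main(void)
{
	printf("%d\n", isodd(3));	/* prints 1 */
	return 0;
}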
diff --git a/scalar.c b/scalar.c
index 355baf75e4..0dc79fa9fb 100644
--- a/scalar.c
+++ b/scalar.c
@@ -170,6 +170,7 @@ static int set_recommended_config(int reconfigure)
{ "core.autoCRLF", "false" },
{ "core.safeCRLF", "false" },
{ "fetch.showForcedUpdates", "false" },
+ { "pack.usePathWalk", "true" },
{ NULL, NULL },
};
int i;
diff --git a/send-pack.c b/send-pack.c
index 86592ce526..67d6987b1c 100644
--- a/send-pack.c
+++ b/send-pack.c
@@ -4,7 +4,7 @@
#include "date.h"
#include "gettext.h"
#include "hex.h"
-#include "object-store.h"
+#include "odb.h"
#include "pkt-line.h"
#include "sideband.h"
#include "run-command.h"
@@ -45,7 +45,7 @@ int option_parse_push_signed(const struct option *opt,
static void feed_object(struct repository *r,
const struct object_id *oid, FILE *fh, int negative)
{
- if (negative && !has_object(r, oid, 0))
+ if (negative && !odb_has_object(r->objects, oid, 0))
return;
if (negative)
@@ -257,6 +257,13 @@ static int receive_status(struct repository *r,
refname);
continue;
}
+
+ /*
+	 * Clients sending duplicate refs can cause the same value
+	 * to be overwritten, leaking the previous allocation.
+ */
+ free(hint->remote_status);
+
if (!strcmp(head, "ng")) {
hint->status = REF_STATUS_REMOTE_REJECT;
if (p)
diff --git a/sequencer.c b/sequencer.c
index 1ee0abbd45..67e4310edc 100644
--- a/sequencer.c
+++ b/sequencer.c
@@ -13,7 +13,7 @@
#include "dir.h"
#include "object-file.h"
#include "object-name.h"
-#include "object-store.h"
+#include "odb.h"
#include "object.h"
#include "pager.h"
#include "commit.h"
@@ -2067,6 +2067,9 @@ static int update_squash_messages(struct repository *r,
const char *message, *body;
const char *encoding = get_commit_output_encoding();
+ if (!is_fixup(command))
+ BUG("not a FIXUP or SQUASH %d", command);
+
if (ctx->current_fixup_count > 0) {
struct strbuf header = STRBUF_INIT;
char *eol;
@@ -2134,8 +2137,7 @@ static int update_squash_messages(struct repository *r,
strbuf_addstr(&buf, "\n\n");
strbuf_add_commented_lines(&buf, body, strlen(body),
comment_line_str);
- } else
- return error(_("unknown command: %d"), command);
+ }
repo_unuse_commit_buffer(r, commit, message);
if (!res)
@@ -5503,9 +5505,8 @@ int sequencer_pick_revisions(struct repository *r,
if (!repo_get_oid(r, name, &oid)) {
if (!lookup_commit_reference_gently(r, &oid, 1)) {
- enum object_type type = oid_object_info(r,
- &oid,
- NULL);
+ enum object_type type = odb_read_object_info(r->objects,
+ &oid, NULL);
res = error(_("%s: can't cherry-pick a %s"),
name, type_name(type));
goto out;
diff --git a/serve.c b/serve.c
index e3ccf1505c..53ecab3b42 100644
--- a/serve.c
+++ b/serve.c
@@ -14,7 +14,7 @@
static int advertise_sid = -1;
static int advertise_object_info = -1;
-static int client_hash_algo = GIT_HASH_SHA1;
+static int client_hash_algo = GIT_HASH_SHA1_LEGACY;
static int always_advertise(struct repository *r UNUSED,
struct strbuf *value UNUSED)
diff --git a/server-info.c b/server-info.c
index d6cd20a39d..9bb30d9ab7 100644
--- a/server-info.c
+++ b/server-info.c
@@ -11,7 +11,7 @@
#include "packfile.h"
#include "path.h"
#include "object-file.h"
-#include "object-store.h"
+#include "odb.h"
#include "server-info.h"
#include "strbuf.h"
#include "tempfile.h"
diff --git a/setup.c b/setup.c
index f93bd6a24a..6f52dab64c 100644
--- a/setup.c
+++ b/setup.c
@@ -753,7 +753,8 @@ static int check_repository_format_gently(const char *gitdir, struct repository_
die("%s", err.buf);
}
- repository_format_precious_objects = candidate->precious_objects;
+ the_repository->repository_format_precious_objects = candidate->precious_objects;
+
string_list_clear(&candidate->unknown_extensions, 0);
string_list_clear(&candidate->v1_only_extensions, 0);
@@ -835,9 +836,12 @@ static void init_repository_format(struct repository_format *format)
int read_repository_format(struct repository_format *format, const char *path)
{
clear_repository_format(format);
+ format->hash_algo = GIT_HASH_SHA1_LEGACY;
git_config_from_file(check_repo_format, path, format);
- if (format->version == -1)
+ if (format->version == -1) {
clear_repository_format(format);
+ format->hash_algo = GIT_HASH_SHA1_LEGACY;
+ }
return format->version;
}
@@ -1864,6 +1868,8 @@ const char *setup_git_directory_gently(int *nongit_ok)
the_repository->repository_format_partial_clone =
repo_fmt.partial_clone;
repo_fmt.partial_clone = NULL;
+ the_repository->repository_format_precious_objects =
+ repo_fmt.precious_objects;
}
}
/*
@@ -2222,11 +2228,11 @@ void initialize_repository_version(int hash_algo,
* version will get adjusted by git-clone(1) once it has learned about
* the remote repository's format.
*/
- if (hash_algo != GIT_HASH_SHA1 ||
+ if (hash_algo != GIT_HASH_SHA1_LEGACY ||
ref_storage_format != REF_STORAGE_FORMAT_FILES)
target_version = GIT_REPO_VERSION_READ;
- if (hash_algo != GIT_HASH_SHA1 && hash_algo != GIT_HASH_UNKNOWN)
+ if (hash_algo != GIT_HASH_SHA1_LEGACY && hash_algo != GIT_HASH_UNKNOWN)
git_config_set("extensions.objectformat",
hash_algos[hash_algo].name);
else if (reinit)
@@ -2481,6 +2487,18 @@ static int read_default_format_config(const char *key, const char *value,
goto out;
}
+ /*
+	 * Enable the reftable format when "feature.experimental" is set.
+ * "init.defaultRefFormat" takes precedence over this setting.
+ */
+ if (!strcmp(key, "feature.experimental") &&
+ cfg->ref_format == REF_STORAGE_FORMAT_UNKNOWN &&
+ git_config_bool(key, value)) {
+ cfg->ref_format = REF_STORAGE_FORMAT_REFTABLE;
+ ret = 0;
+ goto out;
+ }
+
ret = 0;
out:
free(str);
@@ -2541,6 +2559,8 @@ static void repository_format_configure(struct repository_format *repo_fmt,
repo_fmt->ref_storage_format = ref_format;
} else if (cfg.ref_format != REF_STORAGE_FORMAT_UNKNOWN) {
repo_fmt->ref_storage_format = cfg.ref_format;
+ } else {
+ repo_fmt->ref_storage_format = REF_STORAGE_FORMAT_DEFAULT;
}
repo_set_ref_storage_format(the_repository, repo_fmt->ref_storage_format);
}
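Taken together with the read_default_format_config() hunk above, the resulting precedence is: an explicitly requested format (GIT_DEFAULT_REF_FORMAT or init.defaultRefFormat) wins, feature.experimental only fills the gap, and the built-in default applies otherwise. A hypothetical standalone sketch of that ordering; choose_ref_format() does not exist in setup.c and only illustrates the decision order that the t0001-init.sh tests further down exercise.

#include <stdio.h>

static const char *choose_ref_format(const char *explicit_format,
				     int feature_experimental)
{
	if (explicit_format)
		return explicit_format;	/* env or init.defaultRefFormat wins */
	if (feature_experimental)
		return "reftable";	/* gap-filler only */
	return "files";			/* stand-in for the built-in default */
}

int main(void)
{
	printf("%s\n", choose_ref_format(NULL, 1));	/* reftable */
	printf("%s\n", choose_ref_format("files", 1));	/* files: explicit setting wins */
	return 0;
}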
diff --git a/setup.h b/setup.h
index 18dc3b7368..8522fa8575 100644
--- a/setup.h
+++ b/setup.h
@@ -149,7 +149,7 @@ struct repository_format {
{ \
.version = -1, \
.is_bare = -1, \
- .hash_algo = GIT_HASH_SHA1, \
+ .hash_algo = GIT_HASH_DEFAULT, \
.ref_storage_format = REF_STORAGE_FORMAT_FILES, \
.unknown_extensions = STRING_LIST_INIT_DUP, \
.v1_only_extensions = STRING_LIST_INIT_DUP, \
diff --git a/shallow.c b/shallow.c
index faeeeb45f9..ef3adb635f 100644
--- a/shallow.c
+++ b/shallow.c
@@ -5,7 +5,7 @@
#include "repository.h"
#include "tempfile.h"
#include "lockfile.h"
-#include "object-store.h"
+#include "odb.h"
#include "commit.h"
#include "tag.h"
#include "pkt-line.h"
@@ -310,8 +310,8 @@ static int write_one_shallow(const struct commit_graft *graft, void *cb_data)
if (graft->nr_parent != -1)
return 0;
if (data->flags & QUICK) {
- if (!has_object(the_repository, &graft->oid,
- HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR))
+ if (!odb_has_object(the_repository->objects, &graft->oid,
+ HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR))
return 0;
} else if (data->flags & SEEN_ONLY) {
struct commit *c = lookup_commit(the_repository, &graft->oid);
@@ -477,8 +477,8 @@ void prepare_shallow_info(struct shallow_info *info, struct oid_array *sa)
ALLOC_ARRAY(info->ours, sa->nr);
ALLOC_ARRAY(info->theirs, sa->nr);
for (size_t i = 0; i < sa->nr; i++) {
- if (has_object(the_repository, sa->oid + i,
- HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR)) {
+ if (odb_has_object(the_repository->objects, sa->oid + i,
+ HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR)) {
struct commit_graft *graft;
graft = lookup_commit_graft(the_repository,
&sa->oid[i]);
@@ -515,8 +515,8 @@ void remove_nonexistent_theirs_shallow(struct shallow_info *info)
for (i = dst = 0; i < info->nr_theirs; i++) {
if (i != dst)
info->theirs[dst] = info->theirs[i];
- if (has_object(the_repository, oid + info->theirs[i],
- HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR))
+ if (odb_has_object(the_repository->objects, oid + info->theirs[i],
+ HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR))
dst++;
}
info->nr_theirs = dst;
diff --git a/strbuf.c b/strbuf.c
index f30fdc6984..6c3851a7f8 100644
--- a/strbuf.c
+++ b/strbuf.c
@@ -8,55 +8,55 @@
#include "utf8.h"
#include "date.h"
-int starts_with(const char *str, const char *prefix)
+bool starts_with(const char *str, const char *prefix)
{
for (; ; str++, prefix++)
if (!*prefix)
- return 1;
+ return true;
else if (*str != *prefix)
- return 0;
+ return false;
}
-int istarts_with(const char *str, const char *prefix)
+bool istarts_with(const char *str, const char *prefix)
{
for (; ; str++, prefix++)
if (!*prefix)
- return 1;
+ return true;
else if (tolower(*str) != tolower(*prefix))
- return 0;
+ return false;
}
-int starts_with_mem(const char *str, size_t len, const char *prefix)
+bool starts_with_mem(const char *str, size_t len, const char *prefix)
{
const char *end = str + len;
for (; ; str++, prefix++) {
if (!*prefix)
- return 1;
+ return true;
else if (str == end || *str != *prefix)
- return 0;
+ return false;
}
}
-int skip_to_optional_arg_default(const char *str, const char *prefix,
+bool skip_to_optional_arg_default(const char *str, const char *prefix,
const char **arg, const char *def)
{
const char *p;
if (!skip_prefix(str, prefix, &p))
- return 0;
+ return false;
if (!*p) {
if (arg)
*arg = def;
- return 1;
+ return true;
}
if (*p != '=')
- return 0;
+ return false;
if (arg)
*arg = p + 1;
- return 1;
+ return true;
}
/*
diff --git a/strbuf.h b/strbuf.h
index 6362777c0a..a580ac6084 100644
--- a/strbuf.h
+++ b/strbuf.h
@@ -660,9 +660,9 @@ char *xstrvfmt(const char *fmt, va_list ap);
__attribute__((format (printf, 1, 2)))
char *xstrfmt(const char *fmt, ...);
-int starts_with(const char *str, const char *prefix);
-int istarts_with(const char *str, const char *prefix);
-int starts_with_mem(const char *str, size_t len, const char *prefix);
+bool starts_with(const char *str, const char *prefix);
+bool istarts_with(const char *str, const char *prefix);
+bool starts_with_mem(const char *str, size_t len, const char *prefix);
/*
* If the string "str" is the same as the string in "prefix", then the "arg"
@@ -678,16 +678,16 @@ int starts_with_mem(const char *str, size_t len, const char *prefix);
* can be used instead of !strcmp(arg, "--key") and then
* skip_prefix(arg, "--key=", &arg) to parse such an option.
*/
-int skip_to_optional_arg_default(const char *str, const char *prefix,
+bool skip_to_optional_arg_default(const char *str, const char *prefix,
const char **arg, const char *def);
-static inline int skip_to_optional_arg(const char *str, const char *prefix,
+static inline bool skip_to_optional_arg(const char *str, const char *prefix,
const char **arg)
{
return skip_to_optional_arg_default(str, prefix, arg, "");
}
-static inline int ends_with(const char *str, const char *suffix)
+static inline bool ends_with(const char *str, const char *suffix)
{
size_t len;
return strip_suffix(str, suffix, &len);
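The int-to-bool switch narrows only the declared return type; the parsing behaviour of these helpers is unchanged. As a reminder of the contract that skip_to_optional_arg_default() provides, here is a minimal standalone demo; the demo_* functions are simplified stand-ins (no NULL arg handling), not the real implementations.

#include <stdbool.h>
#include <stdio.h>
#include <string.h>

static bool demo_skip_prefix(const char *str, const char *prefix, const char **rest)
{
	size_t len = strlen(prefix);

	if (strncmp(str, prefix, len))
		return false;
	*rest = str + len;
	return true;
}

static bool demo_skip_to_optional_arg(const char *str, const char *prefix,
				      const char **arg, const char *def)
{
	const char *p;

	if (!demo_skip_prefix(str, prefix, &p))
		return false;
	if (!*p) {
		*arg = def;		/* bare "--key" yields the default */
		return true;
	}
	if (*p != '=')
		return false;		/* e.g. "--keyword" is not a match */
	*arg = p + 1;			/* "--key=value" yields "value" */
	return true;
}

int main(void)
{
	const char *arg;

	if (demo_skip_to_optional_arg("--pretty=oneline", "--pretty", &arg, "medium"))
		printf("format: %s\n", arg);	/* oneline */
	if (demo_skip_to_optional_arg("--pretty", "--pretty", &arg, "medium"))
		printf("format: %s\n", arg);	/* medium */
	return 0;
}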
diff --git a/streaming.c b/streaming.c
index 6d6512e2e0..4b13827668 100644
--- a/streaming.c
+++ b/streaming.c
@@ -10,7 +10,7 @@
#include "streaming.h"
#include "repository.h"
#include "object-file.h"
-#include "object-store.h"
+#include "odb.h"
#include "replace-object.h"
#include "packfile.h"
@@ -44,7 +44,7 @@ struct git_istream {
union {
struct {
- char *buf; /* from oid_object_info_extended() */
+ char *buf; /* from odb_read_object_info_extended() */
unsigned long read_ptr;
} incore;
@@ -403,8 +403,8 @@ static int open_istream_incore(struct git_istream *st, struct repository *r,
oi.typep = type;
oi.sizep = &st->size;
oi.contentp = (void **)&st->u.incore.buf;
- return oid_object_info_extended(r, oid, &oi,
- OBJECT_INFO_DIE_IF_CORRUPT);
+ return odb_read_object_info_extended(r->objects, oid, &oi,
+ OBJECT_INFO_DIE_IF_CORRUPT);
}
/*****************************************************************************
@@ -422,7 +422,7 @@ static int istream_source(struct git_istream *st,
oi.typep = type;
oi.sizep = &size;
- status = oid_object_info_extended(r, oid, &oi, 0);
+ status = odb_read_object_info_extended(r->objects, oid, &oi, 0);
if (status < 0)
return status;
diff --git a/string-list.c b/string-list.c
index bf061fec56..53faaa8420 100644
--- a/string-list.c
+++ b/string-list.c
@@ -1,5 +1,3 @@
-#define DISABLE_SIGN_COMPARE_WARNINGS
-
#include "git-compat-util.h"
#include "string-list.h"
@@ -17,19 +15,19 @@ void string_list_init_dup(struct string_list *list)
/* if there is no exact match, point to the index where the entry could be
* inserted */
-static int get_entry_index(const struct string_list *list, const char *string,
- int *exact_match)
+static size_t get_entry_index(const struct string_list *list, const char *string,
+ int *exact_match)
{
- int left = -1, right = list->nr;
+ size_t left = 0, right = list->nr;
compare_strings_fn cmp = list->cmp ? list->cmp : strcmp;
- while (left + 1 < right) {
- int middle = left + (right - left) / 2;
+ while (left < right) {
+ size_t middle = left + (right - left) / 2;
int compare = cmp(string, list->items[middle].string);
if (compare < 0)
right = middle;
else if (compare > 0)
- left = middle;
+ left = middle + 1;
else {
*exact_match = 1;
return middle;
@@ -40,14 +38,13 @@ static int get_entry_index(const struct string_list *list, const char *string,
return right;
}
-/* returns -1-index if already exists */
-static int add_entry(int insert_at, struct string_list *list, const char *string)
+static size_t add_entry(struct string_list *list, const char *string)
{
int exact_match = 0;
- int index = insert_at != -1 ? insert_at : get_entry_index(list, string, &exact_match);
+ size_t index = get_entry_index(list, string, &exact_match);
if (exact_match)
- return -1 - index;
+ return index;
ALLOC_GROW(list->items, list->nr+1, list->alloc);
if (index < list->nr)
@@ -63,10 +60,7 @@ static int add_entry(int insert_at, struct string_list *list, const char *string
struct string_list_item *string_list_insert(struct string_list *list, const char *string)
{
- int index = add_entry(-1, list, string);
-
- if (index < 0)
- index = -1 - index;
+ size_t index = add_entry(list, string);
return list->items + index;
}
@@ -116,9 +110,9 @@ struct string_list_item *string_list_lookup(struct string_list *list, const char
void string_list_remove_duplicates(struct string_list *list, int free_util)
{
if (list->nr > 1) {
- int src, dst;
+ size_t dst = 1;
compare_strings_fn cmp = list->cmp ? list->cmp : strcmp;
- for (src = dst = 1; src < list->nr; src++) {
+ for (size_t src = 1; src < list->nr; src++) {
if (!cmp(list->items[dst - 1].string, list->items[src].string)) {
if (list->strdup_strings)
free(list->items[src].string);
@@ -134,8 +128,8 @@ void string_list_remove_duplicates(struct string_list *list, int free_util)
int for_each_string_list(struct string_list *list,
string_list_each_func_t fn, void *cb_data)
{
- int i, ret = 0;
- for (i = 0; i < list->nr; i++)
+ int ret = 0;
+ for (size_t i = 0; i < list->nr; i++)
if ((ret = fn(&list->items[i], cb_data)))
break;
return ret;
@@ -144,8 +138,8 @@ int for_each_string_list(struct string_list *list,
void filter_string_list(struct string_list *list, int free_util,
string_list_each_func_t want, void *cb_data)
{
- int src, dst = 0;
- for (src = 0; src < list->nr; src++) {
+ size_t dst = 0;
+ for (size_t src = 0; src < list->nr; src++) {
if (want(&list->items[src], cb_data)) {
list->items[dst++] = list->items[src];
} else {
@@ -171,13 +165,12 @@ void string_list_remove_empty_items(struct string_list *list, int free_util)
void string_list_clear(struct string_list *list, int free_util)
{
if (list->items) {
- int i;
if (list->strdup_strings) {
- for (i = 0; i < list->nr; i++)
+ for (size_t i = 0; i < list->nr; i++)
free(list->items[i].string);
}
if (free_util) {
- for (i = 0; i < list->nr; i++)
+ for (size_t i = 0; i < list->nr; i++)
free(list->items[i].util);
}
free(list->items);
@@ -189,13 +182,12 @@ void string_list_clear(struct string_list *list, int free_util)
void string_list_clear_func(struct string_list *list, string_list_clear_func_t clearfunc)
{
if (list->items) {
- int i;
if (clearfunc) {
- for (i = 0; i < list->nr; i++)
+ for (size_t i = 0; i < list->nr; i++)
clearfunc(list->items[i].util, list->items[i].string);
}
if (list->strdup_strings) {
- for (i = 0; i < list->nr; i++)
+ for (size_t i = 0; i < list->nr; i++)
free(list->items[i].string);
}
free(list->items);
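The rewritten get_entry_index() drops the old -1 sentinel in favour of the usual half-open [left, right) search over unsigned indices, reporting a miss through *exact_match rather than a negative return value. A standalone sketch of that pattern over a plain strcmp-sorted array; find_insert_pos() is an invented name, not part of string-list.c.

#include <stddef.h>
#include <stdio.h>
#include <string.h>

static size_t find_insert_pos(const char **sorted, size_t nr,
			      const char *needle, int *exact_match)
{
	size_t left = 0, right = nr;

	*exact_match = 0;
	while (left < right) {
		size_t middle = left + (right - left) / 2;
		int cmp = strcmp(needle, sorted[middle]);

		if (cmp < 0)
			right = middle;
		else if (cmp > 0)
			left = middle + 1;
		else {
			*exact_match = 1;
			return middle;
		}
	}
	return right;	/* index where needle would be inserted */
}

int main(void)
{
	const char *sorted[] = { "apple", "fig", "pear" };
	int exact;
	size_t pos;

	pos = find_insert_pos(sorted, 3, "fig", &exact);
	printf("%zu %d\n", pos, exact);		/* 1 1 */
	pos = find_insert_pos(sorted, 3, "grape", &exact);
	printf("%zu %d\n", pos, exact);		/* 2 0 */
	return 0;
}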
diff --git a/sub-process.h b/sub-process.h
index 6a61638a8a..bfc3959a1b 100644
--- a/sub-process.h
+++ b/sub-process.h
@@ -73,7 +73,7 @@ static inline struct child_process *subprocess_get_child_process(
/*
* Perform the version and capability negotiation as described in the
- * "Handshake" section of long-running-process-protocol.txt using the
+ * "Handshake" section of long-running-process-protocol.adoc using the
* given requested versions and capabilities. The "versions" and "capabilities"
* parameters are arrays terminated by a 0 or blank struct.
*
diff --git a/submodule-config.c b/submodule-config.c
index 8630e27947..70324da383 100644
--- a/submodule-config.c
+++ b/submodule-config.c
@@ -13,7 +13,7 @@
#include "submodule.h"
#include "strbuf.h"
#include "object-name.h"
-#include "object-store.h"
+#include "odb.h"
#include "parse-options.h"
#include "thread-utils.h"
#include "tree-walk.h"
@@ -235,18 +235,6 @@ in_component:
return 0;
}
-static int starts_with_dot_slash(const char *const path)
-{
- return path_match_flags(path, PATH_MATCH_STARTS_WITH_DOT_SLASH |
- PATH_MATCH_XPLATFORM);
-}
-
-static int starts_with_dot_dot_slash(const char *const path)
-{
- return path_match_flags(path, PATH_MATCH_STARTS_WITH_DOT_DOT_SLASH |
- PATH_MATCH_XPLATFORM);
-}
-
static int submodule_url_is_relative(const char *url)
{
return starts_with_dot_slash(url) || starts_with_dot_dot_slash(url);
@@ -743,8 +731,8 @@ static const struct submodule *config_from(struct submodule_cache *cache,
if (submodule)
goto out;
- config = repo_read_object_file(the_repository, &oid, &type,
- &config_size);
+ config = odb_read_object(the_repository->objects, &oid,
+ &type, &config_size);
if (!config || type != OBJ_BLOB)
goto out;
@@ -810,7 +798,8 @@ static void config_from_gitmodules(config_fn_t fn, struct repository *repo, void
repo_get_oid(repo, GITMODULES_HEAD, &oid) >= 0) {
config_source.blob = oidstr = xstrdup(oid_to_hex(&oid));
if (repo != the_repository)
- add_submodule_odb_by_path(repo->objects->odb->path);
+ odb_add_submodule_source_by_path(the_repository->objects,
+ repo->objects->sources->path);
} else {
goto out;
}
diff --git a/submodule.c b/submodule.c
index ead3fb5dad..f8373a9ea7 100644
--- a/submodule.c
+++ b/submodule.c
@@ -27,11 +27,10 @@
#include "parse-options.h"
#include "object-file.h"
#include "object-name.h"
-#include "object-store.h"
+#include "odb.h"
#include "commit-reach.h"
#include "read-cache-ll.h"
#include "setup.h"
-#include "trace2.h"
static int config_update_recurse_submodules = RECURSE_SUBMODULES_OFF;
static int initialized_fetch_ref_tips;
@@ -176,30 +175,6 @@ void stage_updated_gitmodules(struct index_state *istate)
die(_("staging updated .gitmodules failed"));
}
-static struct string_list added_submodule_odb_paths = STRING_LIST_INIT_DUP;
-
-void add_submodule_odb_by_path(const char *path)
-{
- string_list_insert(&added_submodule_odb_paths, path);
-}
-
-int register_all_submodule_odb_as_alternates(void)
-{
- int i;
- int ret = added_submodule_odb_paths.nr;
-
- for (i = 0; i < added_submodule_odb_paths.nr; i++)
- add_to_alternates_memory(added_submodule_odb_paths.items[i].string);
- if (ret) {
- string_list_clear(&added_submodule_odb_paths, 0);
- trace2_data_intmax("submodule", the_repository,
- "register_all_submodule_odb_as_alternates/registered", ret);
- if (git_env_bool("GIT_TEST_FATAL_REGISTER_SUBMODULE_ODB", 0))
- BUG("register_all_submodule_odb_as_alternates() called");
- }
- return ret;
-}
-
void set_diffopt_flags_from_submodule_config(struct diff_options *diffopt,
const char *path)
{
@@ -993,7 +968,7 @@ static int check_has_commit(const struct object_id *oid, void *data)
return 0;
}
- type = oid_object_info(&subrepo, oid, NULL);
+ type = odb_read_object_info(subrepo.objects, oid, NULL);
switch (type) {
case OBJ_COMMIT:
@@ -1777,8 +1752,7 @@ static int fetch_start_failure(struct strbuf *err UNUSED,
static int commit_missing_in_sub(const struct object_id *oid, void *data)
{
struct repository *subrepo = data;
-
- enum object_type type = oid_object_info(subrepo, oid, NULL);
+ enum object_type type = odb_read_object_info(subrepo->objects, oid, NULL);
return type != OBJ_COMMIT;
}
diff --git a/submodule.h b/submodule.h
index db980c1d08..b10e16e6c0 100644
--- a/submodule.h
+++ b/submodule.h
@@ -105,15 +105,6 @@ int submodule_uses_gitfile(const char *path);
int bad_to_remove_submodule(const char *path, unsigned flags);
/*
- * Call add_submodule_odb_by_path() to add the submodule at the given
- * path to a list. When register_all_submodule_odb_as_alternates() is
- * called, the object stores of all submodules in that list will be
- * added as alternates in the_repository.
- */
-void add_submodule_odb_by_path(const char *path);
-int register_all_submodule_odb_as_alternates(void);
-
-/*
* Checks if there are submodule changes in a..b. If a is the null OID,
* checks b and all its ancestors instead.
*/
diff --git a/subprojects/expat.wrap b/subprojects/expat.wrap
index 2e0427dcfd..0e9292f97b 100644
--- a/subprojects/expat.wrap
+++ b/subprojects/expat.wrap
@@ -1,13 +1,13 @@
[wrap-file]
-directory = expat-2.6.3
-source_url = https://github.com/libexpat/libexpat/releases/download/R_2_6_3/expat-2.6.3.tar.xz
-source_filename = expat-2.6.3.tar.bz2
-source_hash = 274db254a6979bde5aad404763a704956940e465843f2a9bd9ed7af22e2c0efc
-patch_filename = expat_2.6.3-1_patch.zip
-patch_url = https://wrapdb.mesonbuild.com/v2/expat_2.6.3-1/get_patch
-patch_hash = cf017fbe105e31428b2768360bd9be39094df4e948a1e8d1c54b6f7c76460cb1
-source_fallback_url = https://github.com/mesonbuild/wrapdb/releases/download/expat_2.6.3-1/expat-2.6.3.tar.bz2
-wrapdb_version = 2.6.3-1
+directory = expat-2.7.1
+source_url = https://github.com/libexpat/libexpat/releases/download/R_2_7_1/expat-2.7.1.tar.xz
+source_filename = expat-2.7.1.tar.bz2
+source_hash = 354552544b8f99012e5062f7d570ec77f14b412a3ff5c7d8d0dae62c0d217c30
+patch_filename = expat_2.7.1-1_patch.zip
+patch_url = https://wrapdb.mesonbuild.com/v2/expat_2.7.1-1/get_patch
+patch_hash = fe28cbbc427a7c9787d08b969ad54d19f59d8dd18294b4a18651cecfc789d4ef
+source_fallback_url = https://github.com/mesonbuild/wrapdb/releases/download/expat_2.7.1-1/expat-2.7.1.tar.bz2
+wrapdb_version = 2.7.1-1
[provide]
expat = expat_dep
diff --git a/subprojects/pcre2.wrap b/subprojects/pcre2.wrap
index 7e18447254..f45c968e2f 100644
--- a/subprojects/pcre2.wrap
+++ b/subprojects/pcre2.wrap
@@ -1,13 +1,13 @@
[wrap-file]
-directory = pcre2-10.44
-source_url = https://github.com/PCRE2Project/pcre2/releases/download/pcre2-10.44/pcre2-10.44.tar.bz2
-source_filename = pcre2-10.44.tar.bz2
-source_hash = d34f02e113cf7193a1ebf2770d3ac527088d485d4e047ed10e5d217c6ef5de96
-patch_filename = pcre2_10.44-2_patch.zip
-patch_url = https://wrapdb.mesonbuild.com/v2/pcre2_10.44-2/get_patch
-patch_hash = 4336d422ee9043847e5e10dbbbd01940d4c9e5027f31ccdc33a7898a1ca94009
-source_fallback_url = https://github.com/mesonbuild/wrapdb/releases/download/pcre2_10.44-2/pcre2-10.44.tar.bz2
-wrapdb_version = 10.44-2
+directory = pcre2-10.45
+source_url = https://github.com/PCRE2Project/pcre2/releases/download/pcre2-10.45/pcre2-10.45.tar.bz2
+source_filename = pcre2-10.45.tar.bz2
+source_hash = 21547f3516120c75597e5b30a992e27a592a31950b5140e7b8bfde3f192033c4
+patch_filename = pcre2_10.45-2_patch.zip
+patch_url = https://wrapdb.mesonbuild.com/v2/pcre2_10.45-2/get_patch
+patch_hash = 7c6f34b703708652a404f9dc2769c67658c437b6043573295fa3428a9b7a6807
+source_fallback_url = https://github.com/mesonbuild/wrapdb/releases/download/pcre2_10.45-2/pcre2-10.45.tar.bz2
+wrapdb_version = 10.45-2
[provide]
libpcre2-8 = libpcre2_8
diff --git a/t/README b/t/README
index e9ffd9a81c..adbbd9acf4 100644
--- a/t/README
+++ b/t/README
@@ -415,6 +415,10 @@ GIT_TEST_PACK_SPARSE=<boolean> if disabled will default the pack-objects
builtin to use the non-sparse object walk. This can still be overridden by
the --sparse command-line argument.
+GIT_TEST_PACK_PATH_WALK=<boolean> if enabled will default the pack-objects
+builtin to use the path-walk API for the object walk. This can still be
+overridden by the --no-path-walk command-line argument.
+
GIT_TEST_PRELOAD_INDEX=<boolean> exercises the preload-index code path
by overriding the minimum number of cache entries required per thread.
diff --git a/t/helper/test-bitmap.c b/t/helper/test-bitmap.c
index 3f23f21072..16a01669e4 100644
--- a/t/helper/test-bitmap.c
+++ b/t/helper/test-bitmap.c
@@ -10,6 +10,11 @@ static int bitmap_list_commits(void)
return test_bitmap_commits(the_repository);
}
+static int bitmap_list_commits_with_offset(void)
+{
+ return test_bitmap_commits_with_offset(the_repository);
+}
+
static int bitmap_dump_hashes(void)
{
return test_bitmap_hashes(the_repository);
@@ -36,6 +41,8 @@ int cmd__bitmap(int argc, const char **argv)
if (argc == 2 && !strcmp(argv[1], "list-commits"))
return bitmap_list_commits();
+ if (argc == 2 && !strcmp(argv[1], "list-commits-with-offset"))
+ return bitmap_list_commits_with_offset();
if (argc == 2 && !strcmp(argv[1], "dump-hashes"))
return bitmap_dump_hashes();
if (argc == 2 && !strcmp(argv[1], "dump-pseudo-merges"))
@@ -46,6 +53,7 @@ int cmd__bitmap(int argc, const char **argv)
return bitmap_dump_pseudo_merge_objects(atoi(argv[2]));
usage("\ttest-tool bitmap list-commits\n"
+ "\ttest-tool bitmap list-commits-with-offset\n"
"\ttest-tool bitmap dump-hashes\n"
"\ttest-tool bitmap dump-pseudo-merges\n"
"\ttest-tool bitmap dump-pseudo-merge-commits <n>\n"
diff --git a/t/helper/test-bloom.c b/t/helper/test-bloom.c
index 9aa2c5a592..3283544bd3 100644
--- a/t/helper/test-bloom.c
+++ b/t/helper/test-bloom.c
@@ -12,13 +12,13 @@ static struct bloom_filter_settings settings = DEFAULT_BLOOM_FILTER_SETTINGS;
static void add_string_to_filter(const char *data, struct bloom_filter *filter) {
struct bloom_key key;
- fill_bloom_key(data, strlen(data), &key, &settings);
+ bloom_key_fill(&key, data, strlen(data), &settings);
printf("Hashes:");
for (size_t i = 0; i < settings.num_hashes; i++)
printf("0x%08x|", key.hashes[i]);
printf("\n");
add_key_to_filter(&key, filter, &settings);
- clear_bloom_key(&key);
+ bloom_key_clear(&key);
}
static void print_bloom_filter(struct bloom_filter *filter) {
@@ -61,13 +61,13 @@ int cmd__bloom(int argc, const char **argv)
uint32_t hashed;
if (argc < 3)
usage(bloom_usage);
- hashed = murmur3_seeded_v2(0, argv[2], strlen(argv[2]));
+ hashed = test_bloom_murmur3_seeded(0, argv[2], strlen(argv[2]), 2);
printf("Murmur3 Hash with seed=0:0x%08x\n", hashed);
}
if (!strcmp(argv[1], "get_murmur3_seven_highbit")) {
uint32_t hashed;
- hashed = murmur3_seeded_v2(0, "\x99\xaa\xbb\xcc\xdd\xee\xff", 7);
+ hashed = test_bloom_murmur3_seeded(0, "\x99\xaa\xbb\xcc\xdd\xee\xff", 7, 2);
printf("Murmur3 Hash with seed=0:0x%08x\n", hashed);
}
diff --git a/t/helper/test-find-pack.c b/t/helper/test-find-pack.c
index 76c2f4eba8..611a13a326 100644
--- a/t/helper/test-find-pack.c
+++ b/t/helper/test-find-pack.c
@@ -2,7 +2,7 @@
#include "test-tool.h"
#include "object-name.h"
-#include "object-store.h"
+#include "odb.h"
#include "packfile.h"
#include "parse-options.h"
#include "setup.h"
diff --git a/t/helper/test-pack-mtimes.c b/t/helper/test-pack-mtimes.c
index fdf1b13437..d51aaa3dc4 100644
--- a/t/helper/test-pack-mtimes.c
+++ b/t/helper/test-pack-mtimes.c
@@ -3,7 +3,7 @@
#include "test-tool.h"
#include "hex.h"
#include "strbuf.h"
-#include "object-store.h"
+#include "odb.h"
#include "packfile.h"
#include "pack-mtimes.h"
#include "setup.h"
diff --git a/t/helper/test-parse-options.c b/t/helper/test-parse-options.c
index f2663dd0c0..68579d83f3 100644
--- a/t/helper/test-parse-options.c
+++ b/t/helper/test-parse-options.c
@@ -131,6 +131,7 @@ int cmd__parse_options(int argc, const char **argv)
.short_name = 'B',
.long_name = "no-fear",
.value = &boolean,
+ .precision = sizeof(boolean),
.help = "be brave",
.flags = PARSE_OPT_NOARG | PARSE_OPT_NONEG,
.defval = 1,
@@ -148,9 +149,16 @@ int cmd__parse_options(int argc, const char **argv)
OPT_SET_INT(0, "set23", &integer, "set integer to 23", 23),
OPT_CMDMODE(0, "mode1", &integer, "set integer to 1 (cmdmode option)", 1),
OPT_CMDMODE(0, "mode2", &integer, "set integer to 2 (cmdmode option)", 2),
- OPT_CALLBACK_F(0, "mode34", &integer, "(3|4)",
- "set integer to 3 or 4 (cmdmode option)",
- PARSE_OPT_CMDMODE, mode34_callback),
+ {
+ .type = OPTION_CALLBACK,
+ .long_name = "mode34",
+ .value = &integer,
+ .precision = sizeof(integer),
+ .argh = "(3|4)",
+ .help = "set integer to 3 or 4 (cmdmode option)",
+ .flags = PARSE_OPT_CMDMODE,
+ .callback = mode34_callback,
+ },
OPT_CALLBACK('L', "length", &integer, "str",
"get length of <str>", length_callback),
OPT_FILENAME('F', "file", &file, "set file to <file>"),
@@ -170,6 +178,7 @@ int cmd__parse_options(int argc, const char **argv)
.type = OPTION_COUNTUP,
.short_name = '+',
.value = &boolean,
+ .precision = sizeof(boolean),
.help = "same as -b",
.flags = PARSE_OPT_NOARG | PARSE_OPT_NONEG | PARSE_OPT_NODASH,
},
@@ -177,6 +186,7 @@ int cmd__parse_options(int argc, const char **argv)
.type = OPTION_COUNTUP,
.long_name = "ambiguous",
.value = &ambiguous,
+ .precision = sizeof(ambiguous),
.help = "positive ambiguity",
.flags = PARSE_OPT_NOARG | PARSE_OPT_NONEG,
},
@@ -184,6 +194,7 @@ int cmd__parse_options(int argc, const char **argv)
.type = OPTION_COUNTUP,
.long_name = "no-ambiguous",
.value = &ambiguous,
+ .precision = sizeof(ambiguous),
.help = "negative ambiguity",
.flags = PARSE_OPT_NOARG | PARSE_OPT_NONEG,
},
diff --git a/t/helper/test-partial-clone.c b/t/helper/test-partial-clone.c
index 34f1aee558..d848800749 100644
--- a/t/helper/test-partial-clone.c
+++ b/t/helper/test-partial-clone.c
@@ -1,7 +1,7 @@
#include "test-tool.h"
#include "hex.h"
#include "repository.h"
-#include "object-store.h"
+#include "odb.h"
#include "setup.h"
/*
@@ -23,7 +23,7 @@ static void object_info(const char *gitdir, const char *oid_hex)
die("could not init repo");
if (parse_oid_hex_algop(oid_hex, &oid, &p, r.hash_algo))
die("could not parse oid");
- if (oid_object_info_extended(&r, &oid, &oi, 0))
+ if (odb_read_object_info_extended(r.objects, &oid, &oi, 0))
die("could not obtain object info");
printf("%d\n", (int) size);
diff --git a/t/helper/test-path-walk.c b/t/helper/test-path-walk.c
index 61e845e5ec..fe63002c2b 100644
--- a/t/helper/test-path-walk.c
+++ b/t/helper/test-path-walk.c
@@ -82,6 +82,8 @@ int cmd__path_walk(int argc, const char **argv)
N_("toggle inclusion of tree objects")),
OPT_BOOL(0, "prune", &info.prune_all_uninteresting,
N_("toggle pruning of uninteresting paths")),
+ OPT_BOOL(0, "edge-aggressive", &info.edge_aggressive,
+ N_("toggle aggressive edge walk")),
OPT_BOOL(0, "stdin-pl", &stdin_pl,
N_("read a pattern list over stdin")),
OPT_END(),
diff --git a/t/helper/test-read-graph.c b/t/helper/test-read-graph.c
index 8b413b644b..ef5339bbee 100644
--- a/t/helper/test-read-graph.c
+++ b/t/helper/test-read-graph.c
@@ -3,7 +3,7 @@
#include "test-tool.h"
#include "commit-graph.h"
#include "repository.h"
-#include "object-store.h"
+#include "odb.h"
#include "bloom.h"
#include "setup.h"
@@ -73,15 +73,15 @@ static void dump_graph_bloom_filters(struct commit_graph *graph)
int cmd__read_graph(int argc, const char **argv)
{
struct commit_graph *graph = NULL;
- struct object_directory *odb;
+ struct odb_source *source;
int ret = 0;
setup_git_directory();
- odb = the_repository->objects->odb;
+ source = the_repository->objects->sources;
prepare_repo_settings(the_repository);
- graph = read_commit_graph_one(the_repository, odb);
+ graph = read_commit_graph_one(the_repository, source);
if (!graph) {
ret = 1;
goto done;
diff --git a/t/helper/test-read-midx.c b/t/helper/test-read-midx.c
index ac81390899..da2aa036b5 100644
--- a/t/helper/test-read-midx.c
+++ b/t/helper/test-read-midx.c
@@ -4,7 +4,7 @@
#include "hex.h"
#include "midx.h"
#include "repository.h"
-#include "object-store.h"
+#include "odb.h"
#include "pack-bitmap.h"
#include "packfile.h"
#include "setup.h"
diff --git a/t/helper/test-ref-store.c b/t/helper/test-ref-store.c
index 4cfc7c90b5..8d9a271845 100644
--- a/t/helper/test-ref-store.c
+++ b/t/helper/test-ref-store.c
@@ -5,7 +5,7 @@
#include "refs.h"
#include "setup.h"
#include "worktree.h"
-#include "object-store.h"
+#include "odb.h"
#include "path.h"
#include "repository.h"
#include "strbuf.h"
@@ -79,7 +79,7 @@ static const char **get_store(const char **argv, struct ref_store **refs)
if (!repo_submodule_path_append(the_repository,
&sb, gitdir, "objects/"))
die("computing submodule path failed");
- add_to_alternates_memory(sb.buf);
+ odb_add_to_alternates_memory(the_repository->objects, sb.buf);
strbuf_release(&sb);
*refs = repo_get_submodule_ref_store(the_repository, gitdir);
diff --git a/t/helper/test-string-list.c b/t/helper/test-string-list.c
index 6f10c5a435..6be0cdb8e2 100644
--- a/t/helper/test-string-list.c
+++ b/t/helper/test-string-list.c
@@ -1,105 +1,9 @@
-#define DISABLE_SIGN_COMPARE_WARNINGS
-
#include "test-tool.h"
#include "strbuf.h"
#include "string-list.h"
-/*
- * Parse an argument into a string list. arg should either be a
- * ':'-separated list of strings, or "-" to indicate an empty string
- * list (as opposed to "", which indicates a string list containing a
- * single empty string). list->strdup_strings must be set.
- */
-static void parse_string_list(struct string_list *list, const char *arg)
-{
- if (!strcmp(arg, "-"))
- return;
-
- (void)string_list_split(list, arg, ':', -1);
-}
-
-static void write_list(const struct string_list *list)
-{
- int i;
- for (i = 0; i < list->nr; i++)
- printf("[%d]: \"%s\"\n", i, list->items[i].string);
-}
-
-static void write_list_compact(const struct string_list *list)
-{
- int i;
- if (!list->nr)
- printf("-\n");
- else {
- printf("%s", list->items[0].string);
- for (i = 1; i < list->nr; i++)
- printf(":%s", list->items[i].string);
- printf("\n");
- }
-}
-
-static int prefix_cb(struct string_list_item *item, void *cb_data)
-{
- const char *prefix = (const char *)cb_data;
- return starts_with(item->string, prefix);
-}
-
int cmd__string_list(int argc, const char **argv)
{
- if (argc == 5 && !strcmp(argv[1], "split")) {
- struct string_list list = STRING_LIST_INIT_DUP;
- int i;
- const char *s = argv[2];
- int delim = *argv[3];
- int maxsplit = atoi(argv[4]);
-
- i = string_list_split(&list, s, delim, maxsplit);
- printf("%d\n", i);
- write_list(&list);
- string_list_clear(&list, 0);
- return 0;
- }
-
- if (argc == 5 && !strcmp(argv[1], "split_in_place")) {
- struct string_list list = STRING_LIST_INIT_NODUP;
- int i;
- char *s = xstrdup(argv[2]);
- const char *delim = argv[3];
- int maxsplit = atoi(argv[4]);
-
- i = string_list_split_in_place(&list, s, delim, maxsplit);
- printf("%d\n", i);
- write_list(&list);
- string_list_clear(&list, 0);
- free(s);
- return 0;
- }
-
- if (argc == 4 && !strcmp(argv[1], "filter")) {
- /*
- * Retain only the items that have the specified prefix.
- * Arguments: list|- prefix
- */
- struct string_list list = STRING_LIST_INIT_DUP;
- const char *prefix = argv[3];
-
- parse_string_list(&list, argv[2]);
- filter_string_list(&list, 0, prefix_cb, (void *)prefix);
- write_list_compact(&list);
- string_list_clear(&list, 0);
- return 0;
- }
-
- if (argc == 3 && !strcmp(argv[1], "remove_duplicates")) {
- struct string_list list = STRING_LIST_INIT_DUP;
-
- parse_string_list(&list, argv[2]);
- string_list_remove_duplicates(&list, 0);
- write_list_compact(&list);
- string_list_clear(&list, 0);
- return 0;
- }
-
if (argc == 2 && !strcmp(argv[1], "sort")) {
struct string_list list = STRING_LIST_INIT_NODUP;
struct strbuf sb = STRBUF_INIT;
diff --git a/t/meson.build b/t/meson.build
index d052fc3e23..660d780dcc 100644
--- a/t/meson.build
+++ b/t/meson.build
@@ -11,6 +11,7 @@ clar_test_suites = [
'unit-tests/u-reftable-tree.c',
'unit-tests/u-strbuf.c',
'unit-tests/u-strcmp-offset.c',
+ 'unit-tests/u-string-list.c',
'unit-tests/u-strvec.c',
'unit-tests/u-trailer.c',
'unit-tests/u-urlmatch-normalization.c',
@@ -51,7 +52,7 @@ clar_unit_tests = executable('unit-tests',
sources: clar_sources + clar_test_suites,
dependencies: [libgit_commonmain],
)
-test('unit-tests', clar_unit_tests)
+test('unit-tests', clar_unit_tests, kwargs: test_kwargs)
unit_test_programs = [
'unit-tests/t-reftable-basics.c',
@@ -76,7 +77,7 @@ foreach unit_test_program : unit_test_programs
)
test(unit_test_name, unit_test,
workdir: meson.current_source_dir(),
- timeout: 0,
+ kwargs: test_kwargs,
)
endforeach
@@ -123,7 +124,6 @@ integration_tests = [
't0060-path-utils.sh',
't0061-run-command.sh',
't0062-revision-walking.sh',
- 't0063-string-list.sh',
't0066-dir-iterator.sh',
't0067-parse_pathspec_file.sh',
't0068-for-each-repo.sh',
@@ -178,7 +178,6 @@ integration_tests = [
't1015-read-index-unmerged.sh',
't1016-compatObjectFormat.sh',
't1020-subdirectory.sh',
- 't1021-rerere-in-workdir.sh',
't1022-read-tree-partial-clone.sh',
't1050-large.sh',
't1051-large-conversion.sh',
@@ -1117,6 +1116,7 @@ benchmarks = [
'perf/p1450-fsck.sh',
'perf/p1451-fsck-skip-list.sh',
'perf/p1500-graph-walks.sh',
+ 'perf/p1501-rev-parse-oneline.sh',
'perf/p2000-sparse-operations.sh',
'perf/p3400-rebase.sh',
'perf/p3404-rebase-interactive.sh',
@@ -1212,7 +1212,7 @@ foreach integration_test : integration_tests
workdir: meson.current_source_dir(),
env: test_environment,
depends: test_dependencies + bin_wrappers,
- timeout: 0,
+ kwargs: test_kwargs,
)
endforeach
diff --git a/t/perf/p1501-rev-parse-oneline.sh b/t/perf/p1501-rev-parse-oneline.sh
new file mode 100755
index 0000000000..538fa9c404
--- /dev/null
+++ b/t/perf/p1501-rev-parse-oneline.sh
@@ -0,0 +1,71 @@
+#!/bin/sh
+
+test_description='Test :/ object name notation'
+
+. ./perf-lib.sh
+
+test_perf_fresh_repo
+
+#
+# Creates lots of merges to make history traversal costly. In
+# particular it creates 2^($max_level-1)-1 2-way merges on top of
+# 2^($max_level-1) root commits. E.g., the commit history looks like
+# this for a $max_level of 3:
+#
+# _1_
+# / \
+# 2 3
+# / \ / \
+# 4 5 6 7
+#
+# The numbers are the fast-import marks, which also are the commit
+# messages. 1 is the HEAD commit and a merge, 2 and 3 are also merges,
+# 4-7 are the root commits.
+#
+build_history () {
+ local max_level="$1" &&
+ local level="${2:-1}" &&
+ local mark="${3:-1}" &&
+ if test $level -eq $max_level
+ then
+ echo "reset refs/heads/master" &&
+ echo "from $ZERO_OID" &&
+ echo "commit refs/heads/master" &&
+ echo "mark :$mark" &&
+ echo "committer C <c@example.com> 1234567890 +0000" &&
+ echo "data <<EOF" &&
+ echo "$mark" &&
+ echo "EOF"
+ else
+ local level1=$((level+1)) &&
+ local mark1=$((2*mark)) &&
+ local mark2=$((2*mark+1)) &&
+ build_history $max_level $level1 $mark1 &&
+ build_history $max_level $level1 $mark2 &&
+ echo "commit refs/heads/master" &&
+ echo "mark :$mark" &&
+ echo "committer C <c@example.com> 1234567890 +0000" &&
+ echo "data <<EOF" &&
+ echo "$mark" &&
+ echo "EOF" &&
+ echo "from :$mark1" &&
+ echo "merge :$mark2"
+ fi
+}
+
+test_expect_success 'setup' '
+ build_history 16 | git fast-import &&
+ git log --format="%H %s" --reverse >commits &&
+ sed -n -e "s/ .*$//p" -e "q" <commits >expect &&
+ sed -n -e "s/^.* //p" -e "q" <commits >needle
+'
+
+test_perf "rev-parse :/$(cat needle)" '
+ git rev-parse :/$(cat needle) >actual
+'
+
+test_expect_success 'verify result' '
+ test_cmp expect actual
+'
+
+test_done
diff --git a/t/perf/p5313-pack-objects.sh b/t/perf/p5313-pack-objects.sh
index 786a2c1c6f..46a6cd32d2 100755
--- a/t/perf/p5313-pack-objects.sh
+++ b/t/perf/p5313-pack-objects.sh
@@ -22,46 +22,53 @@ test_expect_success 'create rev input' '
EOF
'
-for version in 1 2
-do
- export version
+test_all_with_args () {
+ parameter=$1
+ export parameter
- test_perf "thin pack with version $version" '
+ test_perf "thin pack with $parameter" '
git pack-objects --thin --stdout --revs --sparse \
- --name-hash-version=$version <in-thin >out
+ $parameter <in-thin >out
'
- test_size "thin pack size with version $version" '
+ test_size "thin pack size with $parameter" '
test_file_size out
'
- test_perf "big pack with version $version" '
+ test_perf "big pack with $parameter" '
git pack-objects --stdout --revs --sparse \
- --name-hash-version=$version <in-big >out
+ $parameter <in-big >out
'
- test_size "big pack size with version $version" '
+ test_size "big pack size with $parameter" '
test_file_size out
'
- test_perf "shallow fetch pack with version $version" '
+ test_perf "shallow fetch pack with $parameter" '
git pack-objects --stdout --revs --sparse --shallow \
- --name-hash-version=$version <in-shallow >out
+ $parameter <in-shallow >out
'
- test_size "shallow pack size with version $version" '
+ test_size "shallow pack size with $parameter" '
test_file_size out
'
- test_perf "repack with version $version" '
- git repack -adf --name-hash-version=$version
+ test_perf "repack with $parameter" '
+ git repack -adf $parameter
'
- test_size "repack size with version $version" '
+ test_size "repack size with $parameter" '
gitdir=$(git rev-parse --git-dir) &&
pack=$(ls $gitdir/objects/pack/pack-*.pack) &&
test_file_size "$pack"
'
+}
+
+for version in 1 2
+do
+ test_all_with_args --name-hash-version=$version
done
+test_all_with_args --path-walk
+
test_done
diff --git a/t/t0000-basic.sh b/t/t0000-basic.sh
index 35c5c2b4f9..2b63e1c86c 100755
--- a/t/t0000-basic.sh
+++ b/t/t0000-basic.sh
@@ -130,7 +130,7 @@ test_expect_success 'subtest: a failing TODO test' '
'
test_expect_success 'subtest: a passing TODO test' '
- write_and_run_sub_test_lib_test passing-todo <<-\EOF &&
+ write_and_run_sub_test_lib_test_err passing-todo <<-\EOF &&
test_expect_failure "pretend we have fixed a known breakage" "true"
test_done
EOF
@@ -142,7 +142,7 @@ test_expect_success 'subtest: a passing TODO test' '
'
test_expect_success 'subtest: 2 TODO tests, one passin' '
- write_and_run_sub_test_lib_test partially-passing-todos <<-\EOF &&
+ write_and_run_sub_test_lib_test_err partially-passing-todos <<-\EOF &&
test_expect_failure "pretend we have a known breakage" "false"
test_expect_success "pretend we have a passing test" "true"
test_expect_failure "pretend we have fixed another known breakage" "true"
@@ -219,41 +219,44 @@ test_expect_success 'subtest: --verbose option' '
test_expect_success "failing test" false
test_done
EOF
- mv t1234-verbose/out t1234-verbose/out+ &&
- grep -v "^Initialized empty" t1234-verbose/out+ >t1234-verbose/out &&
- check_sub_test_lib_test t1234-verbose <<-\EOF
- > expecting success of 1234.1 '\''passing test'\'': true
+ mv t1234-verbose/err t1234-verbose/err+ &&
+ grep -v "^Initialized empty" t1234-verbose/err+ >t1234-verbose/err &&
+ check_sub_test_lib_test_err t1234-verbose \
+ <<-\EOF_OUT 3<<-\EOF_ERR
> ok 1 - passing test
+ > ok 2 - test with output
+ > not ok 3 - failing test
+ > # false
+ > # failed 1 among 3 test(s)
+ > 1..3
+ EOF_OUT
+ > expecting success of 1234.1 '\''passing test'\'': true
> Z
> expecting success of 1234.2 '\''test with output'\'': echo foo
> foo
- > ok 2 - test with output
> Z
> expecting success of 1234.3 '\''failing test'\'': false
- > not ok 3 - failing test
- > # false
> Z
- > # failed 1 among 3 test(s)
- > 1..3
- EOF
+ EOF_ERR
'
test_expect_success 'subtest: --verbose-only option' '
run_sub_test_lib_test_err \
t1234-verbose \
--verbose-only=2 &&
- check_sub_test_lib_test t1234-verbose <<-\EOF
+ check_sub_test_lib_test_err t1234-verbose <<-\EOF_OUT 3<<-\EOF_ERR
> ok 1 - passing test
- > Z
- > expecting success of 1234.2 '\''test with output'\'': echo foo
- > foo
> ok 2 - test with output
- > Z
> not ok 3 - failing test
> # false
> # failed 1 among 3 test(s)
> 1..3
- EOF
+ EOF_OUT
+ > Z
+ > expecting success of 1234.2 '\''test with output'\'': echo foo
+ > foo
+ > Z
+ EOF_ERR
'
test_expect_success 'subtest: skip one with GIT_SKIP_TESTS' '
diff --git a/t/t0001-init.sh b/t/t0001-init.sh
index f11a40811f..f593c53687 100755
--- a/t/t0001-init.sh
+++ b/t/t0001-init.sh
@@ -658,6 +658,17 @@ test_expect_success 'init warns about invalid init.defaultRefFormat' '
test_cmp expected actual
'
+test_expect_success 'default ref format' '
+ test_when_finished "rm -rf refformat" &&
+ (
+ sane_unset GIT_DEFAULT_REF_FORMAT &&
+ git init refformat
+ ) &&
+ git version --build-options | sed -ne "s/^default-ref-format: //p" >expect &&
+ git -C refformat rev-parse --show-ref-format >actual &&
+ test_cmp expect actual
+'
+
backends="files reftable"
for format in $backends
do
@@ -738,6 +749,40 @@ test_expect_success "GIT_DEFAULT_REF_FORMAT= overrides init.defaultRefFormat" '
test_cmp expect actual
'
+test_expect_success "init with feature.experimental=true" '
+ test_when_finished "rm -rf refformat" &&
+ test_config_global feature.experimental true &&
+ (
+ sane_unset GIT_DEFAULT_REF_FORMAT &&
+ git init refformat
+ ) &&
+ echo reftable >expect &&
+ git -C refformat rev-parse --show-ref-format >actual &&
+ test_cmp expect actual
+'
+
+test_expect_success "init.defaultRefFormat overrides feature.experimental=true" '
+ test_when_finished "rm -rf refformat" &&
+ test_config_global feature.experimental true &&
+ test_config_global init.defaultRefFormat files &&
+ (
+ sane_unset GIT_DEFAULT_REF_FORMAT &&
+ git init refformat
+ ) &&
+ echo files >expect &&
+ git -C refformat rev-parse --show-ref-format >actual &&
+ test_cmp expect actual
+'
+
+test_expect_success "GIT_DEFAULT_REF_FORMAT= overrides feature.experimental=true" '
+ test_when_finished "rm -rf refformat" &&
+ test_config_global feature.experimental true &&
+ GIT_DEFAULT_REF_FORMAT=files git init refformat &&
+ echo files >expect &&
+ git -C refformat rev-parse --show-ref-format >actual &&
+ test_cmp expect actual
+'
+
for from_format in $backends
do
test_expect_success "re-init with same format ($from_format)" '
diff --git a/t/t0021-conversion.sh b/t/t0021-conversion.sh
index bf10d253ec..f0d50d769e 100755
--- a/t/t0021-conversion.sh
+++ b/t/t0021-conversion.sh
@@ -281,7 +281,7 @@ test_expect_success 'required filter with absent smudge field' '
test_expect_success 'filtering large input to small output should use little memory' '
test_config filter.devnull.clean "cat >/dev/null" &&
test_config filter.devnull.required true &&
- for i in $(test_seq 1 30); do printf "%1048576d" 1 || return 1; done >30MB &&
+ test_seq -f "%1048576d" 1 30 >30MB &&
echo "30MB filter=devnull" >.gitattributes &&
GIT_MMAP_LIMIT=1m GIT_ALLOC_LIMIT=1m git add 30MB
'
@@ -299,7 +299,7 @@ test_expect_success 'filter that does not read is fine' '
test_expect_success EXPENSIVE 'filter large file' '
test_config filter.largefile.smudge cat &&
test_config filter.largefile.clean cat &&
- for i in $(test_seq 1 2048); do printf "%1048576d" 1 || return 1; done >2GB &&
+ test_seq -f "%1048576d" 1 2048 >2GB &&
echo "2GB filter=largefile" >.gitattributes &&
git add 2GB 2>err &&
test_must_be_empty err &&
diff --git a/t/t0050-filesystem.sh b/t/t0050-filesystem.sh
index 5c9dc90d0b..ca8568067d 100755
--- a/t/t0050-filesystem.sh
+++ b/t/t0050-filesystem.sh
@@ -10,53 +10,35 @@ export GIT_TEST_DEFAULT_INITIAL_BRANCH_NAME
auml=$(printf '\303\244')
aumlcdiar=$(printf '\141\314\210')
-if test_have_prereq CASE_INSENSITIVE_FS
-then
- say "will test on a case insensitive filesystem"
- test_case=test_expect_failure
-else
- test_case=test_expect_success
-fi
-
if test_have_prereq UTF8_NFD_TO_NFC
then
- say "will test on a unicode corrupting filesystem"
test_unicode=test_expect_failure
else
test_unicode=test_expect_success
fi
-test_have_prereq SYMLINKS ||
- say "will test on a filesystem lacking symbolic links"
-
-if test_have_prereq CASE_INSENSITIVE_FS
-then
-test_expect_success "detection of case insensitive filesystem during repo init" '
+test_expect_success CASE_INSENSITIVE_FS "detection of case insensitive filesystem during repo init" '
test $(git config --bool core.ignorecase) = true
'
-else
-test_expect_success "detection of case insensitive filesystem during repo init" '
+
+test_expect_success !CASE_INSENSITIVE_FS "detection of case insensitive filesystem during repo init" '
{
test_must_fail git config --bool core.ignorecase >/dev/null ||
test $(git config --bool core.ignorecase) = false
}
'
-fi
-if test_have_prereq SYMLINKS
-then
-test_expect_success "detection of filesystem w/o symlink support during repo init" '
+test_expect_success SYMLINKS "detection of filesystem w/o symlink support during repo init" '
{
test_must_fail git config --bool core.symlinks ||
test "$(git config --bool core.symlinks)" = true
}
'
-else
-test_expect_success "detection of filesystem w/o symlink support during repo init" '
+
+test_expect_success !SYMLINKS "detection of filesystem w/o symlink support during repo init" '
v=$(git config --bool core.symlinks) &&
test "$v" = false
'
-fi
test_expect_success "setup case tests" '
git config core.ignorecase true &&
diff --git a/t/t0063-string-list.sh b/t/t0063-string-list.sh
deleted file mode 100755
index aac63ba506..0000000000
--- a/t/t0063-string-list.sh
+++ /dev/null
@@ -1,142 +0,0 @@
-#!/bin/sh
-#
-# Copyright (c) 2012 Michael Haggerty
-#
-
-test_description='Test string list functionality'
-
-. ./test-lib.sh
-
-test_split () {
- cat >expected &&
- test_expect_success "split $1 at $2, max $3" "
- test-tool string-list split '$1' '$2' '$3' >actual &&
- test_cmp expected actual &&
- test-tool string-list split_in_place '$1' '$2' '$3' >actual &&
- test_cmp expected actual
- "
-}
-
-test_split_in_place() {
- cat >expected &&
- test_expect_success "split (in place) $1 at $2, max $3" "
- test-tool string-list split_in_place '$1' '$2' '$3' >actual &&
- test_cmp expected actual
- "
-}
-
-test_split "foo:bar:baz" ":" "-1" <<EOF
-3
-[0]: "foo"
-[1]: "bar"
-[2]: "baz"
-EOF
-
-test_split "foo:bar:baz" ":" "0" <<EOF
-1
-[0]: "foo:bar:baz"
-EOF
-
-test_split "foo:bar:baz" ":" "1" <<EOF
-2
-[0]: "foo"
-[1]: "bar:baz"
-EOF
-
-test_split "foo:bar:baz" ":" "2" <<EOF
-3
-[0]: "foo"
-[1]: "bar"
-[2]: "baz"
-EOF
-
-test_split "foo:bar:" ":" "-1" <<EOF
-3
-[0]: "foo"
-[1]: "bar"
-[2]: ""
-EOF
-
-test_split "" ":" "-1" <<EOF
-1
-[0]: ""
-EOF
-
-test_split ":" ":" "-1" <<EOF
-2
-[0]: ""
-[1]: ""
-EOF
-
-test_split_in_place "foo:;:bar:;:baz:;:" ":;" "-1" <<EOF
-10
-[0]: "foo"
-[1]: ""
-[2]: ""
-[3]: "bar"
-[4]: ""
-[5]: ""
-[6]: "baz"
-[7]: ""
-[8]: ""
-[9]: ""
-EOF
-
-test_split_in_place "foo:;:bar:;:baz" ":;" "0" <<EOF
-1
-[0]: "foo:;:bar:;:baz"
-EOF
-
-test_split_in_place "foo:;:bar:;:baz" ":;" "1" <<EOF
-2
-[0]: "foo"
-[1]: ";:bar:;:baz"
-EOF
-
-test_split_in_place "foo:;:bar:;:baz" ":;" "2" <<EOF
-3
-[0]: "foo"
-[1]: ""
-[2]: ":bar:;:baz"
-EOF
-
-test_split_in_place "foo:;:bar:;:" ":;" "-1" <<EOF
-7
-[0]: "foo"
-[1]: ""
-[2]: ""
-[3]: "bar"
-[4]: ""
-[5]: ""
-[6]: ""
-EOF
-
-test_expect_success "test filter_string_list" '
- test "x-" = "x$(test-tool string-list filter - y)" &&
- test "x-" = "x$(test-tool string-list filter no y)" &&
- test yes = "$(test-tool string-list filter yes y)" &&
- test yes = "$(test-tool string-list filter no:yes y)" &&
- test yes = "$(test-tool string-list filter yes:no y)" &&
- test y1:y2 = "$(test-tool string-list filter y1:y2 y)" &&
- test y2:y1 = "$(test-tool string-list filter y2:y1 y)" &&
- test "x-" = "x$(test-tool string-list filter x1:x2 y)"
-'
-
-test_expect_success "test remove_duplicates" '
- test "x-" = "x$(test-tool string-list remove_duplicates -)" &&
- test "x" = "x$(test-tool string-list remove_duplicates "")" &&
- test a = "$(test-tool string-list remove_duplicates a)" &&
- test a = "$(test-tool string-list remove_duplicates a:a)" &&
- test a = "$(test-tool string-list remove_duplicates a:a:a:a:a)" &&
- test a:b = "$(test-tool string-list remove_duplicates a:b)" &&
- test a:b = "$(test-tool string-list remove_duplicates a:a:b)" &&
- test a:b = "$(test-tool string-list remove_duplicates a:b:b)" &&
- test a:b:c = "$(test-tool string-list remove_duplicates a:b:c)" &&
- test a:b:c = "$(test-tool string-list remove_duplicates a:a:b:c)" &&
- test a:b:c = "$(test-tool string-list remove_duplicates a:b:b:c)" &&
- test a:b:c = "$(test-tool string-list remove_duplicates a:b:c:c)" &&
- test a:b:c = "$(test-tool string-list remove_duplicates a:a:b:b:c:c)" &&
- test a:b:c = "$(test-tool string-list remove_duplicates a:a:a:b:b:b:c:c:c)"
-'
-
-test_done
diff --git a/t/t0411-clone-from-partial.sh b/t/t0411-clone-from-partial.sh
index 196fc61784..9e6bca5625 100755
--- a/t/t0411-clone-from-partial.sh
+++ b/t/t0411-clone-from-partial.sh
@@ -59,6 +59,12 @@ test_expect_success 'pack-objects should fetch from promisor remote and execute
test_expect_success 'clone from promisor remote does not lazy-fetch by default' '
rm -f script-executed &&
+
+ # The --path-walk feature of "git pack-objects" is not
+ # compatible with this kind of fetch from an incomplete repo.
+ GIT_TEST_PACK_PATH_WALK=0 &&
+ export GIT_TEST_PACK_PATH_WALK &&
+
test_must_fail git clone evil no-lazy 2>err &&
test_grep "lazy fetching disabled" err &&
test_path_is_missing script-executed
diff --git a/t/t0450/adoc-help-mismatches b/t/t0450/adoc-help-mismatches
index c4a15fd0cb..06b469bdee 100644
--- a/t/t0450/adoc-help-mismatches
+++ b/t/t0450/adoc-help-mismatches
@@ -38,7 +38,6 @@ merge-one-file
multi-pack-index
name-rev
notes
-pack-objects
push
range-diff
rebase
diff --git a/t/t0610-reftable-basics.sh b/t/t0610-reftable-basics.sh
index 1be534a895..3ea5d51532 100755
--- a/t/t0610-reftable-basics.sh
+++ b/t/t0610-reftable-basics.sh
@@ -477,11 +477,7 @@ test_expect_success !CYGWIN 'ref transaction: many concurrent writers' '
test_commit --no-tag initial &&
head=$(git rev-parse HEAD) &&
- for i in $(test_seq 100)
- do
- printf "%s commit\trefs/heads/branch-%s\n" "$head" "$i" ||
- return 1
- done >expect &&
+ test_seq -f "$head commit\trefs/heads/branch-%d" 100 >expect &&
printf "%s commit\trefs/heads/main\n" "$head" >>expect &&
for i in $(test_seq 100)
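This and the later reftable/update-ref conversions all follow the same shape; a minimal sketch of the two equivalent forms, using the test_seq helper exactly as the hunks do:

    # One formatted line per number, replacing the hand-rolled loop:
    test_seq -f "update refs/heads/branch-%d HEAD" 3 >input
    # ...which produces the same input as:
    for i in $(test_seq 3)
    do
        printf "update refs/heads/branch-%d HEAD\n" "$i" || return 1
    done >input
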
diff --git a/t/t0612-reftable-jgit-compatibility.sh b/t/t0612-reftable-jgit-compatibility.sh
index d0d7e80b49..7df2ad5817 100755
--- a/t/t0612-reftable-jgit-compatibility.sh
+++ b/t/t0612-reftable-jgit-compatibility.sh
@@ -112,14 +112,11 @@ test_expect_success 'JGit can read multi-level index' '
cd repo &&
test_commit A &&
- awk "
- BEGIN {
- print \"start\";
- for (i = 0; i < 10000; i++)
- printf \"create refs/heads/branch-%d HEAD\n\", i;
- print \"commit\";
- }
- " >input &&
+ {
+ echo start &&
+ test_seq -f "create refs/heads/branch-%d HEAD" 10000 &&
+ echo commit
+ } >input &&
git update-ref --stdin <input &&
test_same_refs &&
diff --git a/t/t0613-reftable-write-options.sh b/t/t0613-reftable-write-options.sh
index 6447920c9b..d77e601111 100755
--- a/t/t0613-reftable-write-options.sh
+++ b/t/t0613-reftable-write-options.sh
@@ -66,11 +66,7 @@ test_expect_success 'many refs results in multiple blocks' '
(
cd repo &&
test_commit initial &&
- for i in $(test_seq 200)
- do
- printf "update refs/heads/branch-%d HEAD\n" "$i" ||
- return 1
- done >input &&
+ test_seq -f "update refs/heads/branch-%d HEAD" 200 >input &&
git update-ref --stdin <input &&
git pack-refs &&
@@ -180,11 +176,7 @@ test_expect_success 'restart interval at every single record' '
(
cd repo &&
test_commit initial &&
- for i in $(test_seq 10)
- do
- printf "update refs/heads/branch-%d HEAD\n" "$i" ||
- return 1
- done >input &&
+ test_seq -f "update refs/heads/branch-%d HEAD" 10 >input &&
git update-ref --stdin <input &&
git -c reftable.restartInterval=1 pack-refs &&
@@ -224,11 +216,7 @@ test_expect_success 'object index gets written by default with ref index' '
(
cd repo &&
test_commit initial &&
- for i in $(test_seq 5)
- do
- printf "update refs/heads/branch-%d HEAD\n" "$i" ||
- return 1
- done >input &&
+ test_seq -f "update refs/heads/branch-%d HEAD" 5 >input &&
git update-ref --stdin <input &&
git -c reftable.blockSize=100 pack-refs &&
@@ -263,11 +251,7 @@ test_expect_success 'object index can be disabled' '
(
cd repo &&
test_commit initial &&
- for i in $(test_seq 5)
- do
- printf "update refs/heads/branch-%d HEAD\n" "$i" ||
- return 1
- done >input &&
+ test_seq -f "update refs/heads/branch-%d HEAD" 5 >input &&
git update-ref --stdin <input &&
git -c reftable.blockSize=100 -c reftable.indexObjects=false pack-refs &&
diff --git a/t/t1006-cat-file.sh b/t/t1006-cat-file.sh
index 317da6869c..1f61b666a7 100755
--- a/t/t1006-cat-file.sh
+++ b/t/t1006-cat-file.sh
@@ -113,53 +113,55 @@ strlen () {
run_tests () {
type=$1
- oid=$2
- size=$3
- content=$4
- pretty_content=$5
+ object_name="$2"
+ mode=$3
+ size=$4
+ content=$5
+ pretty_content=$6
+ oid=${7:-"$object_name"}
batch_output="$oid $type $size
$content"
test_expect_success "$type exists" '
- git cat-file -e $oid
+ git cat-file -e "$object_name"
'
test_expect_success "Type of $type is correct" '
echo $type >expect &&
- git cat-file -t $oid >actual &&
+ git cat-file -t "$object_name" >actual &&
test_cmp expect actual
'
test_expect_success "Size of $type is correct" '
echo $size >expect &&
- git cat-file -s $oid >actual &&
+ git cat-file -s "$object_name" >actual &&
test_cmp expect actual
'
test -z "$content" ||
test_expect_success "Content of $type is correct" '
echo_without_newline "$content" >expect &&
- git cat-file $type $oid >actual &&
+ git cat-file $type "$object_name" >actual &&
test_cmp expect actual
'
test_expect_success "Pretty content of $type is correct" '
echo_without_newline "$pretty_content" >expect &&
- git cat-file -p $oid >actual &&
+ git cat-file -p "$object_name" >actual &&
test_cmp expect actual
'
test -z "$content" ||
test_expect_success "--batch output of $type is correct" '
echo "$batch_output" >expect &&
- echo $oid | git cat-file --batch >actual &&
+ echo "$object_name" | git cat-file --batch >actual &&
test_cmp expect actual
'
test_expect_success "--batch-check output of $type is correct" '
echo "$oid $type $size" >expect &&
- echo_without_newline $oid | git cat-file --batch-check >actual &&
+ echo_without_newline "$object_name" | git cat-file --batch-check >actual &&
test_cmp expect actual
'
@@ -168,13 +170,13 @@ $content"
test -z "$content" ||
test_expect_success "--batch-command $opt output of $type content is correct" '
echo "$batch_output" >expect &&
- test_write_lines "contents $oid" | git cat-file --batch-command $opt >actual &&
+ test_write_lines "contents $object_name" | git cat-file --batch-command $opt >actual &&
test_cmp expect actual
'
test_expect_success "--batch-command $opt output of $type info is correct" '
echo "$oid $type $size" >expect &&
- test_write_lines "info $oid" |
+ test_write_lines "info $object_name" |
git cat-file --batch-command $opt >actual &&
test_cmp expect actual
'
@@ -182,30 +184,45 @@ $content"
test_expect_success "custom --batch-check format" '
echo "$type $oid" >expect &&
- echo $oid | git cat-file --batch-check="%(objecttype) %(objectname)" >actual &&
+ echo "$object_name" | git cat-file --batch-check="%(objecttype) %(objectname)" >actual &&
test_cmp expect actual
'
test_expect_success "custom --batch-command format" '
echo "$type $oid" >expect &&
- echo "info $oid" | git cat-file --batch-command="%(objecttype) %(objectname)" >actual &&
+ echo "info $object_name" | git cat-file --batch-command="%(objecttype) %(objectname)" >actual &&
test_cmp expect actual
'
- test_expect_success '--batch-check with %(rest)' '
+ # FIXME: %(rest) is incompatible with object names that include whitespace,
+ # e.g. HEAD:path/to/a/file with spaces. Use the resolved OID as input to
+ # test this instead of the raw object name.
+ if echo "$object_name" | grep -q " "; then
+ test_rest=test_expect_failure
+ else
+ test_rest=test_expect_success
+ fi
+
+ $test_rest '--batch-check with %(rest)' '
echo "$type this is some extra content" >expect &&
- echo "$oid this is some extra content" |
+ echo "$object_name this is some extra content" |
git cat-file --batch-check="%(objecttype) %(rest)" >actual &&
test_cmp expect actual
'
+ test_expect_success '--batch-check with %(objectmode)' '
+ echo "$mode $oid" >expect &&
+ echo $object_name | git cat-file --batch-check="%(objectmode) %(objectname)" >actual &&
+ test_cmp expect actual
+ '
+
test -z "$content" ||
test_expect_success "--batch without type ($type)" '
{
echo "$size" &&
echo "$content"
} >expect &&
- echo $oid | git cat-file --batch="%(objectsize)" >actual &&
+ echo "$object_name" | git cat-file --batch="%(objectsize)" >actual &&
test_cmp expect actual
'
@@ -215,7 +232,7 @@ $content"
echo "$type" &&
echo "$content"
} >expect &&
- echo $oid | git cat-file --batch="%(objecttype)" >actual &&
+ echo "$object_name" | git cat-file --batch="%(objecttype)" >actual &&
test_cmp expect actual
'
}
@@ -230,13 +247,14 @@ test_expect_success "setup" '
git config extensions.compatobjectformat $test_compat_hash_algo &&
echo_without_newline "$hello_content" > hello &&
git update-index --add hello &&
+ echo_without_newline "$hello_content" > "path with spaces" &&
+ git update-index --add --chmod=+x "path with spaces" &&
git commit -m "add hello file"
'
run_blob_tests () {
oid=$1
-
- run_tests 'blob' $oid $hello_size "$hello_content" "$hello_content"
+ run_tests 'blob' $oid "" $hello_size "$hello_content" "$hello_content"
test_expect_success '--batch-command --buffer with flush for blob info' '
echo "$oid blob $hello_size" >expect &&
@@ -269,13 +287,17 @@ test_expect_success '--batch-check without %(rest) considers whole line' '
tree_oid=$(git write-tree)
tree_compat_oid=$(git rev-parse --output-object-format=$test_compat_hash_algo $tree_oid)
-tree_size=$(($(test_oid rawsz) + 13))
-tree_compat_size=$(($(test_oid --hash=compat rawsz) + 13))
-tree_pretty_content="100644 blob $hello_oid hello${LF}"
-tree_compat_pretty_content="100644 blob $hello_compat_oid hello${LF}"
-
-run_tests 'tree' $tree_oid $tree_size "" "$tree_pretty_content"
-run_tests 'tree' $tree_compat_oid $tree_compat_size "" "$tree_compat_pretty_content"
+tree_size=$((2 * $(test_oid rawsz) + 13 + 24))
+tree_compat_size=$((2 * $(test_oid --hash=compat rawsz) + 13 + 24))
+tree_pretty_content="100644 blob $hello_oid hello${LF}100755 blob $hello_oid path with spaces${LF}"
+tree_compat_pretty_content="100644 blob $hello_compat_oid hello${LF}100755 blob $hello_compat_oid path with spaces${LF}"
+
+run_tests 'tree' $tree_oid "" $tree_size "" "$tree_pretty_content"
+run_tests 'tree' $tree_compat_oid "" $tree_compat_size "" "$tree_compat_pretty_content"
+run_tests 'blob' "$tree_oid:hello" "100644" $hello_size "" "$hello_content" $hello_oid
+run_tests 'blob' "$tree_compat_oid:hello" "100644" $hello_size "" "$hello_content" $hello_compat_oid
+run_tests 'blob' "$tree_oid:path with spaces" "100755" $hello_size "" "$hello_content" $hello_oid
+run_tests 'blob' "$tree_compat_oid:path with spaces" "100755" $hello_size "" "$hello_content" $hello_compat_oid
commit_message="Initial commit"
commit_oid=$(echo_without_newline "$commit_message" | git commit-tree $tree_oid)
@@ -294,8 +316,8 @@ committer $GIT_COMMITTER_NAME <$GIT_COMMITTER_EMAIL> $GIT_COMMITTER_DATE
$commit_message"
-run_tests 'commit' $commit_oid $commit_size "$commit_content" "$commit_content"
-run_tests 'commit' $commit_compat_oid $commit_compat_size "$commit_compat_content" "$commit_compat_content"
+run_tests 'commit' $commit_oid "" $commit_size "$commit_content" "$commit_content"
+run_tests 'commit' $commit_compat_oid "" $commit_compat_size "$commit_compat_content" "$commit_compat_content"
tag_header_without_oid="type blob
tag hellotag
@@ -318,8 +340,8 @@ tag_size=$(strlen "$tag_content")
tag_compat_oid=$(git rev-parse --output-object-format=$test_compat_hash_algo $tag_oid)
tag_compat_size=$(strlen "$tag_compat_content")
-run_tests 'tag' $tag_oid $tag_size "$tag_content" "$tag_content"
-run_tests 'tag' $tag_compat_oid $tag_compat_size "$tag_compat_content" "$tag_compat_content"
+run_tests 'tag' $tag_oid "" $tag_size "$tag_content" "$tag_content"
+run_tests 'tag' $tag_compat_oid "" $tag_compat_size "$tag_compat_content" "$tag_compat_content"
test_expect_success "Reach a blob from a tag pointing to it" '
echo_without_newline "$hello_content" >expect &&
@@ -1198,6 +1220,31 @@ test_expect_success 'cat-file --batch-check respects replace objects' '
test_cmp expect actual
'
+test_expect_success 'batch-check with a submodule' '
+ # FIXME: this call to mktree is incompatible with compatObjectFormat
+ # because the submodule OID cannot be mapped to the compat hash algo.
+ test_unconfig extensions.compatobjectformat &&
+ printf "160000 commit $(test_oid deadbeef)\tsub\n" >tree-with-sub &&
+ tree=$(git mktree <tree-with-sub) &&
+ test_config extensions.compatobjectformat $test_compat_hash_algo &&
+
+ git cat-file --batch-check >actual <<-EOF &&
+ $tree:sub
+ EOF
+ printf "$(test_oid deadbeef) submodule\n" >expect &&
+ test_cmp expect actual
+'
+
+test_expect_success 'batch-check with a submodule, object exists' '
+ printf "160000 commit $commit_oid\tsub\n" >tree-with-sub &&
+ tree=$(git mktree <tree-with-sub) &&
+ git cat-file --batch-check >actual <<-EOF &&
+ $tree:sub
+ EOF
+ printf "$commit_oid commit $commit_size\n" >expect &&
+ test_cmp expect actual
+'
+
# Pull the entry for object with oid "$1" out of the output of
# "cat-file --batch", including its object content (which requires
# parsing and reading a set amount of bytes, hence perl).
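With the widened run_tests helper above, callers pass the object name, an expected mode (empty when not meaningful), the size, raw and pretty content, and optionally the resolved OID; two calls from this same file show the convention:

    #          type   object-name        mode      size        content  pretty           [oid]
    run_tests 'blob' "$tree_oid:hello" "100644" $hello_size "" "$hello_content" $hello_oid
    # When the seventh argument is omitted, $oid falls back to the object name:
    run_tests 'commit' $commit_oid "" $commit_size "$commit_content" "$commit_content"
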
diff --git a/t/t1007-hash-object.sh b/t/t1007-hash-object.sh
index dbbe9fb0d4..de076293b6 100755
--- a/t/t1007-hash-object.sh
+++ b/t/t1007-hash-object.sh
@@ -30,7 +30,7 @@ setup_repo() {
test_repo=test
push_repo() {
- test_create_repo $test_repo
+ git init --quiet $test_repo
cd $test_repo
setup_repo
@@ -252,9 +252,9 @@ test_expect_success '--literally complains about non-standard types' '
test_must_fail git hash-object -t bogus --literally --stdin
'
-test_expect_success '--stdin outside of repository (uses SHA-1)' '
+test_expect_success '--stdin outside of repository (uses default hash)' '
nongit git hash-object --stdin <hello >actual &&
- echo "$(test_oid --hash=sha1 hello)" >expect &&
+ echo "$(test_oid --hash=builtin hello)" >expect &&
test_cmp expect actual
'
diff --git a/t/t1021-rerere-in-workdir.sh b/t/t1021-rerere-in-workdir.sh
deleted file mode 100755
index 0b892894eb..0000000000
--- a/t/t1021-rerere-in-workdir.sh
+++ /dev/null
@@ -1,58 +0,0 @@
-#!/bin/sh
-
-test_description='rerere run in a workdir'
-GIT_TEST_DEFAULT_INITIAL_BRANCH_NAME=main
-export GIT_TEST_DEFAULT_INITIAL_BRANCH_NAME
-
-. ./test-lib.sh
-
-test_expect_success SYMLINKS setup '
- git config rerere.enabled true &&
- >world &&
- git add world &&
- test_tick &&
- git commit -m initial &&
-
- echo hello >world &&
- test_tick &&
- git commit -a -m hello &&
-
- git checkout -b side HEAD^ &&
- echo goodbye >world &&
- test_tick &&
- git commit -a -m goodbye &&
-
- git checkout main
-'
-
-test_expect_success SYMLINKS 'rerere in workdir' '
- rm -rf .git/rr-cache &&
- "$SHELL_PATH" "$TEST_DIRECTORY/../contrib/workdir/git-new-workdir" . work &&
- (
- cd work &&
- test_must_fail git merge side &&
- git rerere status >actual &&
- echo world >expect &&
- test_cmp expect actual
- )
-'
-
-# This fails because we don't resolve relative symlink in mkdir_in_gitdir()
-# For the purpose of helping contrib/workdir/git-new-workdir users, we do not
-# have to support relative symlinks, but it might be nicer to make this work
-# with a relative symbolic link someday.
-test_expect_failure SYMLINKS 'rerere in workdir (relative)' '
- rm -rf .git/rr-cache &&
- "$SHELL_PATH" "$TEST_DIRECTORY/../contrib/workdir/git-new-workdir" . krow &&
- (
- cd krow &&
- rm -f .git/rr-cache &&
- ln -s ../.git/rr-cache .git/rr-cache &&
- test_must_fail git merge side &&
- git rerere status >actual &&
- echo world >expect &&
- test_cmp expect actual
- )
-'
-
-test_done
diff --git a/t/t1300-config.sh b/t/t1300-config.sh
index 51a85e83c2..f856821839 100755
--- a/t/t1300-config.sh
+++ b/t/t1300-config.sh
@@ -2851,4 +2851,15 @@ test_expect_success 'writing to stdin is rejected' '
done
+test_expect_success 'writing value with trailing CR not stripped on read' '
+ test_when_finished "rm -rf cr-test" &&
+
+ printf "bar\r\n" >expect &&
+ git init cr-test &&
+ git -C cr-test config set core.foo $(printf "bar\r") &&
+ git -C cr-test config get core.foo >actual &&
+
+ test_cmp expect actual
+'
+
test_done
diff --git a/t/t1400-update-ref.sh b/t/t1400-update-ref.sh
index d29d23cb89..96648a6e5d 100755
--- a/t/t1400-update-ref.sh
+++ b/t/t1400-update-ref.sh
@@ -1380,10 +1380,7 @@ test_expect_success 'fails with duplicate ref update via symref' '
test_expect_success ULIMIT_FILE_DESCRIPTORS 'large transaction creating branches does not burst open file limit' '
(
- for i in $(test_seq 33)
- do
- echo "create refs/heads/$i HEAD" || exit 1
- done >large_input &&
+ test_seq -f "create refs/heads/%d HEAD" 33 >large_input &&
run_with_limited_open_files git update-ref --stdin <large_input &&
git rev-parse --verify -q refs/heads/33
)
@@ -1391,10 +1388,7 @@ test_expect_success ULIMIT_FILE_DESCRIPTORS 'large transaction creating branches
test_expect_success ULIMIT_FILE_DESCRIPTORS 'large transaction deleting branches does not burst open file limit' '
(
- for i in $(test_seq 33)
- do
- echo "delete refs/heads/$i HEAD" || exit 1
- done >large_input &&
+ test_seq -f "delete refs/heads/%d HEAD" 33 >large_input &&
run_with_limited_open_files git update-ref --stdin <large_input &&
test_must_fail git rev-parse --verify -q refs/heads/33
)
@@ -2299,6 +2293,51 @@ do
test_grep -q "refname conflict" stdout
)
'
+
+ test_expect_success "stdin $type batch-updates delete incorrect symbolic ref" '
+ git init repo &&
+ test_when_finished "rm -fr repo" &&
+ (
+ cd repo &&
+ test_commit c1 &&
+ head=$(git rev-parse HEAD) &&
+ git symbolic-ref refs/heads/symbolic refs/heads/non-existent &&
+
+ format_command $type "delete refs/heads/symbolic" "$head" >stdin &&
+ git update-ref $type --stdin --batch-updates <stdin >stdout &&
+ test_grep "reference does not exist" stdout
+ )
+ '
+
+ test_expect_success "stdin $type batch-updates delete with incorrect old_oid" '
+ git init repo &&
+ test_when_finished "rm -fr repo" &&
+ (
+ cd repo &&
+ test_commit c1 &&
+ git branch new-branch &&
+ test_commit c2 &&
+ head=$(git rev-parse HEAD) &&
+
+ format_command $type "delete refs/heads/new-branch" "$head" >stdin &&
+ git update-ref $type --stdin --batch-updates <stdin >stdout &&
+ test_grep "incorrect old value provided" stdout
+ )
+ '
+
+ test_expect_success "stdin $type batch-updates delete non-existent ref" '
+ git init repo &&
+ test_when_finished "rm -fr repo" &&
+ (
+ cd repo &&
+ test_commit commit &&
+ head=$(git rev-parse HEAD) &&
+
+ format_command $type "delete refs/heads/non-existent" "$head" >stdin &&
+ git update-ref $type --stdin --batch-updates <stdin >stdout &&
+ test_grep "reference does not exist" stdout
+ )
+ '
done
test_expect_success 'update-ref should also create reflog for HEAD' '
@@ -2310,4 +2349,23 @@ test_expect_success 'update-ref should also create reflog for HEAD' '
test_cmp expect actual
'
+test_expect_success REFFILES 'empty directories are pruned when aborting a transaction' '
+ test_path_is_missing .git/refs/heads/nested &&
+ git update-ref --stdin <<-EOF &&
+ create refs/heads/nested/something HEAD
+ prepare
+ abort
+ EOF
+ test_path_is_missing .git/refs/heads/nested
+'
+
+test_expect_success REFFILES 'empty directories are pruned when not committing' '
+ test_path_is_missing .git/refs/heads/nested &&
+ git update-ref --stdin <<-EOF &&
+ create refs/heads/nested/something HEAD
+ prepare
+ EOF
+ test_path_is_missing .git/refs/heads/nested
+'
+
test_done
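The three batch-updates cases added above rest on the same property: a rejected update is reported on stdout instead of failing the whole transaction. A sketch in the plain (non -z) stdin format, assuming the same error wording checked there:

    head=$(git rev-parse HEAD) &&
    echo "delete refs/heads/non-existent $head" |
    git update-ref --stdin --batch-updates >stdout &&
    test_grep "reference does not exist" stdout
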
diff --git a/t/t1410-reflog.sh b/t/t1410-reflog.sh
index 42b501f163..e30f87a358 100755
--- a/t/t1410-reflog.sh
+++ b/t/t1410-reflog.sh
@@ -673,4 +673,32 @@ test_expect_success 'reflog drop --all with reference' '
)
'
+test_expect_success 'expire with pattern config' '
+ # Split refs/heads/ into two roots so we can apply config to each. Make
+ # two branches per root to verify that config is applied correctly
+ # multiple times.
+ git branch root1/branch1 &&
+ git branch root1/branch2 &&
+ git branch root2/branch1 &&
+ git branch root2/branch2 &&
+
+ test_config "gc.reflogexpire" "never" &&
+ test_config "gc.refs/heads/root2/*.reflogExpire" "now" &&
+ git reflog expire \
+ root1/branch1 root1/branch2 \
+ root2/branch1 root2/branch2 &&
+
+ cat >expect <<-\EOF &&
+ root1/branch1@{0}
+ root1/branch2@{0}
+ EOF
+ git log -g --branches="root*" --format=%gD >actual.raw &&
+ # The sole reflog entry of each branch points to the same commit, so
+ # the order in which they are shown is nondeterministic. We just care
+ # about what was expired (and what was not), so sort to get a known
+ # order.
+ sort <actual.raw >actual.sorted &&
+ test_cmp expect actual.sorted
+'
+
test_done
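The test above relies on pattern-scoped reflog expiry settings; a condensed sketch of the same configuration, assuming gc.<pattern>.reflogExpire keys are matched against each ref as exercised there:

    git config gc.reflogExpire never &&
    git config "gc.refs/heads/root2/*.reflogExpire" now &&
    git reflog expire root2/branch1 root2/branch2
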
diff --git a/t/t1416-ref-transaction-hooks.sh b/t/t1416-ref-transaction-hooks.sh
index 8c777f7cf8..d91dd3a3b5 100755
--- a/t/t1416-ref-transaction-hooks.sh
+++ b/t/t1416-ref-transaction-hooks.sh
@@ -120,8 +120,6 @@ test_expect_success 'interleaving hook calls succeed' '
cat >expect <<-EOF &&
hooks/update refs/tags/PRE $ZERO_OID $PRE_OID
- hooks/reference-transaction prepared
- hooks/reference-transaction committed
hooks/update refs/tags/POST $ZERO_OID $POST_OID
hooks/reference-transaction prepared
hooks/reference-transaction committed
diff --git a/t/t1517-outside-repo.sh b/t/t1517-outside-repo.sh
index 6824581317..8f59b867f2 100755
--- a/t/t1517-outside-repo.sh
+++ b/t/t1517-outside-repo.sh
@@ -114,4 +114,11 @@ test_expect_success 'update-server-info does not crash with -h' '
test_grep "[Uu]sage: git update-server-info " usage
'
+test_expect_success 'prune does not crash with -h' '
+ test_expect_code 129 git prune -h >usage &&
+ test_grep "[Uu]sage: git prune " usage &&
+ test_expect_code 129 nongit git prune -h >usage &&
+ test_grep "[Uu]sage: git prune " usage
+'
+
test_done
diff --git a/t/t2400-worktree-add.sh b/t/t2400-worktree-add.sh
index 90638fa886..023e1301c8 100755
--- a/t/t2400-worktree-add.sh
+++ b/t/t2400-worktree-add.sh
@@ -42,8 +42,8 @@ test_expect_success '"add" using - shorthand' '
test_expect_success '"add" refuses to checkout locked branch' '
test_must_fail git worktree add zere main &&
- ! test -d zere &&
- ! test -d .git/worktrees/zere
+ test_path_is_missing zere &&
+ test_path_is_missing .git/worktrees/zere
'
test_expect_success 'checking out paths not complaining about linked checkouts' '
@@ -70,14 +70,14 @@ test_expect_success '"add" worktree' '
test_expect_success '"add" worktree with lock' '
git worktree add --detach --lock here-with-lock main &&
test_when_finished "git worktree unlock here-with-lock || :" &&
- test -f .git/worktrees/here-with-lock/locked
+ test_path_is_file .git/worktrees/here-with-lock/locked
'
test_expect_success '"add" worktree with lock and reason' '
lock_reason="why not" &&
git worktree add --detach --lock --reason "$lock_reason" here-with-lock-reason main &&
test_when_finished "git worktree unlock here-with-lock-reason || :" &&
- test -f .git/worktrees/here-with-lock-reason/locked &&
+ test_path_is_file .git/worktrees/here-with-lock-reason/locked &&
echo "$lock_reason" >expect &&
test_cmp expect .git/worktrees/here-with-lock-reason/locked
'
@@ -412,14 +412,14 @@ test_expect_success '"add --orphan" with empty repository' '
test_expect_success '"add" worktree with orphan branch and lock' '
git worktree add --lock --orphan -b orphanbr orphan-with-lock &&
test_when_finished "git worktree unlock orphan-with-lock || :" &&
- test -f .git/worktrees/orphan-with-lock/locked
+ test_path_is_file .git/worktrees/orphan-with-lock/locked
'
test_expect_success '"add" worktree with orphan branch, lock, and reason' '
lock_reason="why not" &&
git worktree add --detach --lock --reason "$lock_reason" orphan-with-lock-reason main &&
test_when_finished "git worktree unlock orphan-with-lock-reason || :" &&
- test -f .git/worktrees/orphan-with-lock-reason/locked &&
+ test_path_is_file .git/worktrees/orphan-with-lock-reason/locked &&
echo "$lock_reason" >expect &&
test_cmp expect .git/worktrees/orphan-with-lock-reason/locked
'
@@ -474,7 +474,7 @@ test_expect_success 'local clone --shared from linked checkout' '
test_expect_success '"add" worktree with --no-checkout' '
git worktree add --no-checkout -b swamp swamp &&
- ! test -e swamp/init.t &&
+ test_path_is_missing swamp/init.t &&
git -C swamp reset --hard &&
test_cmp init.t swamp/init.t
'
@@ -497,7 +497,7 @@ test_expect_success 'put a worktree under rebase' '
test_expect_success 'add a worktree, checking out a rebased branch' '
test_must_fail git worktree add new-rebase under-rebase &&
- ! test -d new-rebase
+ test_path_is_missing new-rebase
'
test_expect_success 'checking out a rebased branch from another worktree' '
@@ -535,7 +535,7 @@ test_expect_success 'checkout a branch under bisect' '
git worktree list >actual &&
grep "under-bisect.*detached HEAD" actual &&
test_must_fail git worktree add new-bisect under-bisect &&
- ! test -d new-bisect
+ test_path_is_missing new-bisect
)
'
@@ -1165,7 +1165,7 @@ test_expect_success '"add" not tripped up by magic worktree matching"' '
test_expect_success FUNNYNAMES 'sanitize generated worktree name' '
git worktree add --detach ". weird*..?.lock.lock" &&
- test -d .git/worktrees/---weird-.-
+ test_path_is_dir .git/worktrees/---weird-.-
'
test_expect_success '"add" should not fail because of another bad worktree' '
diff --git a/t/t3000-ls-files-others.sh b/t/t3000-ls-files-others.sh
index 13f66fd649..b41e7f0daa 100755
--- a/t/t3000-ls-files-others.sh
+++ b/t/t3000-ls-files-others.sh
@@ -73,25 +73,6 @@ test_expect_success 'ls-files --others handles non-submodule .git' '
test_cmp expected1 output
'
-test_expect_success SYMLINKS 'ls-files --others with symlinked submodule' '
- git init super &&
- git init sub &&
- (
- cd sub &&
- >a &&
- git add a &&
- git commit -m sub &&
- git pack-refs --all
- ) &&
- (
- cd super &&
- "$SHELL_PATH" "$TEST_DIRECTORY/../contrib/workdir/git-new-workdir" ../sub sub &&
- git ls-files --others --exclude-standard >../actual
- ) &&
- echo sub/ >expect &&
- test_cmp expect actual
-'
-
test_expect_success 'setup nested pathspec search' '
test_create_repo nested &&
(
diff --git a/t/t3418-rebase-continue.sh b/t/t3418-rebase-continue.sh
index 127216f722..b8a8dd77e7 100755
--- a/t/t3418-rebase-continue.sh
+++ b/t/t3418-rebase-continue.sh
@@ -328,6 +328,19 @@ test_expect_success 'there is no --no-reschedule-failed-exec in an ongoing rebas
test_expect_code 129 git rebase --edit-todo --no-reschedule-failed-exec
'
+test_expect_success 'no change in comment character due to conflict markers with core.commentChar=auto' '
+ git checkout -b branch-a &&
+ test_commit A F1 &&
+ git checkout -b branch-b HEAD^ &&
+ test_commit B F1 &&
+ test_must_fail git rebase branch-a &&
+ printf "B\nA\n" >F1 &&
+ git add F1 &&
+ GIT_EDITOR="cat >actual" git -c core.commentChar=auto rebase --continue &&
+ # Check that "#" is still the comment character.
+ test_grep "^# Changes to be committed" actual
+'
+
test_orig_head_helper () {
test_when_finished 'git rebase --abort &&
git checkout topic &&
diff --git a/t/t3600-rm.sh b/t/t3600-rm.sh
index 98259e2ada..1f16e6b522 100755
--- a/t/t3600-rm.sh
+++ b/t/t3600-rm.sh
@@ -17,11 +17,6 @@ test_expect_success 'Initialize test directory' '
git commit -m "add normal files"
'
-if test_have_prereq !FUNNYNAMES
-then
- say 'Your filesystem does not allow tabs in filenames.'
-fi
-
test_expect_success FUNNYNAMES 'add files with funny names' '
touch -- "tab embedded" "newline${LF}embedded" &&
git add -- "tab embedded" "newline${LF}embedded" &&
diff --git a/t/t3903-stash.sh b/t/t3903-stash.sh
index 74666ff3e4..0bb4648e36 100755
--- a/t/t3903-stash.sh
+++ b/t/t3903-stash.sh
@@ -11,6 +11,13 @@ export GIT_TEST_DEFAULT_INITIAL_BRANCH_NAME
. ./test-lib.sh
. "$TEST_DIRECTORY"/lib-unique-files.sh
+test_expect_success 'setup' '
+ test_oid_cache <<-EOF
+ export_base sha1:73c9bab443d1f88ac61aa533d2eeaaa15451239c
+ export_base sha256:f210fa6346e3e2ce047bdb570426b17075980c1ac01fec8fc4b75bd3ab4bcfe4
+ EOF
+'
+
test_expect_success 'usage on cmd and subcommand invalid option' '
test_expect_code 129 git stash --invalid-option 2>usage &&
grep "or: git stash" usage &&
@@ -1177,6 +1184,28 @@ test_expect_success 'stash -- <pathspec> stashes and restores the file' '
test_path_is_file bar
'
+test_expect_success 'stash --patch <pathspec> stashes and restores the file' '
+ test_write_lines b c >file &&
+ git commit -m "add a few lines" file &&
+ test_write_lines a b c d >file &&
+ test_write_lines b c d >expect-file &&
+ echo changed-other-file >other-file &&
+ test_write_lines s y n | git stash -m "stash bar" --patch file &&
+ test_cmp expect-file file &&
+ echo changed-other-file >expect &&
+ test_cmp expect other-file &&
+ git checkout HEAD -- file &&
+ git stash pop &&
+ test_cmp expect other-file &&
+ test_write_lines a b c >expect &&
+ test_cmp expect file
+'
+
+test_expect_success 'stash <pathspec> -p is rejected' '
+ test_must_fail git stash file -p 2>err &&
+ test_grep "subcommand wasn${SQ}t specified; ${SQ}push${SQ} can${SQ}t be assumed due to unexpected token ${SQ}file${SQ}" err
+'
+
test_expect_success 'stash -- <pathspec> stashes in subdirectory' '
mkdir sub &&
>foo &&
@@ -1412,6 +1441,100 @@ test_expect_success 'stash --keep-index --include-untracked with empty tree' '
)
'
+test_expect_success 'stash export and import round-trip stashes' '
+ git reset &&
+ >untracked &&
+ >tracked1 &&
+ >tracked2 &&
+ git add tracked* &&
+ git stash -- &&
+ >subdir/untracked &&
+ >subdir/tracked1 &&
+ >subdir/tracked2 &&
+ git add subdir/tracked* &&
+ git stash --include-untracked -- subdir/ &&
+ git tag t-stash0 stash@{0} &&
+ git tag t-stash1 stash@{1} &&
+ simple=$(git stash export --print) &&
+ git stash clear &&
+ git stash import "$simple" &&
+ test_cmp_rev stash@{0} t-stash0 &&
+ test_cmp_rev stash@{1} t-stash1 &&
+ git stash export --to-ref refs/heads/foo &&
+ test_cmp_rev "$(test_oid empty_tree)" foo: &&
+ test_cmp_rev "$(test_oid empty_tree)" foo^: &&
+ test_cmp_rev t-stash0 foo^2 &&
+ test_cmp_rev t-stash1 foo^^2 &&
+ git log --first-parent --format="%s" refs/heads/foo >log &&
+ grep "^git stash: " log >log2 &&
+ test_line_count = 13 log2 &&
+ git stash clear &&
+ git stash import foo &&
+ test_cmp_rev stash@{0} t-stash0 &&
+ test_cmp_rev stash@{1} t-stash1
+'
+
+test_expect_success 'stash import appends commits' '
+ git log --format=oneline -g refs/stash >out &&
+ cat out out >out2 &&
+ git stash import refs/heads/foo &&
+ git log --format=oneline -g refs/stash >actual &&
+ test_line_count = $(wc -l <out2) actual
+'
+
+test_expect_success 'stash export can accept specified stashes' '
+ git stash clear &&
+ git stash import foo &&
+ git stash export --to-ref refs/heads/bar stash@{1} stash@{0} &&
+ git stash clear &&
+ git stash import refs/heads/bar &&
+ test_cmp_rev stash@{1} t-stash0 &&
+ test_cmp_rev stash@{0} t-stash1 &&
+ git log --format=oneline -g refs/stash >actual &&
+ test_line_count = 2 actual
+'
+
+test_expect_success 'stash export rejects invalid arguments' '
+ test_must_fail git stash export --print --to-ref refs/heads/invalid 2>err &&
+ grep "exactly one of --print and --to-ref is required" err &&
+ test_must_fail git stash export 2>err2 &&
+ grep "exactly one of --print and --to-ref is required" err2
+'
+
+test_expect_success 'stash can import and export zero stashes' '
+ git stash clear &&
+ git stash export --to-ref refs/heads/baz &&
+ test_cmp_rev "$(test_oid empty_tree)" baz: &&
+ test_cmp_rev "$(test_oid export_base)" baz &&
+ test_must_fail git rev-parse baz^1 &&
+ git stash import baz &&
+ test_must_fail git rev-parse refs/stash
+'
+
+test_expect_success 'stash rejects invalid attempts to import commits' '
+ git stash import foo &&
+ test_must_fail git stash import HEAD 2>output &&
+ oid=$(git rev-parse HEAD) &&
+ grep "$oid is not a valid exported stash commit" output &&
+ test_cmp_rev stash@{0} t-stash0 &&
+
+ git checkout --orphan orphan &&
+ git commit-tree $(test_oid empty_tree) -p "$oid" -p "$oid^" -m "" >fake-commit &&
+ git update-ref refs/heads/orphan "$(cat fake-commit)" &&
+ oid=$(git rev-parse HEAD) &&
+ test_must_fail git stash import orphan 2>output &&
+ grep "found stash commit $oid without expected prefix" output &&
+ test_cmp_rev stash@{0} t-stash0 &&
+
+ git checkout --orphan orphan2 &&
+ git commit-tree $(test_oid empty_tree) -m "" >fake-commit &&
+ git update-ref refs/heads/orphan2 "$(cat fake-commit)" &&
+ oid=$(git rev-parse HEAD) &&
+ test_must_fail git stash import orphan2 2>output &&
+ grep "found root commit $oid with invalid data" output &&
+ test_cmp_rev stash@{0} t-stash0
+'
+
test_expect_success 'stash apply should succeed with unmodified file' '
echo base >file &&
git add file &&
@@ -1549,11 +1672,9 @@ test_expect_success 'stash create reports a locked index' '
echo change >A.file &&
touch .git/index.lock &&
- cat >expect <<-EOF &&
- error: could not write index
- EOF
test_must_fail git stash create 2>err &&
- test_cmp expect err
+ test_grep "error: could not write index" err &&
+ test_grep "error: Unable to create '.*index.lock'" err
)
'
@@ -1566,11 +1687,9 @@ test_expect_success 'stash push reports a locked index' '
echo change >A.file &&
touch .git/index.lock &&
- cat >expect <<-EOF &&
- error: could not write index
- EOF
test_must_fail git stash push 2>err &&
- test_cmp expect err
+ test_grep "error: could not write index" err &&
+ test_grep "error: Unable to create '.*index.lock'" err
)
'
@@ -1584,11 +1703,41 @@ test_expect_success 'stash apply reports a locked index' '
git stash push &&
touch .git/index.lock &&
- cat >expect <<-EOF &&
- error: could not write index
- EOF
test_must_fail git stash apply 2>err &&
- test_cmp expect err
+ test_grep "error: could not write index" err &&
+ test_grep "error: Unable to create '.*index.lock'" err
+ )
+'
+
+test_expect_success 'submodules do not affect the branch recorded in stash message' '
+ git init sub_project &&
+ (
+ cd sub_project &&
+ echo "Initial content in sub_project" >sub_file.txt &&
+ git add sub_file.txt &&
+ git commit -m "Initial commit in sub_project"
+ ) &&
+
+ git init main_project &&
+ (
+ cd main_project &&
+ echo "Initial content in main_project" >main_file.txt &&
+ git add main_file.txt &&
+ git commit -m "Initial commit in main_project" &&
+
+ git -c protocol.file.allow=always submodule add ../sub_project sub &&
+ git commit -m "Added submodule sub_project" &&
+
+ git checkout -b feature_main &&
+ git -C sub checkout -b feature_sub &&
+
+ git checkout -b work_branch &&
+ echo "Important work to be stashed" >work_item.txt &&
+ git add work_item.txt &&
+ git stash push -m "custom stash for work_branch" &&
+
+ git stash list >../actual_stash_list.txt &&
+ grep "On work_branch: custom stash for work_branch" ../actual_stash_list.txt
)
'
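The stash export/import tests above boil down to one round trip; a condensed sketch of the new subcommands with the semantics they exercise:

    # Write all stashes to a ref as a chain of commits, clear them, then
    # rebuild the stash reflog from that chain (order preserved, as checked
    # above).
    git stash export --to-ref refs/heads/exported &&
    git stash clear &&
    git stash import refs/heads/exported &&
    # Alternatively, capture a single exported commit without touching refs:
    exported=$(git stash export --print)
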
diff --git a/t/t4000-diff-format.sh b/t/t4000-diff-format.sh
index a51f881b1c..32b14e3a71 100755
--- a/t/t4000-diff-format.sh
+++ b/t/t4000-diff-format.sh
@@ -36,7 +36,7 @@ test_expect_success 'git diff-files -p after editing work tree.' '
# that's as far as it comes
if [ "$(git config --get core.filemode)" = false ]
then
- say 'filemode disabled on the filesystem'
+ skip_all='filemode disabled on the filesystem'
test_done
fi
diff --git a/t/t4013-diff-various.sh b/t/t4013-diff-various.sh
index 782d97fb7d..8ebd170451 100755
--- a/t/t4013-diff-various.sh
+++ b/t/t4013-diff-various.sh
@@ -206,14 +206,30 @@ do
expect="$TEST_DIRECTORY/t4013/diff.$test"
actual="$pfx-diff.$test"
- test_expect_success "git $cmd # magic is ${magic:-(not used)}" '
+ case "$cmd" in
+ whatchanged | whatchanged" "*)
+ prereq=!WITH_BREAKING_CHANGES
+ ;;
+ *)
+ prereq=;;
+ esac
+
+ test_expect_success $prereq "git $cmd # magic is ${magic:-(not used)}" '
{
echo "$ git $cmd"
+
+ case "$cmd" in
+ whatchanged | whatchanged" "*)
+ run="whatchanged --i-still-use-this"
+ run="$run ${cmd#whatchanged}" ;;
+ *)
+ run=$cmd ;;
+ esac &&
case "$magic" in
"")
- GIT_PRINT_SHA1_ELLIPSIS=yes git $cmd ;;
+ GIT_PRINT_SHA1_ELLIPSIS=yes git $run ;;
noellipses)
- git $cmd ;;
+ git $run ;;
esac |
sed -e "s/^\\(-*\\)$V\\(-*\\)\$/\\1g-i-t--v-e-r-s-i-o-n\2/" \
-e "s/^\\(.*mixed; boundary=\"-*\\)$V\\(-*\\)\"\$/\\1g-i-t--v-e-r-s-i-o-n\2\"/"
@@ -460,6 +476,11 @@ diff-tree --stat --compact-summary initial mode
diff-tree -R --stat --compact-summary initial mode
EOF
+test_expect_success !WITH_BREAKING_CHANGES 'whatchanged needs --i-still-use-this' '
+ test_must_fail git whatchanged >message 2>&1 &&
+ test_grep "nominated for removal" message
+'
+
test_expect_success 'log -m matches pure log' '
git log master >result &&
process_diffs result >expected &&
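Outside a WITH_BREAKING_CHANGES build the command still exists, but only behind the escape hatch these tests now spell out; the explicit invocation, alongside the replacement that t4202 below verifies produces identical output:

    git whatchanged --i-still-use-this
    git log --no-merges --raw
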
diff --git a/t/t4018/r-indent b/t/t4018/r-indent
new file mode 100644
index 0000000000..9df440f2a4
--- /dev/null
+++ b/t/t4018/r-indent
@@ -0,0 +1,6 @@
+RIGHT <- function(a, b) {
+ c = mean(a, b)
+ d = c + 2
+ ChangeMe()
+ return (d)
+}
diff --git a/t/t4018/r-indent-nested b/t/t4018/r-indent-nested
new file mode 100644
index 0000000000..30412e6c79
--- /dev/null
+++ b/t/t4018/r-indent-nested
@@ -0,0 +1,10 @@
+LEFT = function(a, b) {
+ c = mean(a, b)
+ RIGHT = function(d, e) {
+ f = var(d, e)
+ g = f + 1
+ ChangeMe()
+ return (g)
+ }
+ return (RIGHT(2, 3))
+}
diff --git a/t/t4018/r-noindent b/t/t4018/r-noindent
new file mode 100644
index 0000000000..6d9b01ffe3
--- /dev/null
+++ b/t/t4018/r-noindent
@@ -0,0 +1,6 @@
+RIGHT <- function(a, b) {
+c = mean(a, b)
+d = c + 2
+ChangeMe()
+return (c)
+}
diff --git a/t/t4041-diff-submodule-option.sh b/t/t4041-diff-submodule-option.sh
index 28f9d83d4c..4d4aa1650f 100755
--- a/t/t4041-diff-submodule-option.sh
+++ b/t/t4041-diff-submodule-option.sh
@@ -48,11 +48,12 @@ commit_file () {
git commit "$@" -m "Commit $*" >/dev/null
}
-test_create_repo sm1 &&
-add_file . foo >/dev/null
-
-head1=$(add_file sm1 foo1 foo2)
-fullhead1=$(cd sm1; git rev-parse --verify HEAD)
+test_expect_success 'setup submodule' '
+ git init sm1 &&
+ add_file . foo &&
+ head1=$(add_file sm1 foo1 foo2) &&
+ fullhead1=$(cd sm1 && git rev-parse --verify HEAD)
+'
test_expect_success 'added submodule' '
git add sm1 &&
@@ -235,10 +236,13 @@ test_expect_success 'typechanged submodule(submodule->blob)' '
test_cmp expected actual
'
-rm -f sm1 &&
-test_create_repo sm1 &&
-head6=$(add_file sm1 foo6 foo7)
-fullhead6=$(cd sm1; git rev-parse --verify HEAD)
+test_expect_success 'setup submodule anew' '
+ rm -f sm1 &&
+ git init sm1 &&
+ head6=$(add_file sm1 foo6 foo7) &&
+ fullhead6=$(cd sm1 && git rev-parse --verify HEAD)
+'
+
test_expect_success 'nonexistent commit' '
git diff-index -p --submodule=log HEAD >actual &&
cat >expected <<-EOF &&
diff --git a/t/t4042-diff-textconv-caching.sh b/t/t4042-diff-textconv-caching.sh
index ff0e73531b..31018ceba2 100755
--- a/t/t4042-diff-textconv-caching.sh
+++ b/t/t4042-diff-textconv-caching.sh
@@ -120,6 +120,14 @@ test_expect_success 'log notes cache and still use cache for -p' '
'
test_expect_success 'caching is silently ignored outside repo' '
+ test_oid_cache <<-\EOM &&
+ oid1 sha1:5626abf
+ oid1 sha256:a4ed1f3
+ oid2 sha1:f719efd
+ oid2 sha256:aa9e7dc
+ EOM
+ oid1=$(test_oid --hash=builtin oid1) &&
+ oid2=$(test_oid --hash=builtin oid2) &&
mkdir -p non-repo &&
echo one >non-repo/one &&
echo two >non-repo/two &&
@@ -129,9 +137,9 @@ test_expect_success 'caching is silently ignored outside repo' '
-c diff.test.textconv="tr a-z A-Z <" \
-c diff.test.cachetextconv=true \
diff --no-index one two >actual &&
- cat >expect <<-\EOF &&
+ cat >expect <<-EOF &&
diff --git a/one b/two
- index 5626abf..f719efd 100644
+ index $oid1..$oid2 100644
--- a/one
+++ b/two
@@ -1 +1 @@
diff --git a/t/t4053-diff-no-index.sh b/t/t4053-diff-no-index.sh
index 5e5bad61ca..01db9243ab 100755
--- a/t/t4053-diff-no-index.sh
+++ b/t/t4053-diff-no-index.sh
@@ -295,4 +295,79 @@ test_expect_success PIPE,SYMLINKS 'diff --no-index reads from pipes' '
test_cmp expect actual
'
+test_expect_success 'diff --no-index F F rejects pathspecs' '
+ test_must_fail git diff --no-index -- a/1 a/2 a 2>actual.err &&
+ test_grep "usage: git diff --no-index" actual.err
+'
+
+test_expect_success 'diff --no-index D F rejects pathspecs' '
+ test_must_fail git diff --no-index -- a a/2 a 2>actual.err &&
+ test_grep "usage: git diff --no-index" actual.err
+'
+
+test_expect_success 'diff --no-index F D rejects pathspecs' '
+ test_must_fail git diff --no-index -- a/1 b b 2>actual.err &&
+ test_grep "usage: git diff --no-index" actual.err
+'
+
+test_expect_success 'diff --no-index rejects absolute pathspec' '
+ test_must_fail git diff --no-index -- a b $(pwd)/a/1
+'
+
+test_expect_success 'diff --no-index with pathspec' '
+ test_expect_code 1 git diff --name-status --no-index a b 1 >actual &&
+ cat >expect <<-EOF &&
+ D a/1
+ EOF
+ test_cmp expect actual
+'
+
+test_expect_success 'diff --no-index with pathspec no matches' '
+ test_expect_code 0 git diff --name-status --no-index a b missing
+'
+
+test_expect_success 'diff --no-index with negative pathspec' '
+ test_expect_code 1 git diff --name-status --no-index a b ":!2" >actual &&
+ cat >expect <<-EOF &&
+ D a/1
+ EOF
+ test_cmp expect actual
+'
+
+test_expect_success 'setup nested' '
+ mkdir -p c/1/2 &&
+ mkdir -p d/1/2 &&
+ echo 1 >c/1/2/a &&
+ echo 2 >c/1/2/b
+'
+
+test_expect_success 'diff --no-index with pathspec nested negative pathspec' '
+ test_expect_code 0 git diff --no-index c d ":!1"
+'
+
+test_expect_success 'diff --no-index with pathspec nested pathspec' '
+ test_expect_code 1 git diff --name-status --no-index c d 1/2 >actual &&
+ cat >expect <<-EOF &&
+ D c/1/2/a
+ D c/1/2/b
+ EOF
+ test_cmp expect actual
+'
+
+test_expect_success 'diff --no-index with pathspec glob' '
+ test_expect_code 1 git diff --name-status --no-index c d ":(glob)**/a" >actual &&
+ cat >expect <<-EOF &&
+ D c/1/2/a
+ EOF
+ test_cmp expect actual
+'
+
+test_expect_success 'diff --no-index with pathspec glob and exclude' '
+ test_expect_code 1 git diff --name-status --no-index c d ":(glob,exclude)**/a" >actual &&
+ cat >expect <<-EOF &&
+ D c/1/2/b
+ EOF
+ test_cmp expect actual
+'
+
test_done
diff --git a/t/t4060-diff-submodule-option-diff-format.sh b/t/t4060-diff-submodule-option-diff-format.sh
index 76b83101d3..dbfeb7470b 100755
--- a/t/t4060-diff-submodule-option-diff-format.sh
+++ b/t/t4060-diff-submodule-option-diff-format.sh
@@ -363,9 +363,12 @@ test_expect_success 'typechanged submodule(submodule->blob)' '
diff_cmp expected actual
'
-rm -f sm1 &&
-test_create_repo sm1 &&
-head6=$(add_file sm1 foo6 foo7)
+test_expect_success 'setup' '
+ rm -f sm1 &&
+ git init sm1 &&
+ head6=$(add_file sm1 foo6 foo7)
+'
+
test_expect_success 'nonexistent commit' '
git diff-index -p --submodule=diff HEAD >actual &&
cat >expected <<-EOF &&
diff --git a/t/t4140-apply-ita.sh b/t/t4140-apply-ita.sh
index c614eaf04c..0b11a8aef4 100755
--- a/t/t4140-apply-ita.sh
+++ b/t/t4140-apply-ita.sh
@@ -7,6 +7,10 @@ test_description='git apply of i-t-a file'
test_expect_success setup '
test_write_lines 1 2 3 4 5 >blueprint &&
+ cat blueprint >committed-file &&
+ git add committed-file &&
+ git commit -m "commit" &&
+
cat blueprint >test-file &&
git add -N test-file &&
git diff >creation-patch &&
@@ -14,7 +18,14 @@ test_expect_success setup '
rm -f test-file &&
git diff >deletion-patch &&
- grep "deleted file mode 100644" deletion-patch
+ grep "deleted file mode 100644" deletion-patch &&
+
+ git rm -f test-file &&
+ test_write_lines 6 >>committed-file &&
+ cat blueprint >test-file &&
+ git add -N test-file &&
+ git diff >complex-patch &&
+ git restore committed-file
'
test_expect_success 'apply creation patch to ita path (--cached)' '
@@ -53,4 +64,22 @@ test_expect_success 'apply deletion patch to ita path (--index)' '
git ls-files --stage --error-unmatch test-file
'
+test_expect_success 'apply creation patch to existing index with -N' '
+ git rm -f test-file &&
+ cat blueprint >index-file &&
+ git add index-file &&
+ git apply -N creation-patch &&
+
+ git ls-files --stage --error-unmatch index-file &&
+ git ls-files --stage --error-unmatch test-file
+'
+
+test_expect_success 'apply complex patch with -N' '
+ git rm -f test-file index-file &&
+ git apply -N complex-patch &&
+
+ git ls-files --stage --error-unmatch test-file &&
+ git diff | grep "a/committed-file"
+'
+
test_done
diff --git a/t/t4150-am.sh b/t/t4150-am.sh
index 2ae93d3c96..699a81ab5c 100755
--- a/t/t4150-am.sh
+++ b/t/t4150-am.sh
@@ -1086,7 +1086,7 @@ test_expect_success 'am works with multi-line in-body headers' '
# bump from, date, and subject down to in-body header
awk "
/^From:/{
- print \"From: x <x\@example.com>\";
+ print \"From: x <x@example.com>\";
print \"Date: Sat, 1 Jan 2000 00:00:00 +0000\";
print \"Subject: x\n\";
}; 1
diff --git a/t/t4202-log.sh b/t/t4202-log.sh
index 51f7beb59f..05cee9e41b 100755
--- a/t/t4202-log.sh
+++ b/t/t4202-log.sh
@@ -134,6 +134,12 @@ test_expect_success 'diff-filter=D' '
'
+test_expect_success 'all-negative filter' '
+ git log --no-renames --format=%s --diff-filter=d HEAD >actual &&
+ printf "%s\n" fifth fourth third second initial >expect &&
+ test_cmp expect actual
+'
+
test_expect_success 'diff-filter=R' '
git log -M --pretty="format:%s" --diff-filter=R HEAD >actual &&
@@ -486,10 +492,16 @@ test_expect_success !FAIL_PREREQS 'log with various grep.patternType configurati
)
'
-for cmd in show whatchanged reflog format-patch
+cmds="show reflog format-patch"
+if test_have_prereq !WITH_BREAKING_CHANGES
+then
+ cmds="$cmds whatchanged"
+fi
+for cmd in $cmds
do
case "$cmd" in
format-patch) myarg="HEAD~.." ;;
+ whatchanged) myarg=--i-still-use-this ;;
*) myarg= ;;
esac
@@ -1201,20 +1213,27 @@ test_expect_success 'reflog is expected format' '
test_cmp expect actual
'
-test_expect_success 'whatchanged is expected format' '
+test_expect_success !WITH_BREAKING_CHANGES 'whatchanged is expected format' '
+ whatchanged="whatchanged --i-still-use-this" &&
git log --no-merges --raw >expect &&
- git whatchanged >actual &&
+ git $whatchanged >actual &&
test_cmp expect actual
'
test_expect_success 'log.abbrevCommit configuration' '
+ whatchanged="whatchanged --i-still-use-this" &&
+
git log --abbrev-commit >expect.log.abbrev &&
git log --no-abbrev-commit >expect.log.full &&
git log --pretty=raw >expect.log.raw &&
git reflog --abbrev-commit >expect.reflog.abbrev &&
git reflog --no-abbrev-commit >expect.reflog.full &&
- git whatchanged --abbrev-commit >expect.whatchanged.abbrev &&
- git whatchanged --no-abbrev-commit >expect.whatchanged.full &&
+
+ if test_have_prereq !WITH_BREAKING_CHANGES
+ then
+ git $whatchanged --abbrev-commit >expect.whatchanged.abbrev &&
+ git $whatchanged --no-abbrev-commit >expect.whatchanged.full
+ fi &&
test_config log.abbrevCommit true &&
@@ -1231,10 +1250,13 @@ test_expect_success 'log.abbrevCommit configuration' '
git reflog --no-abbrev-commit >actual &&
test_cmp expect.reflog.full actual &&
- git whatchanged >actual &&
- test_cmp expect.whatchanged.abbrev actual &&
- git whatchanged --no-abbrev-commit >actual &&
- test_cmp expect.whatchanged.full actual
+ if test_have_prereq !WITH_BREAKING_CHANGES
+ then
+ git $whatchanged >actual &&
+ test_cmp expect.whatchanged.abbrev actual &&
+ git $whatchanged --no-abbrev-commit >actual &&
+ test_cmp expect.whatchanged.full actual
+ fi
'
test_expect_success '--abbrev-commit with core.abbrev=false' '
diff --git a/t/t4203-mailmap.sh b/t/t4203-mailmap.sh
index 4a6242ff99..74b7ddccb2 100755
--- a/t/t4203-mailmap.sh
+++ b/t/t4203-mailmap.sh
@@ -1133,4 +1133,37 @@ test_expect_success 'git cat-file --batch-command returns correct size with --us
test_cmp expect actual
'
+test_expect_success 'git cat-file --mailmap works with different author and committer' '
+ test_when_finished "rm .mailmap" &&
+ cat >.mailmap <<-\EOF &&
+ Mailmapped User <mailmapped-user@gitlab.com> C O Mitter <committer@example.com>
+ EOF
+ git commit --allow-empty -m "different author/committer" \
+ --author="Different Author <different@example.com>" &&
+ cat >expect <<-\EOF &&
+ author Different Author <different@example.com>
+ committer Mailmapped User <mailmapped-user@gitlab.com>
+ EOF
+ git cat-file --mailmap commit HEAD >log &&
+ sed -n -e "/^author /s/>.*/>/p" -e "/^committer /s/>.*/>/p" log >actual &&
+ test_cmp expect actual
+'
+
+test_expect_success 'git cat-file --mailmap maps both author and committer when both need mapping' '
+ test_when_finished "rm .mailmap" &&
+ cat >.mailmap <<-\EOF &&
+ Mapped Author <mapped-author@example.com> <different@example.com>
+ Mapped Committer <mapped-committer@example.com> C O Mitter <committer@example.com>
+ EOF
+ git commit --allow-empty -m "both author and committer mapped" \
+ --author="Different Author <different@example.com>" &&
+ cat >expect <<-\EOF &&
+ author Mapped Author <mapped-author@example.com>
+ committer Mapped Committer <mapped-committer@example.com>
+ EOF
+ git cat-file --mailmap commit HEAD >log &&
+ sed -n -e "/^author /s/>.*/>/p" -e "/^committer /s/>.*/>/p" log >actual &&
+ test_cmp expect actual
+'
+
test_done
diff --git a/t/t4216-log-bloom.sh b/t/t4216-log-bloom.sh
index 8910d53cac..639868ac56 100755
--- a/t/t4216-log-bloom.sh
+++ b/t/t4216-log-bloom.sh
@@ -66,8 +66,9 @@ sane_unset GIT_TRACE2_CONFIG_PARAMS
setup () {
rm -f "$TRASH_DIRECTORY/trace.perf" &&
- git -c core.commitGraph=false log --pretty="format:%s" $1 >log_wo_bloom &&
- GIT_TRACE2_PERF="$TRASH_DIRECTORY/trace.perf" git -c core.commitGraph=true log --pretty="format:%s" $1 >log_w_bloom
+ eval git -c core.commitGraph=false log --pretty="format:%s" "$1" >log_wo_bloom &&
+ eval "GIT_TRACE2_PERF=\"$TRASH_DIRECTORY/trace.perf\"" \
+ git -c core.commitGraph=true log --pretty="format:%s" "$1" >log_w_bloom
}
test_bloom_filters_used () {
@@ -138,10 +139,6 @@ test_expect_success 'git log with --walk-reflogs does not use Bloom filters' '
test_bloom_filters_not_used "--walk-reflogs -- A"
'
-test_expect_success 'git log -- multiple path specs does not use Bloom filters' '
- test_bloom_filters_not_used "-- file4 A/file1"
-'
-
test_expect_success 'git log -- "." pathspec at root does not use Bloom filters' '
test_bloom_filters_not_used "-- ."
'
@@ -151,9 +148,17 @@ test_expect_success 'git log with wildcard that resolves to a single path uses B
test_bloom_filters_used "-- *renamed"
'
-test_expect_success 'git log with wildcard that resolves to a multiple paths does not uses Bloom filters' '
- test_bloom_filters_not_used "-- *" &&
- test_bloom_filters_not_used "-- file*"
+test_expect_success 'git log with multiple literal paths uses Bloom filters' '
+ test_bloom_filters_used "-- file4 A/file1" &&
+ test_bloom_filters_used "-- *" &&
+ test_bloom_filters_used "-- file*"
+'
+
+test_expect_success 'git log with a path that contains a wildcard does not use Bloom filters' '
+ test_bloom_filters_not_used "-- file\*" &&
+ test_bloom_filters_not_used "-- A/\* file4" &&
+ test_bloom_filters_not_used "-- file4 A/\*" &&
+ test_bloom_filters_not_used "-- * A/\*"
'
test_expect_success 'setup - add commit-graph to the chain without Bloom filters' '
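The distinction drawn above is between literal paths, which can be checked against the commit-graph's changed-path Bloom filters even when several are given, and pathspecs that git itself has to treat as wildcards; a sketch, assuming the same repository layout as these tests:

    # The shell expands an unquoted glob, so git sees literal paths and the
    # Bloom filters are consulted:
    git log -- file4 A/file1
    git log -- file*
    # Quoting the glob hands git a wildcard pathspec, which skips the filters:
    git log -- "file*"
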
diff --git a/t/t5004-archive-corner-cases.sh b/t/t5004-archive-corner-cases.sh
index 5174995191..027dedd976 100755
--- a/t/t5004-archive-corner-cases.sh
+++ b/t/t5004-archive-corner-cases.sh
@@ -176,10 +176,7 @@ test_expect_success EXPENSIVE,UNZIP,UNZIP_ZIP64_SUPPORT \
blob=$(echo $s | git hash-object -w --stdin) &&
# create tree containing 65500 entries of that blob
- for i in $(test_seq 1 65500)
- do
- echo "100644 blob $blob $i" || return 1
- done >tree &&
+ test_seq -f "100644 blob $blob\t%d" 1 65500 >tree &&
tree=$(git mktree <tree) &&
# zip it, creating an archive a bit bigger than 4GB
diff --git a/t/t5300-pack-object.sh b/t/t5300-pack-object.sh
index a5932b6a8b..73445782e7 100755
--- a/t/t5300-pack-object.sh
+++ b/t/t5300-pack-object.sh
@@ -525,7 +525,7 @@ test_expect_success 'index-pack --strict <pack> works in non-repo' '
test_path_is_file foo.idx
'
-test_expect_success SHA1 'show-index works OK outside a repository' '
+test_expect_success DEFAULT_HASH_ALGORITHM 'show-index works OK outside a repository' '
nongit git show-index <foo.idx
'
@@ -658,7 +658,7 @@ do
test_commit -C repo initial &&
git -C repo repack -ad &&
git -C repo verify-pack "$(pwd)"/repo/.git/objects/pack/*.idx &&
- if test $hash = sha1
+ if test $hash = $GIT_TEST_BUILTIN_HASH
then
nongit git verify-pack "$(pwd)"/repo/.git/objects/pack/*.idx
else
@@ -676,7 +676,7 @@ do
test_commit -C repo initial &&
git -C repo repack -ad &&
git -C repo index-pack --verify "$(pwd)"/repo/.git/objects/pack/*.pack &&
- if test $hash = sha1
+ if test $hash = $GIT_TEST_BUILTIN_HASH
then
nongit git index-pack --verify "$(pwd)"/repo/.git/objects/pack/*.pack
else
@@ -723,4 +723,23 @@ test_expect_success '--name-hash-version=2 and --write-bitmap-index are incompat
! test_grep "currently, --write-bitmap-index requires --name-hash-version=1" err
'
+test_expect_success '--path-walk pack everything' '
+ git -C server rev-parse HEAD >in &&
+ GIT_PROGRESS_DELAY=0 git -C server pack-objects \
+ --stdout --revs --path-walk --progress <in >out.pack 2>err &&
+ grep "Compressing objects by path" err &&
+ git -C server index-pack --stdin <out.pack
+'
+
+test_expect_success '--path-walk thin pack' '
+ cat >in <<-EOF &&
+ $(git -C server rev-parse HEAD)
+ ^$(git -C server rev-parse HEAD~2)
+ EOF
+ GIT_PROGRESS_DELAY=0 git -C server pack-objects \
+ --thin --stdout --revs --path-walk --progress <in >out.pack 2>err &&
+ grep "Compressing objects by path" err &&
+ git -C server index-pack --fix-thin --stdin <out.pack
+'
+
test_done
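Both new tests drive the same option; a condensed sketch of the thin-pack case, mirroring the second test:

    {
        git rev-parse HEAD &&
        echo "^$(git rev-parse HEAD~2)"
    } >in &&
    git pack-objects --thin --stdout --revs --path-walk <in >out.pack &&
    git index-pack --fix-thin --stdin <out.pack
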
diff --git a/t/t5306-pack-nobase.sh b/t/t5306-pack-nobase.sh
index 805d60ff31..609399d54f 100755
--- a/t/t5306-pack-nobase.sh
+++ b/t/t5306-pack-nobase.sh
@@ -59,6 +59,11 @@ test_expect_success 'indirectly clone patch_clone' '
git pull ../.git &&
test $(git rev-parse HEAD) = $B &&
+ # The --path-walk feature of "git pack-objects" is not
+ # compatible with this kind of fetch from an incomplete repo.
+ GIT_TEST_PACK_PATH_WALK=0 &&
+ export GIT_TEST_PACK_PATH_WALK &&
+
git pull ../patch_clone/.git &&
test $(git rev-parse HEAD) = $C
)
diff --git a/t/t5310-pack-bitmaps.sh b/t/t5310-pack-bitmaps.sh
index a62b463eaf..6718fb98c0 100755
--- a/t/t5310-pack-bitmaps.sh
+++ b/t/t5310-pack-bitmaps.sh
@@ -158,8 +158,9 @@ test_bitmap_cases () {
ls .git/objects/pack/ | grep bitmap >output &&
test_line_count = 1 output &&
# verify equivalent packs are generated with/without using bitmap index
- packasha1=$(git pack-objects --no-use-bitmap-index --all packa </dev/null) &&
- packbsha1=$(git pack-objects --use-bitmap-index --all packb </dev/null) &&
+ # Be careful not to use the path-walk option in either case.
+ packasha1=$(git pack-objects --no-use-bitmap-index --no-path-walk --all packa </dev/null) &&
+ packbsha1=$(git pack-objects --use-bitmap-index --no-path-walk --all packb </dev/null) &&
list_packed_objects packa-$packasha1.idx >packa.objects &&
list_packed_objects packb-$packbsha1.idx >packb.objects &&
test_cmp packa.objects packb.objects
@@ -388,6 +389,14 @@ test_bitmap_cases () {
git init --bare client.git &&
(
cd client.git &&
+
+ # This test relies on reusing a delta, but if the
+ # path-walk machinery is engaged, the base object
+ # is considered too small to use during the
+ # dynamic computation, so is not used.
+ GIT_TEST_PACK_PATH_WALK=0 &&
+ export GIT_TEST_PACK_PATH_WALK &&
+
git config transfer.unpackLimit 1 &&
git fetch .. delta-reuse-old:delta-reuse-old &&
git fetch .. delta-reuse-new:delta-reuse-new &&
@@ -486,6 +495,36 @@ test_bitmap_cases () {
grep "ignoring extra bitmap" trace2.txt
)
'
+
+ test_expect_success 'load corrupt bitmap' '
+ rm -fr repo &&
+ git init repo &&
+ test_when_finished "rm -fr repo" &&
+ (
+ cd repo &&
+ git config pack.writeBitmapLookupTable '"$writeLookupTable"' &&
+
+ test_commit base &&
+
+ git repack -adb &&
+ bitmap="$(ls .git/objects/pack/pack-*.bitmap)" &&
+ chmod +w $bitmap &&
+
+ test-tool bitmap list-commits-with-offset >offsets &&
+ xor_off=$(head -n1 offsets | awk "{print \$3}") &&
+ printf "\161" |
+ dd of=$bitmap count=1 bs=1 conv=notrunc seek=$xor_off &&
+
+ git rev-list --objects --no-object-names HEAD >expect.raw &&
+ git rev-list --objects --use-bitmap-index --no-object-names HEAD \
+ >actual.raw &&
+
+ sort expect.raw >expect &&
+ sort actual.raw >actual &&
+
+ test_cmp expect actual
+ )
+ '
}
test_bitmap_cases
diff --git a/t/t5316-pack-delta-depth.sh b/t/t5316-pack-delta-depth.sh
index defaa06d65..03dfb7a61e 100755
--- a/t/t5316-pack-delta-depth.sh
+++ b/t/t5316-pack-delta-depth.sh
@@ -89,15 +89,18 @@ max_chain() {
# adjusted (or scrapped if the heuristics have become too unreliable)
test_expect_success 'packing produces a long delta' '
# Use --window=0 to make sure we are seeing reused deltas,
- # not computing a new long chain.
- pack=$(git pack-objects --all --window=0 </dev/null pack) &&
+ # not computing a new long chain. (Also avoid the --path-walk
+ # option as it may break delta chains.)
+ pack=$(git pack-objects --all --window=0 --no-path-walk </dev/null pack) &&
echo 9 >expect &&
max_chain pack-$pack.pack >actual &&
test_cmp expect actual
'
test_expect_success '--depth limits depth' '
- pack=$(git pack-objects --all --depth=5 </dev/null pack) &&
+ # Avoid --path-walk to avoid breaking delta chains across path
+ # boundaries.
+ pack=$(git pack-objects --all --depth=5 --no-path-walk </dev/null pack) &&
echo 5 >expect &&
max_chain pack-$pack.pack >actual &&
test_cmp expect actual
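The path-walk opt-out appears in two forms across these tests: the --no-path-walk option used here and the GIT_TEST_PACK_PATH_WALK=0 export used earlier; a sketch of both, assuming the environment knob is honored as a test-only override:

    # Per invocation, on the command line:
    git pack-objects --all --window=0 --no-path-walk </dev/null pack
    # Or for everything that follows in a test body:
    GIT_TEST_PACK_PATH_WALK=0 &&
    export GIT_TEST_PACK_PATH_WALK
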
diff --git a/t/t5323-pack-redundant.sh b/t/t5323-pack-redundant.sh
index bc30bc9652..2d96afd6f7 100755
--- a/t/t5323-pack-redundant.sh
+++ b/t/t5323-pack-redundant.sh
@@ -45,6 +45,11 @@ fi
main_repo=main.git
shared_repo=shared.git
+test_expect_success 'pack-redundant needs --i-still-use-this' '
+ test_must_fail git pack-redundant >message 2>&1 &&
+ test_grep "nominated for removal" message
+'
+
git_pack_redundant='git pack-redundant --i-still-use-this'
# Create commits in <repo> and assign each commit's oid to shell variables
diff --git a/t/t5331-pack-objects-stdin.sh b/t/t5331-pack-objects-stdin.sh
index b48c0cbe8f..4a8df5a389 100755
--- a/t/t5331-pack-objects-stdin.sh
+++ b/t/t5331-pack-objects-stdin.sh
@@ -64,7 +64,7 @@ test_expect_success '--stdin-packs is incompatible with --filter' '
cd stdin-packs &&
test_must_fail git pack-objects --stdin-packs --stdout \
--filter=blob:none </dev/null 2>err &&
- test_grep "cannot use --filter with --stdin-packs" err
+ test_grep "options .--stdin-packs. and .--filter. cannot be used together" err
)
'
@@ -236,4 +236,124 @@ test_expect_success 'pack-objects --stdin with packfiles from main and alternate
test_cmp expected-objects actual-objects
'
+objdir=.git/objects
+packdir=$objdir/pack
+
+objects_in_packs () {
+ for p in "$@"
+ do
+ git show-index <"$packdir/pack-$p.idx" || return 1
+ done >objects.raw &&
+
+ cut -d' ' -f2 objects.raw | sort &&
+ rm -f objects.raw
+}
+
+test_expect_success '--stdin-packs=follow walks into unknown packs' '
+ test_when_finished "rm -fr repo" &&
+
+ git init repo &&
+ (
+ cd repo &&
+
+ for c in A B C D
+ do
+ test_commit "$c" || return 1
+ done &&
+
+ A="$(echo A | git pack-objects --revs $packdir/pack)" &&
+ B="$(echo A..B | git pack-objects --revs $packdir/pack)" &&
+ C="$(echo B..C | git pack-objects --revs $packdir/pack)" &&
+ D="$(echo C..D | git pack-objects --revs $packdir/pack)" &&
+ test_commit E &&
+
+ git prune-packed &&
+
+ cat >in <<-EOF &&
+ pack-$B.pack
+ ^pack-$C.pack
+ pack-$D.pack
+ EOF
+
+ # With just --stdin-packs, pack "A" is unknown to us, so
+ # only objects from packs "B" and "D" are included in
+ # the output pack.
+ P=$(git pack-objects --stdin-packs $packdir/pack <in) &&
+ objects_in_packs $B $D >expect &&
+ objects_in_packs $P >actual &&
+ test_cmp expect actual &&
+
+ # But with --stdin-packs=follow, objects from both
+ # included packs reach objects from the unknown pack, so
+		# objects from pack "A" are included in the output pack
+ # in addition to the above.
+ P=$(git pack-objects --stdin-packs=follow $packdir/pack <in) &&
+ objects_in_packs $A $B $D >expect &&
+ objects_in_packs $P >actual &&
+ test_cmp expect actual &&
+
+ # And with --unpacked, we will pick up objects from unknown
+ # packs that are reachable from loose objects. Loose object E
+ # reaches objects in pack A, but there are three excluded packs
+ # in between.
+ #
+ # The resulting pack should include objects reachable from E
+ # that are not present in packs B, C, or D, along with those
+ # present in pack A.
+ cat >in <<-EOF &&
+ ^pack-$B.pack
+ ^pack-$C.pack
+ ^pack-$D.pack
+ EOF
+
+ P=$(git pack-objects --stdin-packs=follow --unpacked \
+ $packdir/pack <in) &&
+
+ {
+ objects_in_packs $A &&
+ git rev-list --objects --no-object-names D..E
+ }>expect.raw &&
+ sort expect.raw >expect &&
+ objects_in_packs $P >actual &&
+ test_cmp expect actual
+ )
+'
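
Outside the test harness, the same flow boils down to listing pack names on
stdin, one per line, with a leading "^" marking packs whose objects should be
excluded. A minimal sketch of the invocation exercised above (pack names are
placeholders):

    {
        echo "pack-<included>.pack"
        echo "^pack-<excluded>.pack"
    } |
    git pack-objects --stdin-packs=follow .git/objects/pack/pack
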
+
+stdin_packs__follow_with_only () {
+ rm -fr stdin_packs__follow_with_only &&
+ git init stdin_packs__follow_with_only &&
+ (
+ cd stdin_packs__follow_with_only &&
+
+ test_commit A &&
+ test_commit B &&
+
+ git rev-parse "$@" >B.objects &&
+
+ echo A | git pack-objects --revs $packdir/pack &&
+ B="$(git pack-objects $packdir/pack <B.objects)" &&
+
+ git cat-file --batch-check="%(objectname)" --batch-all-objects >objs &&
+ for obj in $(cat objs)
+ do
+ rm -f $objdir/$(test_oid_to_path $obj) || return 1
+ done &&
+
+ ( cd $packdir && ls pack-*.pack ) >in &&
+ git pack-objects --stdin-packs=follow --stdout >/dev/null <in
+ )
+}
+
+test_expect_success '--stdin-packs=follow tolerates missing blobs' '
+ stdin_packs__follow_with_only HEAD HEAD^{tree}
+'
+
+test_expect_success '--stdin-packs=follow tolerates missing trees' '
+ stdin_packs__follow_with_only HEAD HEAD:B.t
+'
+
+test_expect_success '--stdin-packs=follow tolerates missing commits' '
+	stdin_packs__follow_with_only HEAD^{tree} HEAD:B.t
+'
+
test_done
diff --git a/t/t5332-multi-pack-reuse.sh b/t/t5332-multi-pack-reuse.sh
index 57cad7708f..395d09444c 100755
--- a/t/t5332-multi-pack-reuse.sh
+++ b/t/t5332-multi-pack-reuse.sh
@@ -7,6 +7,13 @@ test_description='pack-objects multi-pack reuse'
GIT_TEST_MULTI_PACK_INDEX=0
GIT_TEST_MULTI_PACK_INDEX_WRITE_INCREMENTAL=0
+
+# The --path-walk option does not consider the preferred pack
+# at all for reusing deltas, so this variable changes the
+# behavior of this test, if enabled.
+GIT_TEST_PACK_PATH_WALK=0
+export GIT_TEST_PACK_PATH_WALK
+
objdir=.git/objects
packdir=$objdir/pack
diff --git a/t/t5333-pseudo-merge-bitmaps.sh b/t/t5333-pseudo-merge-bitmaps.sh
index 56674db562..1f7a5d82ee 100755
--- a/t/t5333-pseudo-merge-bitmaps.sh
+++ b/t/t5333-pseudo-merge-bitmaps.sh
@@ -234,8 +234,8 @@ test_expect_success 'pseudo-merge pattern with capture groups' '
test_commit_bulk 16 &&
git rev-list HEAD~16.. >in &&
- sed "s|\(.*\)|create refs/remotes/$r/tags/\1 \1" in |
- git update-ref --stdin || return 1
+ sed "s|\(.*\)|create refs/remotes/$r/tags/\1 \1|" in >refs &&
+ git update-ref --stdin <refs || return 1
done &&
git \
@@ -445,4 +445,21 @@ test_expect_success 'pseudo-merge closure' '
)
'
+test_expect_success 'use pseudo-merge in boundary traversal' '
+ git init pseudo-merge-boundary-traversal &&
+ (
+ cd pseudo-merge-boundary-traversal &&
+
+ git config bitmapPseudoMerge.test.pattern refs/ &&
+ git config pack.useBitmapBoundaryTraversal true &&
+
+ test_commit A &&
+ git repack -adb &&
+ test_commit B &&
+
+ nr=$(git rev-list --count --use-bitmap-index HEAD~1..HEAD) &&
+ test 1 -eq "$nr"
+ )
+'
+
test_done
diff --git a/t/t5408-send-pack-stdin.sh b/t/t5408-send-pack-stdin.sh
index 526a675045..ec339761c2 100755
--- a/t/t5408-send-pack-stdin.sh
+++ b/t/t5408-send-pack-stdin.sh
@@ -69,15 +69,24 @@ test_expect_success 'stdin mixed with cmdline' '
test_expect_success 'cmdline refs written in order' '
clear_remote &&
- test_must_fail git send-pack remote.git A:foo B:foo &&
- verify_push A foo
+ test_must_fail git send-pack remote.git A:foo B:foo 2>err &&
+ test_grep "multiple updates for ref ${SQ}refs/heads/foo${SQ} not allowed" err &&
+ test_must_fail git --git-dir=remote.git rev-parse foo
+'
+
+test_expect_success 'cmdline refs with multiple duplicates' '
+ clear_remote &&
+ test_must_fail git send-pack remote.git A:foo B:foo C:foo 2>err &&
+ test_grep "multiple updates for ref ${SQ}refs/heads/foo${SQ} not allowed" err &&
+ test_must_fail git --git-dir=remote.git rev-parse foo
'
test_expect_success '--stdin refs come after cmdline' '
clear_remote &&
echo A:foo >input &&
-	test_must_fail git send-pack remote.git --stdin B:foo <input &&
+	test_must_fail git send-pack remote.git --stdin B:foo <input 2>err &&
- verify_push B foo
+ test_grep "multiple updates for ref ${SQ}refs/heads/foo${SQ} not allowed" err &&
+ test_must_fail git --git-dir=remote.git rev-parse foo
'
test_expect_success 'refspecs and --mirror do not mix (cmdline)' '
diff --git a/t/t5505-remote.sh b/t/t5505-remote.sh
index bef0250e89..2701eef85e 100755
--- a/t/t5505-remote.sh
+++ b/t/t5505-remote.sh
@@ -1644,4 +1644,18 @@ test_expect_success 'empty config clears remote.*.pushurl list' '
test_cmp expect actual
'
+test_expect_success 'forbid adding subset of existing remote' '
+ test_when_finished "git remote rm outer" &&
+ git remote add outer url &&
+ test_must_fail git remote add outer/inner url 2>err &&
+ test_grep ".outer/inner. is a subset of existing remote .outer." err
+'
+
+test_expect_success 'forbid adding superset of existing remote' '
+ test_when_finished "git remote rm outer/inner" &&
+ git remote add outer/inner url &&
+ test_must_fail git remote add outer url 2>err &&
+ test_grep ".outer. is a superset of existing remote .outer/inner." err
+'
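
Run interactively, the new checks presumably fail along these lines (the exact
message prefix and quoting are inferred from the test_grep patterns above, in
which "." stands in for the quote characters):

    $ git remote add outer https://example.com/repo.git
    $ git remote add outer/inner https://example.com/repo.git
    error: 'outer/inner' is a subset of existing remote 'outer'
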
+
test_done
diff --git a/t/t5516-fetch-push.sh b/t/t5516-fetch-push.sh
index dabcc5f811..4e9c27b0f2 100755
--- a/t/t5516-fetch-push.sh
+++ b/t/t5516-fetch-push.sh
@@ -744,8 +744,8 @@ test_expect_success 'pushing valid refs triggers post-receive and post-update ho
EOF
cat >update.expect <<-EOF &&
- refs/heads/main $orgmain $newmain
refs/heads/next $orgnext $newnext
+ refs/heads/main $orgmain $newmain
EOF
cat >post-receive.expect <<-EOF &&
@@ -808,8 +808,8 @@ test_expect_success 'deletion of a non-existent ref is not fed to post-receive a
EOF
cat >update.expect <<-EOF &&
- refs/heads/main $orgmain $newmain
refs/heads/nonexistent $ZERO_OID $ZERO_OID
+ refs/heads/main $orgmain $newmain
EOF
cat >post-receive.expect <<-EOF &&
@@ -868,10 +868,10 @@ test_expect_success 'mixed ref updates, deletes, invalid deletes trigger hooks w
EOF
cat >update.expect <<-EOF &&
- refs/heads/main $orgmain $newmain
refs/heads/next $orgnext $newnext
- refs/heads/seen $orgseen $newseen
refs/heads/nonexistent $ZERO_OID $ZERO_OID
+ refs/heads/main $orgmain $newmain
+ refs/heads/seen $orgseen $newseen
EOF
cat >post-receive.expect <<-EOF &&
@@ -1909,4 +1909,23 @@ test_expect_success 'push with config push.useBitmaps' '
--thin --delta-base-offset -q --no-use-bitmap-index <false
'
+test_expect_success 'push with config pack.usePathWalk=true' '
+ mk_test testrepo heads/main &&
+ git checkout main &&
+ test_config pack.usePathWalk true &&
+ GIT_TRACE2_EVENT="$(pwd)/path-walk.txt" \
+ git push --quiet testrepo main:test &&
+
+ test_region pack-objects path-walk path-walk.txt
+'
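
test_region asserts that the trace2 event stream contains a matching
region_enter/region_leave pair for the given category and label. A rough
manual equivalent of the check above (the JSON field layout is only
approximated here):

    GIT_TRACE2_EVENT="$(pwd)/path-walk.txt" \
        git -c pack.usePathWalk=true push --quiet testrepo main:test &&
    grep "\"region_enter\".*\"category\":\"pack-objects\".*\"label\":\"path-walk\"" \
        path-walk.txt
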
+
+test_expect_success 'push with F/D conflict with deletion and creation' '
+ test_when_finished "git branch -D branch" &&
+ git branch branch/conflict &&
+ mk_test testrepo heads/branch/conflict &&
+ git branch -D branch/conflict &&
+ git branch branch &&
+ git push testrepo :refs/heads/branch/conflict refs/heads/branch
+'
+
test_done
diff --git a/t/t5538-push-shallow.sh b/t/t5538-push-shallow.sh
index e91fcc173e..dc0e972943 100755
--- a/t/t5538-push-shallow.sh
+++ b/t/t5538-push-shallow.sh
@@ -123,4 +123,45 @@ EOF
git cat-file blob $(echo 1|git hash-object --stdin) >/dev/null
)
'
+
+test_expect_success 'push new commit from shallow clone has correct object count' '
+ git init origin &&
+ test_commit -C origin a &&
+ test_commit -C origin b &&
+
+ git clone --depth=1 "file://$(pwd)/origin" client &&
+ git -C client checkout -b topic &&
+ git -C client commit --allow-empty -m "empty" &&
+ GIT_PROGRESS_DELAY=0 git -C client push --progress origin topic 2>err &&
+ test_grep "Enumerating objects: 1, done." err
+'
+
+test_expect_success 'push new commit from shallow clone has good deltas' '
+ git init base &&
+ test_seq 1 999 >base/a &&
+ test_commit -C base initial &&
+ git -C base add a &&
+ git -C base commit -m "big a" &&
+
+ git clone --depth=1 "file://$(pwd)/base" deltas &&
+ git -C deltas checkout -b deltas &&
+ test_seq 1 1000 >deltas/a &&
+ git -C deltas commit -a -m "bigger a" &&
+ GIT_PROGRESS_DELAY=0 git -C deltas push --progress origin deltas 2>err &&
+
+ test_grep "Enumerating objects: 5, done" err &&
+
+ # If the delta base is found, then this message uses "bytes".
+ # If the delta base is not found, then this message uses "KiB".
+ test_grep "Writing objects: .* bytes" err &&
+
+ git -C deltas commit --amend -m "changed message" &&
+ GIT_TRACE2_EVENT="$(pwd)/config-push.txt" \
+ GIT_PROGRESS_DELAY=0 git -C deltas -c pack.usePathWalk=true \
+ push --progress -f origin deltas 2>err &&
+
+ test_grep "Enumerating objects: 1, done" err &&
+ test_region pack-objects path-walk config-push.txt
+'
+
test_done
diff --git a/t/t5558-clone-bundle-uri.sh b/t/t5558-clone-bundle-uri.sh
index 9b211a626b..7a0943bd36 100755
--- a/t/t5558-clone-bundle-uri.sh
+++ b/t/t5558-clone-bundle-uri.sh
@@ -1279,6 +1279,29 @@ test_expect_success 'bundles are downloaded once during fetch --all' '
trace-mult.txt >bundle-fetches &&
test_line_count = 1 bundle-fetches
'
+
+test_expect_success 'bundles with space in URI are rejected' '
+ test_when_finished "rm -rf busted repo" &&
+ mkdir -p "$HOME/busted/ /$HOME/repo/.git/objects/bundles" &&
+ git clone --bundle-uri="$HTTPD_URL/bogus $HOME/busted/" "$HTTPD_URL/smart/fetch.git" repo 2>err &&
+ test_grep "error: bundle-uri: URI is malformed: " err &&
+ find busted -type f >files &&
+ test_must_be_empty files
+'
+
+test_expect_success 'bundles with newline in URI are rejected' '
+ test_when_finished "rm -rf busted repo" &&
+ git clone --bundle-uri="$HTTPD_URL/bogus\nget $HTTPD_URL/bogus $HOME/busted" "$HTTPD_URL/smart/fetch.git" repo 2>err &&
+ test_grep "error: bundle-uri: URI is malformed: " err &&
+ test_path_is_missing "$HOME/busted"
+'
+
+test_expect_success 'bundles with newline in target path are rejected' '
+ git clone --bundle-uri="$HTTPD_URL/bogus" "$HTTPD_URL/smart/fetch.git" "$(printf "escape\nget $HTTPD_URL/bogus .")" 2>err &&
+ test_grep "error: bundle-uri: filename is malformed: " err &&
+ test_path_is_missing escape
+'
+
# Do not add tests here unless they use the HTTP server, as they will
# not run unless the HTTP dependencies exist.
diff --git a/t/t6422-merge-rename-corner-cases.sh b/t/t6422-merge-rename-corner-cases.sh
index 9cbe7ca782..f14c0fb30e 100755
--- a/t/t6422-merge-rename-corner-cases.sh
+++ b/t/t6422-merge-rename-corner-cases.sh
@@ -1146,10 +1146,7 @@ test_conflicts_with_adds_and_renames() {
cd simple_${sideL}_${sideR} &&
# Create some related files now
- for i in $(test_seq 1 10)
- do
- echo Random base content line $i
- done >file_v1 &&
+ test_seq -f "Random base content line %d" 1 10 >file_v1 &&
cp file_v1 file_v2 &&
echo modification >>file_v2 &&
@@ -1293,10 +1290,7 @@ test_setup_nested_conflicts_from_rename_rename () {
cd nested_conflicts_from_rename_rename &&
# Create some related files now
- for i in $(test_seq 1 10)
- do
- echo Random base content line $i
- done >file_v1 &&
+ test_seq -f "Random base content line %d" 1 10 >file_v1 &&
cp file_v1 file_v2 &&
cp file_v1 file_v3 &&
diff --git a/t/t6601-path-walk.sh b/t/t6601-path-walk.sh
index 8d187f7279..56bd1e3c5b 100755
--- a/t/t6601-path-walk.sh
+++ b/t/t6601-path-walk.sh
@@ -376,6 +376,26 @@ test_expect_success 'topic, not base, boundary with pruning' '
test_cmp_sorted expect out
'
+test_expect_success 'topic, not base, --edge-aggressive with pruning' '
+ test-tool path-walk --prune --edge-aggressive -- topic --not base >out &&
+
+ cat >expect <<-EOF &&
+ 0:commit::$(git rev-parse topic)
+ 1:tree::$(git rev-parse topic^{tree})
+ 1:tree::$(git rev-parse base^{tree}):UNINTERESTING
+ 2:tree:right/:$(git rev-parse topic:right)
+ 2:tree:right/:$(git rev-parse base:right):UNINTERESTING
+ 3:blob:right/c:$(git rev-parse base:right/c):UNINTERESTING
+ 3:blob:right/c:$(git rev-parse topic:right/c)
+ blobs:2
+ commits:1
+ tags:0
+ trees:4
+ EOF
+
+ test_cmp_sorted expect out
+'
+
test_expect_success 'trees are reported exactly once' '
test_when_finished "rm -rf unique-trees" &&
test_create_repo unique-trees &&
diff --git a/t/t7007-show.sh b/t/t7007-show.sh
index d6cc69e0f2..2d322b53d1 100755
--- a/t/t7007-show.sh
+++ b/t/t7007-show.sh
@@ -167,4 +167,28 @@ test_expect_success 'show --graph is forbidden' '
test_must_fail git show --graph HEAD
'
+test_expect_success 'show unmerged index' '
+ git reset --hard &&
+
+ git switch -C base &&
+ echo "base" >conflicting &&
+ git add conflicting &&
+ git commit -m "base" &&
+
+ git branch hello &&
+ git branch goodbye &&
+
+ git switch hello &&
+ echo "hello" >conflicting &&
+ git commit -am "hello" &&
+
+ git switch goodbye &&
+ echo "goodbye" >conflicting &&
+ git commit -am "goodbye" &&
+
+ git switch hello &&
+ test_must_fail git merge goodbye &&
+ git show --merge HEAD
+'
+
test_done
diff --git a/t/t7401-submodule-summary.sh b/t/t7401-submodule-summary.sh
index 9c3cc4cf40..66c3ec2da2 100755
--- a/t/t7401-submodule-summary.sh
+++ b/t/t7401-submodule-summary.sh
@@ -38,10 +38,11 @@ commit_file () {
git commit "$@" -m "Commit $*" >/dev/null
}
-test_create_repo sm1 &&
-add_file . foo >/dev/null
-
-head1=$(add_file sm1 foo1 foo2)
+test_expect_success 'setup submodule' '
+ git init sm1 &&
+ add_file . foo &&
+ head1=$(add_file sm1 foo1 foo2)
+'
test_expect_success 'added submodule' "
git add sm1 &&
@@ -214,9 +215,12 @@ test_expect_success 'typechanged submodule(submodule->blob)' "
test_cmp expected actual
"
-rm -f sm1 &&
-test_create_repo sm1 &&
-head6=$(add_file sm1 foo6 foo7)
+test_expect_success 'setup submodule' '
+ rm -f sm1 &&
+ git init sm1 &&
+ head6=$(add_file sm1 foo6 foo7)
+'
+
test_expect_success 'nonexistent commit' "
git submodule summary >actual &&
cat >expected <<-EOF &&
diff --git a/t/t7406-submodule-update.sh b/t/t7406-submodule-update.sh
index c562bad042..3adab12091 100755
--- a/t/t7406-submodule-update.sh
+++ b/t/t7406-submodule-update.sh
@@ -1095,12 +1095,15 @@ test_expect_success 'submodule update --quiet passes quietness to fetch with a s
(cd super5 &&
# This test var can mess with the stderr output checked in this test.
GIT_TEST_NAME_HASH_VERSION=1 \
+ GIT_TEST_PACK_PATH_WALK=0 \
git submodule update --quiet --init --depth=1 submodule3 >out 2>err &&
test_must_be_empty out &&
test_must_be_empty err
) &&
git clone super4 super6 &&
(cd super6 &&
+ # This test variable will create a "warning" message to stderr
+ GIT_TEST_PACK_PATH_WALK=0 \
git submodule update --init --depth=1 submodule3 >out 2>err &&
test_file_not_empty out &&
test_file_not_empty err
@@ -1134,6 +1137,67 @@ test_expect_success 'setup clean recursive superproject' '
git clone --recurse-submodules top top-clean
'
+test_expect_success 'submodule update with multiple remotes' '
+ test_when_finished "rm -fr top-cloned" &&
+ cp -r top-clean top-cloned &&
+
+ # Create a commit in each repo, starting with bottom
+ test_commit -C bottom multiple_remote_commit &&
+ # Create middle commit
+ git -C middle/bottom fetch &&
+ git -C middle/bottom checkout -f FETCH_HEAD &&
+ git -C middle add bottom &&
+ git -C middle commit -m "multiple_remote_commit" &&
+ # Create top commit
+ git -C top/middle fetch &&
+ git -C top/middle checkout -f FETCH_HEAD &&
+ git -C top add middle &&
+ git -C top commit -m "multiple_remote_commit" &&
+
+ # rename the submodule remote
+ git -C top-cloned/middle remote rename origin upstream &&
+
+ # Add another remote
+ git -C top-cloned/middle remote add other bogus &&
+
+ # Make the update of "middle" a no-op, otherwise we error out
+ # because of its unmerged state
+ test_config -C top-cloned submodule.middle.update !true &&
+ git -C top-cloned submodule update --recursive 2>actual.err &&
+ cat >expect.err <<-\EOF &&
+ EOF
+ test_cmp expect.err actual.err
+'
+
+test_expect_success 'submodule update with renamed remote' '
+ test_when_finished "rm -fr top-cloned" &&
+ cp -r top-clean top-cloned &&
+
+ # Create a commit in each repo, starting with bottom
+ test_commit -C bottom rename_commit &&
+ # Create middle commit
+ git -C middle/bottom fetch &&
+ git -C middle/bottom checkout -f FETCH_HEAD &&
+ git -C middle add bottom &&
+ git -C middle commit -m "rename_commit" &&
+ # Create top commit
+ git -C top/middle fetch &&
+ git -C top/middle checkout -f FETCH_HEAD &&
+ git -C top add middle &&
+ git -C top commit -m "rename_commit" &&
+
+ # rename the submodule remote
+ git -C top-cloned/middle remote rename origin upstream &&
+
+ # Make the update of "middle" a no-op, otherwise we error out
+ # because of its unmerged state
+ test_config -C top-cloned submodule.middle.update !true &&
+ git -C top-cloned submodule update --recursive 2>actual.err &&
+ cat >expect.err <<-\EOF &&
+ EOF
+ test_cmp expect.err actual.err
+'
+
test_expect_success 'submodule update should skip unmerged submodules' '
test_when_finished "rm -fr top-cloned" &&
cp -r top-clean top-cloned &&
diff --git a/t/t7422-submodule-output.sh b/t/t7422-submodule-output.sh
index 023a5cbdc4..aea1ddf117 100755
--- a/t/t7422-submodule-output.sh
+++ b/t/t7422-submodule-output.sh
@@ -180,17 +180,14 @@ test_expect_success !MINGW 'git submodule status --recursive propagates SIGPIPE'
COMMIT=$(git rev-parse HEAD) &&
for i in $(test_seq 2000)
do
- printf "[submodule \"sm-$i\"]\npath = recursive-submodule-path-$i\n" "$i" ||
+ echo "[submodule \"sm-$i\"]" &&
+ echo "path = recursive-submodule-path-$i" ||
return 1
done >gitmodules &&
BLOB=$(git hash-object -w --stdin <gitmodules) &&
printf "100644 blob $BLOB\t.gitmodules\n" >tree &&
- for i in $(test_seq 2000)
- do
- printf "160000 commit $COMMIT\trecursive-submodule-path-%d\n" "$i" ||
- return 1
- done >>tree &&
+ test_seq -f "160000 commit $COMMIT\trecursive-submodule-path-%d" 2000 >>tree &&
TREE=$(git mktree <tree) &&
COMMIT=$(git commit-tree "$TREE") &&
diff --git a/t/t7450-bad-git-dotfiles.sh b/t/t7450-bad-git-dotfiles.sh
index 9367794641..14b5743b96 100755
--- a/t/t7450-bad-git-dotfiles.sh
+++ b/t/t7450-bad-git-dotfiles.sh
@@ -372,4 +372,37 @@ test_expect_success 'checkout -f --recurse-submodules must not use a nested gitd
test_path_is_missing nested_checkout/thing2/.git
'
+test_expect_success SYMLINKS,!WINDOWS,!MINGW 'submodule must not checkout into different directory' '
+ test_when_finished "rm -rf sub repo bad-clone" &&
+
+ git init sub &&
+ write_script sub/post-checkout <<-\EOF &&
+ touch "$PWD/foo"
+ EOF
+ git -C sub add post-checkout &&
+ git -C sub commit -m hook &&
+
+ git init repo &&
+ git -C repo -c protocol.file.allow=always submodule add "$PWD/sub" sub &&
+ git -C repo mv sub $(printf "sub\r") &&
+
+ # Ensure config values containing CR are wrapped in quotes.
+ git config unset -f repo/.gitmodules submodule.sub.path &&
+ printf "\tpath = \"sub\r\"\n" >>repo/.gitmodules &&
+
+ git config unset -f repo/.git/modules/sub/config core.worktree &&
+ {
+ printf "[core]\n" &&
+ printf "\tworktree = \"../../../sub\r\"\n"
+ } >>repo/.git/modules/sub/config &&
+
+ ln -s .git/modules/sub/hooks repo/sub &&
+ git -C repo add -A &&
+ git -C repo commit -m submodule &&
+
+ git -c protocol.file.allow=always clone --recurse-submodules repo bad-clone &&
+ ! test -f "$PWD/foo" &&
+ test -f $(printf "bad-clone/sub\r/post-checkout")
+'
+
test_done
diff --git a/t/t7528-signed-commit-ssh.sh b/t/t7528-signed-commit-ssh.sh
index 065f780636..0f887a3ebe 100755
--- a/t/t7528-signed-commit-ssh.sh
+++ b/t/t7528-signed-commit-ssh.sh
@@ -84,18 +84,26 @@ test_expect_success GPGSSH 'sign commits using literal public keys with ssh-agen
test_config gpg.format ssh &&
eval $(ssh-agent) &&
test_when_finished "kill ${SSH_AGENT_PID}" &&
- ssh-add "${GPGSSH_KEY_PRIMARY}" &&
- echo 1 >file && git add file &&
- git commit -a -m rsa-inline -S"$(cat "${GPGSSH_KEY_PRIMARY}.pub")" &&
- echo 2 >file &&
- test_config user.signingkey "$(cat "${GPGSSH_KEY_PRIMARY}.pub")" &&
- git commit -a -m rsa-config -S &&
- ssh-add "${GPGSSH_KEY_ECDSA}" &&
- echo 3 >file &&
- git commit -a -m ecdsa-inline -S"key::$(cat "${GPGSSH_KEY_ECDSA}.pub")" &&
- echo 4 >file &&
- test_config user.signingkey "key::$(cat "${GPGSSH_KEY_ECDSA}.pub")" &&
- git commit -a -m ecdsa-config -S
+ test_when_finished "test_unconfig user.signingkey" &&
+ mkdir tmpdir &&
+ TMPDIR="$(pwd)/tmpdir" &&
+ (
+ export TMPDIR &&
+ ssh-add "${GPGSSH_KEY_PRIMARY}" &&
+ echo 1 >file && git add file &&
+ git commit -a -m rsa-inline -S"$(cat "${GPGSSH_KEY_PRIMARY}.pub")" &&
+ echo 2 >file &&
+ git config user.signingkey "$(cat "${GPGSSH_KEY_PRIMARY}.pub")" &&
+ git commit -a -m rsa-config -S &&
+ ssh-add "${GPGSSH_KEY_ECDSA}" &&
+ echo 3 >file &&
+ git commit -a -m ecdsa-inline -S"key::$(cat "${GPGSSH_KEY_ECDSA}.pub")" &&
+ echo 4 >file &&
+ git config user.signingkey "key::$(cat "${GPGSSH_KEY_ECDSA}.pub")" &&
+ git commit -a -m ecdsa-config -S
+ ) &&
+ find tmpdir -type f >tmpfiles &&
+ test_must_be_empty tmpfiles
'
test_expect_success GPGSSH,GPGSSH_VERIFYTIME 'create signed commits with keys having defined lifetimes' '
diff --git a/t/t7600-merge.sh b/t/t7600-merge.sh
index 2a8df29219..9838094b66 100755
--- a/t/t7600-merge.sh
+++ b/t/t7600-merge.sh
@@ -185,8 +185,19 @@ test_expect_success 'reject non-strategy with a git-merge-foo name' '
test_expect_success 'merge c0 with c1' '
echo "OBJID HEAD@{0}: merge c1: Fast-forward" >reflog.expected &&
+ cat >expect <<-\EOF &&
+ Updating FROM..TO
+ Fast-forward
+ file | 2 +-
+ other | 9 +++++++++
+ 2 files changed, 10 insertions(+), 1 deletion(-)
+ create mode 100644 other
+ EOF
+
git reset --hard c0 &&
- git merge c1 &&
+ git merge c1 >out &&
+ sed -e "1s/^Updating [0-9a-f.]*/Updating FROM..TO/" out >actual &&
+ test_cmp expect actual &&
verify_merge file result.1 &&
verify_head "$c1" &&
@@ -205,6 +216,67 @@ test_expect_success 'merge c0 with c1 with --ff-only' '
verify_head "$c1"
'
+test_expect_success 'the same merge with merge.stat=diffstat' '
+ cat >expect <<-\EOF &&
+ Updating FROM..TO
+ Fast-forward
+ file | 2 +-
+ other | 9 +++++++++
+ 2 files changed, 10 insertions(+), 1 deletion(-)
+ create mode 100644 other
+ EOF
+
+ git reset --hard c0 &&
+ git -c merge.stat=diffstat merge c1 >out &&
+ sed -e "1s/^Updating [0-9a-f.]*/Updating FROM..TO/" out >actual &&
+ test_cmp expect actual
+'
+
+test_expect_success 'the same merge with compact summary' '
+ cat >expect <<-\EOF &&
+ Updating FROM..TO
+ Fast-forward
+ file | 2 +-
+ other (new) | 9 +++++++++
+ 2 files changed, 10 insertions(+), 1 deletion(-)
+ EOF
+
+ git reset --hard c0 &&
+ git merge --compact-summary c1 >out &&
+ sed -e "1s/^Updating [0-9a-f.]*/Updating FROM..TO/" out >actual &&
+ test_cmp expect actual
+'
+
+test_expect_success 'the same merge with compact summary' '
+ cat >expect <<-\EOF &&
+ Updating FROM..TO
+ Fast-forward
+ file | 2 +-
+ other (new) | 9 +++++++++
+ 2 files changed, 10 insertions(+), 1 deletion(-)
+ EOF
+
+ git reset --hard c0 &&
+ git merge --compact-summary c1 >out &&
+ sed -e "1s/^Updating [0-9a-f.]*/Updating FROM..TO/" out >actual &&
+ test_cmp expect actual
+'
+
+test_expect_success 'the same merge with merge.stat=compact' '
+ cat >expect <<-\EOF &&
+ Updating FROM..TO
+ Fast-forward
+ file | 2 +-
+ other (new) | 9 +++++++++
+ 2 files changed, 10 insertions(+), 1 deletion(-)
+ EOF
+
+ git reset --hard c0 &&
+ git -c merge.stat=compact merge c1 >out &&
+ sed -e "1s/^Updating [0-9a-f.]*/Updating FROM..TO/" out >actual &&
+ test_cmp expect actual
+'
+
test_debug 'git log --graph --decorate --oneline --all'
test_expect_success 'merge from unborn branch' '
diff --git a/t/t7704-repack-cruft.sh b/t/t7704-repack-cruft.sh
index 8aebfb45f5..aa2e2e6ad8 100755
--- a/t/t7704-repack-cruft.sh
+++ b/t/t7704-repack-cruft.sh
@@ -724,4 +724,149 @@ test_expect_success 'cruft repack respects --quiet' '
)
'
+setup_cruft_exclude_tests() {
+ git init "$1" &&
+ (
+ cd "$1" &&
+
+ git config repack.midxMustContainCruft false &&
+
+ test_commit one &&
+
+ test_commit --no-tag two &&
+ two="$(git rev-parse HEAD)" &&
+ test_commit --no-tag three &&
+ three="$(git rev-parse HEAD)" &&
+ git reset --hard one &&
+ git reflog expire --all --expire=all &&
+
+ GIT_TEST_MULTI_PACK_INDEX=0 git repack --cruft -d &&
+
+ git merge $two &&
+ test_commit four
+ )
+}
+
+test_expect_success 'repack --write-midx excludes cruft where possible' '
+ setup_cruft_exclude_tests exclude-cruft-when-possible &&
+ (
+ cd exclude-cruft-when-possible &&
+
+ GIT_TEST_MULTI_PACK_INDEX=0 \
+ git repack -d --geometric=2 --write-midx --write-bitmap-index &&
+
+ test-tool read-midx --show-objects $objdir >midx &&
+ cruft="$(ls $packdir/*.mtimes)" &&
+ test_grep ! "$(basename "$cruft" .mtimes).idx" midx &&
+
+ git rev-list --all --objects --no-object-names >reachable.raw &&
+ sort reachable.raw >reachable.objects &&
+ awk "/\.pack$/ { print \$1 }" <midx | sort >midx.objects &&
+
+ test_cmp reachable.objects midx.objects
+ )
+'
+
+test_expect_success 'repack --write-midx includes cruft when instructed' '
+ setup_cruft_exclude_tests exclude-cruft-when-instructed &&
+ (
+ cd exclude-cruft-when-instructed &&
+
+ GIT_TEST_MULTI_PACK_INDEX=0 \
+ git -c repack.midxMustContainCruft=true repack \
+ -d --geometric=2 --write-midx --write-bitmap-index &&
+
+ test-tool read-midx --show-objects $objdir >midx &&
+ cruft="$(ls $packdir/*.mtimes)" &&
+ test_grep "$(basename "$cruft" .mtimes).idx" midx &&
+
+ git cat-file --batch-check="%(objectname)" --batch-all-objects \
+ >all.objects &&
+ awk "/\.pack$/ { print \$1 }" <midx | sort >midx.objects &&
+
+ test_cmp all.objects midx.objects
+ )
+'
+
+test_expect_success 'repack --write-midx includes cruft when necessary' '
+ setup_cruft_exclude_tests exclude-cruft-when-necessary &&
+ (
+ cd exclude-cruft-when-necessary &&
+
+ test_path_is_file $(ls $packdir/pack-*.mtimes) &&
+ ( cd $packdir && ls pack-*.idx ) | sort >packs.all &&
+ git multi-pack-index write --stdin-packs --bitmap <packs.all &&
+
+ test_commit five &&
+ GIT_TEST_MULTI_PACK_INDEX=0 \
+ git repack -d --geometric=2 --write-midx --write-bitmap-index &&
+
+ test-tool read-midx --show-objects $objdir >midx &&
+ awk "/\.pack$/ { print \$1 }" <midx | sort >midx.objects &&
+ git cat-file --batch-all-objects --batch-check="%(objectname)" \
+ >expect.objects &&
+ test_cmp expect.objects midx.objects &&
+
+ grep "^pack-" midx >midx.packs &&
+ test_line_count = "$(($(wc -l <packs.all) + 1))" midx.packs
+ )
+'
+
+test_expect_success 'repack --write-midx includes cruft when already geometric' '
+ git init repack--write-midx-geometric-noop &&
+ (
+ cd repack--write-midx-geometric-noop &&
+
+ git branch -M main &&
+ test_commit A &&
+ test_commit B &&
+
+ git checkout -B side &&
+ test_commit --no-tag C &&
+ C="$(git rev-parse HEAD)" &&
+
+ git checkout main &&
+ git branch -D side &&
+ git reflog expire --all --expire=all &&
+
+ # At this point we have two packs: one containing the
+ # objects belonging to commits A and B, and another
+ # (cruft) pack containing the objects belonging to
+ # commit C.
+ git repack --cruft -d &&
+
+ # Create a third pack which contains a merge commit
+ # making commit C reachable again.
+ #
+ # --no-ff is important here, as it ensures that we
+ # actually write a new object and subsequently a new
+ # pack to contain it.
+ git merge --no-ff $C &&
+ git repack -d &&
+
+ ls $packdir/pack-*.idx | sort >packs.all &&
+ cruft="$(ls $packdir/pack-*.mtimes)" &&
+ cruft="${cruft%.mtimes}.idx" &&
+
+ for idx in $(grep -v $cruft <packs.all)
+ do
+ git show-index <$idx >out &&
+ wc -l <out || return 1
+ done >sizes.raw &&
+
+ # Make sure that there are two non-cruft packs, and
+ # that one of them contains at least twice as many
+ # objects as the other, ensuring that they are already
+ # in a geometric progression.
+ sort -n sizes.raw >sizes &&
+ test_line_count = 2 sizes &&
+ s1=$(head -n 1 sizes) &&
+ s2=$(tail -n 1 sizes) &&
+ test "$s2" -gt "$((2 * $s1))" &&
+
+ git -c repack.midxMustContainCruft=false repack --geometric=2 \
+ --write-midx --write-bitmap-index
+ )
+'
+
test_done
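
The knob exercised by these tests controls whether the multi-pack-index
written by 'git repack --write-midx' must also cover cruft packs. A sketch of
the two modes, using the same options as the tests above:

    # let the MIDX leave cruft packs out when the bitmap does not need them
    git -c repack.midxMustContainCruft=false \
        repack -d --geometric=2 --write-midx --write-bitmap-index

    # force cruft packs into the MIDX
    git -c repack.midxMustContainCruft=true \
        repack -d --geometric=2 --write-midx --write-bitmap-index
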
diff --git a/t/t7815-grep-binary.sh b/t/t7815-grep-binary.sh
index b7d83f9a5d..55d5e6de17 100755
--- a/t/t7815-grep-binary.sh
+++ b/t/t7815-grep-binary.sh
@@ -63,7 +63,7 @@ test_expect_success 'git grep ile a' '
git grep ile a
'
-test_expect_failure !CYGWIN 'git grep .fi a' '
+test_expect_failure !CYGWIN,!MACOS 'git grep .fi a' '
git grep .fi a
'
diff --git a/t/t7900-maintenance.sh b/t/t7900-maintenance.sh
index 8cf89e285f..ddd273d8dc 100755
--- a/t/t7900-maintenance.sh
+++ b/t/t7900-maintenance.sh
@@ -49,9 +49,9 @@ test_expect_success 'run [--auto|--quiet]' '
git maintenance run --auto 2>/dev/null &&
GIT_TRACE2_EVENT="$(pwd)/run-no-quiet.txt" \
git maintenance run --no-quiet 2>/dev/null &&
- test_subcommand git gc --quiet --no-detach <run-no-auto.txt &&
- test_subcommand ! git gc --auto --quiet --no-detach <run-auto.txt &&
- test_subcommand git gc --no-quiet --no-detach <run-no-quiet.txt
+ test_subcommand git gc --quiet --no-detach --skip-foreground-tasks <run-no-auto.txt &&
+ test_subcommand ! git gc --auto --quiet --no-detach --skip-foreground-tasks <run-auto.txt &&
+ test_subcommand git gc --no-quiet --no-detach --skip-foreground-tasks <run-no-quiet.txt
'
test_expect_success 'maintenance.auto config option' '
@@ -154,9 +154,9 @@ test_expect_success 'run --task=<task>' '
git maintenance run --task=commit-graph 2>/dev/null &&
GIT_TRACE2_EVENT="$(pwd)/run-both.txt" \
git maintenance run --task=commit-graph --task=gc 2>/dev/null &&
- test_subcommand ! git gc --quiet --no-detach <run-commit-graph.txt &&
- test_subcommand git gc --quiet --no-detach <run-gc.txt &&
- test_subcommand git gc --quiet --no-detach <run-both.txt &&
+ test_subcommand ! git gc --quiet --no-detach --skip-foreground-tasks <run-commit-graph.txt &&
+ test_subcommand git gc --quiet --no-detach --skip-foreground-tasks <run-gc.txt &&
+ test_subcommand git gc --quiet --no-detach --skip-foreground-tasks <run-both.txt &&
test_subcommand git commit-graph write --split --reachable --no-progress <run-commit-graph.txt &&
test_subcommand ! git commit-graph write --split --reachable --no-progress <run-gc.txt &&
test_subcommand git commit-graph write --split --reachable --no-progress <run-both.txt
@@ -610,7 +610,12 @@ test_expect_success 'rerere-gc task with --auto honors maintenance.rerere-gc.aut
test_expect_success '--auto and --schedule incompatible' '
test_must_fail git maintenance run --auto --schedule=daily 2>err &&
- test_grep "at most one" err
+ test_grep "cannot be used together" err
+'
+
+test_expect_success '--task and --schedule incompatible' '
+ test_must_fail git maintenance run --task=pack-refs --schedule=daily 2>err &&
+ test_grep "cannot be used together" err
'
test_expect_success 'invalid --schedule value' '
diff --git a/t/t9001-send-email.sh b/t/t9001-send-email.sh
index 0c1af43f6f..e56e0c8d77 100755
--- a/t/t9001-send-email.sh
+++ b/t/t9001-send-email.sh
@@ -201,6 +201,13 @@ test_expect_success $PREREQ 'cc trailer with get_maintainer.pl output' '
test_cmp expected-cc commandline1
'
+test_expect_failure $PREREQ 'invalid smtp server port value' '
+ clean_fake_sendmail &&
+ git send-email -1 --to=recipient@example.com \
+ --smtp-server-port=bogus-symbolic-name \
+ --smtp-server="$(pwd)/fake.sendmail"
+'
+
test_expect_success $PREREQ 'setup expect' "
cat >expected-show-all-headers <<\EOF
0001-Second.patch
diff --git a/t/t9300-fast-import.sh b/t/t9300-fast-import.sh
index b258dbf1df..4dc3d645bf 100755
--- a/t/t9300-fast-import.sh
+++ b/t/t9300-fast-import.sh
@@ -120,7 +120,7 @@ test_expect_success 'A: create pack from stdin' '
INPUT_END
git fast-import --export-marks=marks.out <input &&
- git whatchanged main
+ git log --raw main
'
test_expect_success 'A: verify pack' '
@@ -279,7 +279,7 @@ test_expect_success 'A: verify marks import does not crash' '
INPUT_END
git fast-import --import-marks=marks.out <input &&
- git whatchanged verify--import-marks
+ git log --raw verify--import-marks
'
test_expect_success 'A: verify pack' '
@@ -652,7 +652,7 @@ test_expect_success 'C: incremental import create pack from stdin' '
INPUT_END
git fast-import <input &&
- git whatchanged branch
+ git log --raw branch
'
test_expect_success 'C: verify pack' '
@@ -715,7 +715,7 @@ test_expect_success 'D: inline data in commit' '
INPUT_END
git fast-import <input &&
- git whatchanged branch
+ git log --raw branch
'
test_expect_success 'D: verify pack' '
@@ -882,7 +882,7 @@ test_expect_success 'H: deletall, add 1' '
INPUT_END
git fast-import <input &&
- git whatchanged H
+ git log --raw H
'
test_expect_success 'H: verify pack' '
@@ -2066,7 +2066,7 @@ test_expect_success 'Q: commit notes' '
INPUT_END
git fast-import <input &&
- git whatchanged notes-test
+ git log --raw notes-test
'
test_expect_success 'Q: verify pack' '
diff --git a/t/t9301-fast-import-notes.sh b/t/t9301-fast-import-notes.sh
index 1ae4d7c0d3..e62173cf1f 100755
--- a/t/t9301-fast-import-notes.sh
+++ b/t/t9301-fast-import-notes.sh
@@ -76,7 +76,7 @@ INPUT_END
test_expect_success 'set up main branch' '
git fast-import <input &&
- git whatchanged main
+ git log --raw main
'
commit4=$(git rev-parse refs/heads/main)
diff --git a/t/t9350-fast-export.sh b/t/t9350-fast-export.sh
index 76619765fc..46700dbc40 100755
--- a/t/t9350-fast-export.sh
+++ b/t/t9350-fast-export.sh
@@ -314,7 +314,7 @@ test_expect_success GPG 'signed-commits=abort' '
test_expect_success GPG 'signed-commits=verbatim' '
git fast-export --signed-commits=verbatim --reencode=no commit-signing >output &&
- grep "^gpgsig sha" output &&
+ test_grep -E "^gpgsig $GIT_DEFAULT_HASH openpgp" output &&
grep "encoding ISO-8859-1" output &&
(
cd new &&
@@ -328,7 +328,7 @@ test_expect_success GPG 'signed-commits=verbatim' '
test_expect_success GPG 'signed-commits=warn-verbatim' '
git fast-export --signed-commits=warn-verbatim --reencode=no commit-signing >output 2>err &&
- grep "^gpgsig sha" output &&
+ test_grep -E "^gpgsig $GIT_DEFAULT_HASH openpgp" output &&
grep "encoding ISO-8859-1" output &&
test -s err &&
(
@@ -369,6 +369,62 @@ test_expect_success GPG 'signed-commits=warn-strip' '
'
+test_expect_success GPGSM 'setup X.509 signed commit' '
+
+ git checkout -b x509-signing main &&
+ test_config gpg.format x509 &&
+ test_config user.signingkey $GIT_COMMITTER_EMAIL &&
+ echo "X.509 content" >file &&
+ git add file &&
+ git commit -S -m "X.509 signed commit" &&
+ X509_COMMIT=$(git rev-parse HEAD) &&
+ git checkout main
+
+'
+
+test_expect_success GPGSM 'round-trip X.509 signed commit' '
+
+ git fast-export --signed-commits=verbatim x509-signing >output &&
+ test_grep -E "^gpgsig $GIT_DEFAULT_HASH x509" output &&
+ (
+ cd new &&
+ git fast-import &&
+ git cat-file commit refs/heads/x509-signing >actual &&
+ grep "^gpgsig" actual &&
+ IMPORTED=$(git rev-parse refs/heads/x509-signing) &&
+ test $X509_COMMIT = $IMPORTED
+ ) <output
+
+'
+
+test_expect_success GPGSSH 'setup SSH signed commit' '
+
+ git checkout -b ssh-signing main &&
+ test_config gpg.format ssh &&
+ test_config user.signingkey "${GPGSSH_KEY_PRIMARY}" &&
+ echo "SSH content" >file &&
+ git add file &&
+ git commit -S -m "SSH signed commit" &&
+ SSH_COMMIT=$(git rev-parse HEAD) &&
+ git checkout main
+
+'
+
+test_expect_success GPGSSH 'round-trip SSH signed commit' '
+
+ git fast-export --signed-commits=verbatim ssh-signing >output &&
+ test_grep -E "^gpgsig $GIT_DEFAULT_HASH ssh" output &&
+ (
+ cd new &&
+ git fast-import &&
+ git cat-file commit refs/heads/ssh-signing >actual &&
+ grep "^gpgsig" actual &&
+ IMPORTED=$(git rev-parse refs/heads/ssh-signing) &&
+ test $SSH_COMMIT = $IMPORTED
+ ) <output
+
+'
+
test_expect_success 'setup submodule' '
test_config_global protocol.file.allow always &&
@@ -905,4 +961,46 @@ test_expect_success 'fast-export handles --end-of-options' '
test_cmp expect actual
'
+test_expect_success GPG 'setup a commit with dual signatures on its SHA-1 and SHA-256 formats' '
+ # Create a signed SHA-256 commit
+ git init --object-format=sha256 explicit-sha256 &&
+ git -C explicit-sha256 config extensions.compatObjectFormat sha1 &&
+ git -C explicit-sha256 checkout -b dual-signed &&
+ test_commit -C explicit-sha256 A &&
+ echo B >explicit-sha256/B &&
+ git -C explicit-sha256 add B &&
+ test_tick &&
+ git -C explicit-sha256 commit -S -m "signed" B &&
+ SHA256_B=$(git -C explicit-sha256 rev-parse dual-signed) &&
+
+ # Create the corresponding SHA-1 commit
+ SHA1_B=$(git -C explicit-sha256 rev-parse --output-object-format=sha1 dual-signed) &&
+
+ # Check that the resulting SHA-1 commit has both signatures
+ echo $SHA1_B | git -C explicit-sha256 cat-file --batch >out &&
+ test_grep -E "^gpgsig " out &&
+ test_grep -E "^gpgsig-sha256 " out
+'
+
+test_expect_success GPG 'export and import of doubly signed commit' '
+ git -C explicit-sha256 fast-export --signed-commits=verbatim dual-signed >output &&
+ test_grep -E "^gpgsig sha1 openpgp" output &&
+ test_grep -E "^gpgsig sha256 openpgp" output &&
+
+ (
+ cd new &&
+ git fast-import &&
+ git cat-file commit refs/heads/dual-signed >actual &&
+ test_grep -E "^gpgsig " actual &&
+ test_grep -E "^gpgsig-sha256 " actual &&
+ IMPORTED=$(git rev-parse refs/heads/dual-signed) &&
+ if test "$GIT_DEFAULT_HASH" = "sha1"
+ then
+ test $SHA1_B = $IMPORTED
+ else
+ test $SHA256_B = $IMPORTED
+ fi
+ ) <output
+'
+
test_done
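
Taken together, the new signature round-trip tests reduce to the following
pattern (a sketch; the ref and the import repository are placeholders):

    git fast-export --signed-commits=verbatim <ref> >stream &&
    git -C <import-repo> fast-import <stream &&
    git -C <import-repo> cat-file commit <ref> | grep "^gpgsig"
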
diff --git a/t/t9500-gitweb-standalone-no-errors.sh b/t/t9500-gitweb-standalone-no-errors.sh
index 7679780fb8..578d6c8b32 100755
--- a/t/t9500-gitweb-standalone-no-errors.sh
+++ b/t/t9500-gitweb-standalone-no-errors.sh
@@ -700,19 +700,17 @@ test_expect_success \
# ----------------------------------------------------------------------
# syntax highlighting
+test_lazy_prereq HIGHLIGHT '
+ highlight_version=$(highlight --version </dev/null 2>/dev/null) &&
+ test -n "$highlight_version"
+'
-highlight_version=$(highlight --version </dev/null 2>/dev/null)
-if [ $? -eq 127 ]; then
- say "Skipping syntax highlighting tests: 'highlight' not found"
-elif test -z "$highlight_version"; then
- say "Skipping syntax highlighting tests: incorrect 'highlight' found"
-else
- test_set_prereq HIGHLIGHT
+test_expect_success HIGHLIGHT '
cat >>gitweb_config.perl <<-\EOF
our $highlight_bin = "highlight";
- $feature{'highlight'}{'override'} = 1;
+ $feature{"highlight"}{"override"} = 1;
EOF
-fi
+'
test_expect_success HIGHLIGHT \
'syntax highlighting (no highlight, unknown syntax)' \
diff --git a/t/t9822-git-p4-path-encoding.sh b/t/t9822-git-p4-path-encoding.sh
index 572d395498..e6e07facd4 100755
--- a/t/t9822-git-p4-path-encoding.sh
+++ b/t/t9822-git-p4-path-encoding.sh
@@ -7,12 +7,17 @@ test_description='Clone repositories with non ASCII paths'
UTF8_ESCAPED="a-\303\244_o-\303\266_u-\303\274.txt"
ISO8859_ESCAPED="a-\344_o-\366_u-\374.txt"
-ISO8859="$(printf "$ISO8859_ESCAPED")" &&
-echo content123 >"$ISO8859" &&
-rm "$ISO8859" || {
+test_lazy_prereq FS_ACCEPTS_ISO_8859_1 '
+ ISO8859="$(printf "$ISO8859_ESCAPED")" &&
+ echo content123 >"$ISO8859" &&
+ rm "$ISO8859"
+'
+
+if ! test_have_prereq FS_ACCEPTS_ISO_8859_1
+then
skip_all="fs does not accept ISO-8859-1 filenames"
test_done
-}
+fi
test_expect_success 'start p4d' '
start_p4d
diff --git a/t/t9835-git-p4-metadata-encoding-python2.sh b/t/t9835-git-p4-metadata-encoding-python2.sh
index 6116f806f6..b969c7e0d5 100755
--- a/t/t9835-git-p4-metadata-encoding-python2.sh
+++ b/t/t9835-git-p4-metadata-encoding-python2.sh
@@ -12,23 +12,25 @@ failing, and produces maximally sane output in git.'
## SECTION REPEATED IN t9836 ##
###############################
+EXTRA_PATH="$(pwd)/temp_python"
+mkdir "$EXTRA_PATH"
+PATH="$EXTRA_PATH:$PATH"
+export PATH
+
# These tests are specific to Python 2. Write a custom script that executes
# git-p4 directly with the Python 2 interpreter to ensure that we use that
# version even if Git was compiled with Python 3.
-python_target_binary=$(which python2)
-if test -n "$python_target_binary"
-then
- mkdir temp_python
- PATH="$(pwd)/temp_python:$PATH"
- export PATH
-
- write_script temp_python/git-p4-python2 <<-EOF
+test_lazy_prereq P4_PYTHON2 '
+ python_target_binary=$(which python2) &&
+ test -n "$python_target_binary" &&
+ write_script "$EXTRA_PATH"/git-p4-python2 <<-EOF &&
exec "$python_target_binary" "$(git --exec-path)/git-p4" "\$@"
EOF
-fi
+ ( git p4-python2 || true ) >err &&
+ test_grep "valid commands" err
+'
-git p4-python2 >err
-if ! grep 'valid commands' err
+if ! test_have_prereq P4_PYTHON2
then
skip_all="skipping python2 git p4 tests; python2 not available"
test_done
diff --git a/t/t9836-git-p4-metadata-encoding-python3.sh b/t/t9836-git-p4-metadata-encoding-python3.sh
index 5e5217a66b..da6669bf71 100755
--- a/t/t9836-git-p4-metadata-encoding-python3.sh
+++ b/t/t9836-git-p4-metadata-encoding-python3.sh
@@ -12,23 +12,25 @@ failing, and produces maximally sane output in git.'
## SECTION REPEATED IN t9835 ##
###############################
+EXTRA_PATH="$(pwd)/temp_python"
+mkdir "$EXTRA_PATH"
+PATH="$EXTRA_PATH:$PATH"
+export PATH
+
# These tests are specific to Python 3. Write a custom script that executes
# git-p4 directly with the Python 3 interpreter to ensure that we use that
# version even if Git was compiled with Python 2.
-python_target_binary=$(which python3)
-if test -n "$python_target_binary"
-then
- mkdir temp_python
- PATH="$(pwd)/temp_python:$PATH"
- export PATH
-
- write_script temp_python/git-p4-python3 <<-EOF
+test_lazy_prereq P4_PYTHON3 '
+ python_target_binary=$(which python3) &&
+ test -n "$python_target_binary" &&
+ write_script "$EXTRA_PATH"/git-p4-python3 <<-EOF &&
exec "$python_target_binary" "$(git --exec-path)/git-p4" "\$@"
EOF
-fi
+ ( git p4-python3 || true ) >err &&
+ test_grep "valid commands" err
+'
-git p4-python3 >err
-if ! grep 'valid commands' err
+if ! test_have_prereq P4_PYTHON3
then
skip_all="skipping python3 git p4 tests; python3 not available"
test_done
diff --git a/t/t9903-bash-prompt.sh b/t/t9903-bash-prompt.sh
index d667dda654..637a6f13a6 100755
--- a/t/t9903-bash-prompt.sh
+++ b/t/t9903-bash-prompt.sh
@@ -66,10 +66,6 @@ test_expect_success 'prompt - unborn branch' '
test_cmp expected "$actual"
'
-if test_have_prereq !FUNNYNAMES; then
- say 'Your filesystem does not allow newlines in filenames.'
-fi
-
test_expect_success FUNNYNAMES 'prompt - with newline in path' '
repo_with_newline="repo
with
diff --git a/t/test-lib-functions.sh b/t/test-lib-functions.sh
index bee4a2ca34..a28de7b19b 100644
--- a/t/test-lib-functions.sh
+++ b/t/test-lib-functions.sh
@@ -1451,9 +1451,21 @@ test_cmp_fspath () {
# test_seq 1 5 -- outputs 1 2 3 4 5 one line at a time
#
# or with one argument (end), in which case it starts counting
-# from 1.
+# from 1. In addition to the start/end arguments, you can pass an optional
+# printf format. For example:
+#
+# test_seq -f "line %d" 1 5
+#
+# would print 5 lines, "line 1" through "line 5".
test_seq () {
+ local fmt="%d"
+ case "$1" in
+ -f)
+ fmt="$2"
+ shift 2
+ ;;
+ esac
case $# in
1) set 1 "$@" ;;
2) ;;
@@ -1462,7 +1474,7 @@ test_seq () {
test_seq_counter__=$1
while test "$test_seq_counter__" -le "$2"
do
- echo "$test_seq_counter__"
+ printf "$fmt\n" "$test_seq_counter__"
test_seq_counter__=$(( $test_seq_counter__ + 1 ))
done
}
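
The new -f option lets tests collapse echo loops into a single call, as in the
t6422 and t7422 conversions earlier in this diff. For example:

    # before
    for i in $(test_seq 1 10)
    do
        echo "Random base content line $i" || return 1
    done >file_v1

    # after
    test_seq -f "Random base content line %d" 1 10 >file_v1
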
@@ -1695,7 +1707,7 @@ test_set_hash () {
# Detect the hash algorithm in use.
test_detect_hash () {
- case "$GIT_TEST_DEFAULT_HASH" in
+ case "${GIT_TEST_DEFAULT_HASH:-$GIT_TEST_BUILTIN_HASH}" in
"sha256")
test_hash_algo=sha256
test_compat_hash_algo=sha1
@@ -1767,6 +1779,9 @@ test_oid () {
--hash=compat)
algo="$test_compat_hash_algo" &&
shift;;
+ --hash=builtin)
+ algo="$GIT_TEST_BUILTIN_HASH" &&
+ shift;;
--hash=*)
algo="${1#--hash=}" &&
shift;;
diff --git a/t/test-lib.sh b/t/test-lib.sh
index 92d0db13d7..621cd31ae1 100644
--- a/t/test-lib.sh
+++ b/t/test-lib.sh
@@ -134,7 +134,8 @@ export GIT_TEST_DEFAULT_INITIAL_BRANCH_NAME
################################################################
# It appears that people try to run tests without building...
-"${GIT_TEST_INSTALLED:-$GIT_BUILD_DIR}/git$X" >/dev/null
+GIT_BINARY="${GIT_TEST_INSTALLED:-$GIT_BUILD_DIR}/git$X"
+"$GIT_BINARY" >/dev/null
if test $? != 1
then
if test -n "$GIT_TEST_INSTALLED"
@@ -470,7 +471,7 @@ then
then
: Executed by a Bash version supporting BASH_XTRACEFD. Good.
else
- echo >&2 "warning: ignoring -x; '$0' is untraceable without BASH_XTRACEFD"
+ echo >&2 "# warning: ignoring -x; '$0' is untraceable without BASH_XTRACEFD"
trace=
fi
fi
@@ -536,7 +537,8 @@ export GIT_COMMITTER_EMAIL GIT_COMMITTER_NAME
export GIT_COMMITTER_DATE GIT_AUTHOR_DATE
export EDITOR
-GIT_DEFAULT_HASH="${GIT_TEST_DEFAULT_HASH:-sha1}"
+GIT_TEST_BUILTIN_HASH=$("$GIT_BINARY" version --build-options | sed -ne 's/^default-hash: //p')
+GIT_DEFAULT_HASH="${GIT_TEST_DEFAULT_HASH:-$GIT_TEST_BUILTIN_HASH}"
export GIT_DEFAULT_HASH
GIT_DEFAULT_REF_FORMAT="${GIT_TEST_DEFAULT_REF_FORMAT:-files}"
export GIT_DEFAULT_REF_FORMAT
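
GIT_TEST_BUILTIN_HASH is derived from the build metadata: the sed expression
above picks the "default-hash:" line out of 'git version --build-options'. On
a stock SHA-1 build the detection would look roughly like this (the printed
value is an assumption and depends on how Git was configured):

    $ git version --build-options | sed -ne 's/^default-hash: //p'
    sha1
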
@@ -707,7 +709,7 @@ then
exec 3>>"$GIT_TEST_TEE_OUTPUT_FILE" 4>&3
elif test "$verbose" = "t"
then
- exec 4>&2 3>&1
+ exec 4>&2 3>&2
else
exec 4>/dev/null 3>/dev/null
fi
@@ -949,7 +951,7 @@ maybe_setup_verbose () {
test -z "$verbose_only" && return
if match_pattern_list $test_count "$verbose_only"
then
- exec 4>&2 3>&1
+ exec 4>&2 3>&2
# Emit a delimiting blank line when going from
# non-verbose to verbose. Within verbose mode the
# delimiter is printed by test_expect_*. The choice
@@ -1272,7 +1274,14 @@ test_done () {
check_test_results_san_file_ "$test_failure"
- if test -z "$skip_all" && test -n "$invert_exit_code"
+ if test "$test_fixed" != 0
+ then
+ if test -z "$invert_exit_code"
+ then
+ GIT_EXIT_OK=t
+ exit 1
+ fi
+ elif test -z "$skip_all" && test -n "$invert_exit_code"
then
say_color warn "# faking up non-zero exit with --invert-exit-code"
GIT_EXIT_OK=t
@@ -1638,6 +1647,12 @@ fi
# Fix some commands on Windows, and other OS-specific things
uname_s=$(uname -s)
case $uname_s in
+Darwin)
+ test_set_prereq MACOS
+ test_set_prereq POSIXPERM
+ test_set_prereq BSLASHPSPEC
+ test_set_prereq EXECKEEPSPID
+ ;;
*MINGW*)
# Windows has its own (incompatible) sort and find
sort () {
@@ -1895,6 +1910,10 @@ test_lazy_prereq SHA1 '
esac
'
+test_lazy_prereq DEFAULT_HASH_ALGORITHM '
+ test "$GIT_TEST_BUILTIN_HASH" = "$GIT_DEFAULT_HASH"
+'
+
test_lazy_prereq DEFAULT_REPO_FORMAT '
test_have_prereq SHA1,REFFILES
'
diff --git a/t/unit-tests/u-prio-queue.c b/t/unit-tests/u-prio-queue.c
index 145e689c9c..63e58114ae 100644
--- a/t/unit-tests/u-prio-queue.c
+++ b/t/unit-tests/u-prio-queue.c
@@ -13,6 +13,7 @@ static int intcmp(const void *va, const void *vb, void *data UNUSED)
#define STACK -3
#define GET -4
#define REVERSE -5
+#define REPLACE -6
static int show(int *v)
{
@@ -51,6 +52,15 @@ static void test_prio_queue(int *input, size_t input_size,
case REVERSE:
prio_queue_reverse(&pq);
break;
+ case REPLACE:
+ peek = prio_queue_peek(&pq);
+ cl_assert(i + 1 < input_size);
+ cl_assert(input[i + 1] >= 0);
+ cl_assert(j < result_size);
+ cl_assert_equal_i(result[j], show(peek));
+ j++;
+ prio_queue_replace(&pq, &input[++i]);
+ break;
default:
prio_queue_put(&pq, &input[i]);
break;
@@ -81,6 +91,13 @@ void test_prio_queue__empty(void)
((int []){ 1, 2, MISSING, 1, 2, MISSING }));
}
+void test_prio_queue__replace(void)
+{
+ TEST_INPUT(((int []){ REPLACE, 6, 2, 4, REPLACE, 5, 7, GET,
+ REPLACE, 1, DUMP }),
+ ((int []){ MISSING, 2, 4, 5, 1, 6, 7 }));
+}
+
void test_prio_queue__stack(void)
{
TEST_INPUT(((int []){ STACK, 8, 1, 5, 4, 6, 2, 3, DUMP }),
@@ -92,3 +109,9 @@ void test_prio_queue__reverse_stack(void)
TEST_INPUT(((int []){ STACK, 1, 2, 3, 4, 5, 6, REVERSE, DUMP }),
((int []){ 1, 2, 3, 4, 5, 6 }));
}
+
+void test_prio_queue__replace_stack(void)
+{
+ TEST_INPUT(((int []){ STACK, 8, 1, 5, REPLACE, 4, 6, 2, 3, DUMP }),
+ ((int []){ 5, 3, 2, 6, 4, 1, 8 }));
+}
diff --git a/t/unit-tests/u-string-list.c b/t/unit-tests/u-string-list.c
new file mode 100644
index 0000000000..d4ba5f9fa5
--- /dev/null
+++ b/t/unit-tests/u-string-list.c
@@ -0,0 +1,227 @@
+#include "unit-test.h"
+#include "string-list.h"
+
+static void t_vcreate_string_list_dup(struct string_list *list,
+ int free_util, va_list ap)
+{
+ const char *arg;
+
+ cl_assert(list->strdup_strings);
+
+ string_list_clear(list, free_util);
+ while ((arg = va_arg(ap, const char *)))
+ string_list_append(list, arg);
+}
+
+static void t_create_string_list_dup(struct string_list *list, int free_util, ...)
+{
+ va_list ap;
+
+ cl_assert(list->strdup_strings);
+
+ string_list_clear(list, free_util);
+ va_start(ap, free_util);
+ t_vcreate_string_list_dup(list, free_util, ap);
+ va_end(ap);
+}
+
+static void t_string_list_clear(struct string_list *list, int free_util)
+{
+ string_list_clear(list, free_util);
+ cl_assert_equal_p(list->items, NULL);
+ cl_assert_equal_i(list->nr, 0);
+ cl_assert_equal_i(list->alloc, 0);
+}
+
+static void t_string_list_equal(struct string_list *list,
+ struct string_list *expected_strings)
+{
+ cl_assert_equal_i(list->nr, expected_strings->nr);
+ cl_assert(list->nr <= list->alloc);
+ for (size_t i = 0; i < expected_strings->nr; i++)
+ cl_assert_equal_s(list->items[i].string,
+ expected_strings->items[i].string);
+}
+
+static void t_string_list_split(const char *data, int delim, int maxsplit, ...)
+{
+ struct string_list expected_strings = STRING_LIST_INIT_DUP;
+ struct string_list list = STRING_LIST_INIT_DUP;
+ va_list ap;
+ int len;
+
+ va_start(ap, maxsplit);
+ t_vcreate_string_list_dup(&expected_strings, 0, ap);
+ va_end(ap);
+
+ string_list_clear(&list, 0);
+ len = string_list_split(&list, data, delim, maxsplit);
+ cl_assert_equal_i(len, expected_strings.nr);
+ t_string_list_equal(&list, &expected_strings);
+
+ string_list_clear(&expected_strings, 0);
+ string_list_clear(&list, 0);
+}
+
+void test_string_list__split(void)
+{
+ t_string_list_split("foo:bar:baz", ':', -1, "foo", "bar", "baz", NULL);
+ t_string_list_split("foo:bar:baz", ':', 0, "foo:bar:baz", NULL);
+ t_string_list_split("foo:bar:baz", ':', 1, "foo", "bar:baz", NULL);
+ t_string_list_split("foo:bar:baz", ':', 2, "foo", "bar", "baz", NULL);
+ t_string_list_split("foo:bar:", ':', -1, "foo", "bar", "", NULL);
+ t_string_list_split("", ':', -1, "", NULL);
+ t_string_list_split(":", ':', -1, "", "", NULL);
+}
+
+static void t_string_list_split_in_place(const char *data, const char *delim,
+ int maxsplit, ...)
+{
+ struct string_list expected_strings = STRING_LIST_INIT_DUP;
+ struct string_list list = STRING_LIST_INIT_NODUP;
+ char *string = xstrdup(data);
+ va_list ap;
+ int len;
+
+ va_start(ap, maxsplit);
+ t_vcreate_string_list_dup(&expected_strings, 0, ap);
+ va_end(ap);
+
+ string_list_clear(&list, 0);
+ len = string_list_split_in_place(&list, string, delim, maxsplit);
+ cl_assert_equal_i(len, expected_strings.nr);
+ t_string_list_equal(&list, &expected_strings);
+
+ free(string);
+ string_list_clear(&expected_strings, 0);
+ string_list_clear(&list, 0);
+}
+
+void test_string_list__split_in_place(void)
+{
+ t_string_list_split_in_place("foo:;:bar:;:baz:;:", ":;", -1,
+ "foo", "", "", "bar", "", "", "baz", "", "", "", NULL);
+ t_string_list_split_in_place("foo:;:bar:;:baz", ":;", 0,
+ "foo:;:bar:;:baz", NULL);
+ t_string_list_split_in_place("foo:;:bar:;:baz", ":;", 1,
+ "foo", ";:bar:;:baz", NULL);
+ t_string_list_split_in_place("foo:;:bar:;:baz", ":;", 2,
+ "foo", "", ":bar:;:baz", NULL);
+ t_string_list_split_in_place("foo:;:bar:;:", ":;", -1,
+ "foo", "", "", "bar", "", "", "", NULL);
+}
+
+static int prefix_cb(struct string_list_item *item, void *cb_data)
+{
+ const char *prefix = (const char *)cb_data;
+ return starts_with(item->string, prefix);
+}
+
+static void t_string_list_filter(struct string_list *list, ...)
+{
+ struct string_list expected_strings = STRING_LIST_INIT_DUP;
+ const char *prefix = "y";
+ va_list ap;
+
+ va_start(ap, list);
+ t_vcreate_string_list_dup(&expected_strings, 0, ap);
+ va_end(ap);
+
+ filter_string_list(list, 0, prefix_cb, (void *)prefix);
+ t_string_list_equal(list, &expected_strings);
+
+ string_list_clear(&expected_strings, 0);
+}
+
+void test_string_list__filter(void)
+{
+ struct string_list list = STRING_LIST_INIT_DUP;
+
+ t_create_string_list_dup(&list, 0, NULL);
+ t_string_list_filter(&list, NULL);
+
+ t_create_string_list_dup(&list, 0, "no", NULL);
+ t_string_list_filter(&list, NULL);
+
+ t_create_string_list_dup(&list, 0, "yes", NULL);
+ t_string_list_filter(&list, "yes", NULL);
+
+ t_create_string_list_dup(&list, 0, "no", "yes", NULL);
+ t_string_list_filter(&list, "yes", NULL);
+
+ t_create_string_list_dup(&list, 0, "yes", "no", NULL);
+ t_string_list_filter(&list, "yes", NULL);
+
+ t_create_string_list_dup(&list, 0, "y1", "y2", NULL);
+ t_string_list_filter(&list, "y1", "y2", NULL);
+
+ t_create_string_list_dup(&list, 0, "y2", "y1", NULL);
+ t_string_list_filter(&list, "y2", "y1", NULL);
+
+ t_create_string_list_dup(&list, 0, "x1", "x2", NULL);
+ t_string_list_filter(&list, NULL);
+
+ t_string_list_clear(&list, 0);
+}
+
+static void t_string_list_remove_duplicates(struct string_list *list, ...)
+{
+ struct string_list expected_strings = STRING_LIST_INIT_DUP;
+ va_list ap;
+
+ va_start(ap, list);
+ t_vcreate_string_list_dup(&expected_strings, 0, ap);
+ va_end(ap);
+
+ string_list_remove_duplicates(list, 0);
+ t_string_list_equal(list, &expected_strings);
+
+ string_list_clear(&expected_strings, 0);
+}
+
+void test_string_list__remove_duplicates(void)
+{
+ struct string_list list = STRING_LIST_INIT_DUP;
+
+ t_create_string_list_dup(&list, 0, NULL);
+ t_string_list_remove_duplicates(&list, NULL);
+
+ t_create_string_list_dup(&list, 0, "", NULL);
+ t_string_list_remove_duplicates(&list, "", NULL);
+
+ t_create_string_list_dup(&list, 0, "a", NULL);
+ t_string_list_remove_duplicates(&list, "a", NULL);
+
+ t_create_string_list_dup(&list, 0, "a", "a", NULL);
+ t_string_list_remove_duplicates(&list, "a", NULL);
+
+ t_create_string_list_dup(&list, 0, "a", "a", "a", NULL);
+ t_string_list_remove_duplicates(&list, "a", NULL);
+
+ t_create_string_list_dup(&list, 0, "a", "a", "b", NULL);
+ t_string_list_remove_duplicates(&list, "a", "b", NULL);
+
+ t_create_string_list_dup(&list, 0, "a", "b", "b", NULL);
+ t_string_list_remove_duplicates(&list, "a", "b", NULL);
+
+ t_create_string_list_dup(&list, 0, "a", "b", "c", NULL);
+ t_string_list_remove_duplicates(&list, "a", "b", "c", NULL);
+
+ t_create_string_list_dup(&list, 0, "a", "a", "b", "c", NULL);
+ t_string_list_remove_duplicates(&list, "a", "b", "c", NULL);
+
+ t_create_string_list_dup(&list, 0, "a", "b", "b", "c", NULL);
+ t_string_list_remove_duplicates(&list, "a", "b", "c", NULL);
+
+ t_create_string_list_dup(&list, 0, "a", "b", "c", "c", NULL);
+ t_string_list_remove_duplicates(&list, "a", "b", "c", NULL);
+
+ t_create_string_list_dup(&list, 0, "a", "a", "b", "b", "c", "c", NULL);
+ t_string_list_remove_duplicates(&list, "a", "b", "c", NULL);
+
+ t_create_string_list_dup(&list, 0, "a", "a", "a", "b", "b", "b",
+ "c", "c", "c", NULL);
+ t_string_list_remove_duplicates(&list, "a", "b", "c", NULL);
+
+ t_string_list_clear(&list, 0);
+}
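
The unit tests above drive filter_string_list() and string_list_remove_duplicates() through the va_list helpers. As a stand-alone illustration of the same pattern (the helper names below are made up, not part of the patch), a caller might write:

#include "git-compat-util.h"
#include "string-list.h"

/* keep only items whose string starts with the given prefix */
static int keep_with_prefix(struct string_list_item *item, void *cb_data)
{
	const char *prefix = cb_data;
	return starts_with(item->string, prefix); /* non-zero keeps the item */
}

static void filter_and_dedup_demo(void)
{
	struct string_list list = STRING_LIST_INIT_DUP;

	string_list_append(&list, "yes");
	string_list_append(&list, "no");
	string_list_append(&list, "yes");

	/* keep only the entries starting with "y": "yes", "yes" remain */
	filter_string_list(&list, 0, keep_with_prefix, (void *)"y");

	/* string_list_remove_duplicates() expects a sorted list */
	string_list_sort(&list);
	string_list_remove_duplicates(&list, 0); /* a single "yes" remains */

	string_list_clear(&list, 0);
}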
diff --git a/tag.c b/tag.c
index 05be39067c..1d52686ee1 100644
--- a/tag.c
+++ b/tag.c
@@ -5,7 +5,7 @@
#include "environment.h"
#include "tag.h"
#include "object-name.h"
-#include "object-store.h"
+#include "odb.h"
#include "commit.h"
#include "tree.h"
#include "blob.h"
@@ -52,7 +52,7 @@ int gpg_verify_tag(const struct object_id *oid, const char *name_to_report,
unsigned long size;
int ret;
- type = oid_object_info(the_repository, oid, NULL);
+ type = odb_read_object_info(the_repository->objects, oid, NULL);
if (type != OBJ_TAG)
return error("%s: cannot verify a non-tag object of type %s.",
name_to_report ?
@@ -60,7 +60,7 @@ int gpg_verify_tag(const struct object_id *oid, const char *name_to_report,
repo_find_unique_abbrev(the_repository, oid, DEFAULT_ABBREV),
type_name(type));
- buf = repo_read_object_file(the_repository, oid, &type, &size);
+ buf = odb_read_object(the_repository->objects, oid, &type, &size);
if (!buf)
return error("%s: unable to read file.",
name_to_report ?
@@ -222,8 +222,8 @@ int parse_tag(struct tag *item)
if (item->object.parsed)
return 0;
- data = repo_read_object_file(the_repository, &item->object.oid, &type,
- &size);
+ data = odb_read_object(the_repository->objects, &item->object.oid,
+ &type, &size);
if (!data)
return error("Could not read %s",
oid_to_hex(&item->object.oid));
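
The tag.c hunks follow the series-wide move from the repository-level object helpers to the odb_*() family that operates on the_repository->objects directly. A hedged before/after sketch of the read path, using only the call shapes visible above (read_tag_demo() itself is hypothetical):

#include "git-compat-util.h"
#include "odb.h"
#include "object.h"
#include "repository.h"

static void *read_tag_demo(const struct object_id *oid, unsigned long *size)
{
	enum object_type type;
	void *buf;

	/*
	 * Previously:
	 *   type = oid_object_info(the_repository, oid, NULL);
	 *   buf  = repo_read_object_file(the_repository, oid, &type, size);
	 */
	type = odb_read_object_info(the_repository->objects, oid, NULL);
	if (type != OBJ_TAG)
		return NULL;

	buf = odb_read_object(the_repository->objects, oid, &type, size);
	if (buf && type != OBJ_TAG) {
		free(buf);
		return NULL;
	}
	return buf;
}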
diff --git a/tmp-objdir.c b/tmp-objdir.c
index c38fbeb5e8..ae01eae9c4 100644
--- a/tmp-objdir.c
+++ b/tmp-objdir.c
@@ -10,14 +10,14 @@
#include "strbuf.h"
#include "strvec.h"
#include "quote.h"
-#include "object-store.h"
+#include "odb.h"
#include "repository.h"
struct tmp_objdir {
struct repository *repo;
struct strbuf path;
struct strvec env;
- struct object_directory *prev_odb;
+ struct odb_source *prev_source;
int will_destroy;
};
@@ -46,8 +46,8 @@ int tmp_objdir_destroy(struct tmp_objdir *t)
if (t == the_tmp_objdir)
the_tmp_objdir = NULL;
- if (t->prev_odb)
- restore_primary_odb(t->prev_odb, t->path.buf);
+ if (t->prev_source)
+ odb_restore_primary_source(t->repo->objects, t->prev_source, t->path.buf);
err = remove_dir_recursively(&t->path, 0);
@@ -276,11 +276,11 @@ int tmp_objdir_migrate(struct tmp_objdir *t)
if (!t)
return 0;
- if (t->prev_odb) {
- if (t->repo->objects->odb->will_destroy)
+ if (t->prev_source) {
+ if (t->repo->objects->sources->will_destroy)
BUG("migrating an ODB that was marked for destruction");
- restore_primary_odb(t->prev_odb, t->path.buf);
- t->prev_odb = NULL;
+ odb_restore_primary_source(t->repo->objects, t->prev_source, t->path.buf);
+ t->prev_source = NULL;
}
strbuf_addbuf(&src, &t->path);
@@ -304,24 +304,26 @@ const char **tmp_objdir_env(const struct tmp_objdir *t)
void tmp_objdir_add_as_alternate(const struct tmp_objdir *t)
{
- add_to_alternates_memory(t->path.buf);
+ odb_add_to_alternates_memory(t->repo->objects, t->path.buf);
}
void tmp_objdir_replace_primary_odb(struct tmp_objdir *t, int will_destroy)
{
- if (t->prev_odb)
+ if (t->prev_source)
BUG("the primary object database is already replaced");
- t->prev_odb = set_temporary_primary_odb(t->path.buf, will_destroy);
+ t->prev_source = odb_set_temporary_primary_source(t->repo->objects,
+ t->path.buf, will_destroy);
t->will_destroy = will_destroy;
}
struct tmp_objdir *tmp_objdir_unapply_primary_odb(void)
{
- if (!the_tmp_objdir || !the_tmp_objdir->prev_odb)
+ if (!the_tmp_objdir || !the_tmp_objdir->prev_source)
return NULL;
- restore_primary_odb(the_tmp_objdir->prev_odb, the_tmp_objdir->path.buf);
- the_tmp_objdir->prev_odb = NULL;
+ odb_restore_primary_source(the_tmp_objdir->repo->objects,
+ the_tmp_objdir->prev_source, the_tmp_objdir->path.buf);
+ the_tmp_objdir->prev_source = NULL;
return the_tmp_objdir;
}
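
In tmp-objdir.c the rename is mechanical: struct object_directory *prev_odb becomes struct odb_source *prev_source, and the set/restore helpers now take the object database they act on. A rough sketch of the intended lifecycle, assuming the entry points shown above (tmp_objdir_create()'s exact signature is an assumption here, and quarantine_demo() is hypothetical):

#include "git-compat-util.h"
#include "repository.h"
#include "tmp-objdir.h"

static int quarantine_demo(struct repository *repo, int keep)
{
	struct tmp_objdir *t = tmp_objdir_create(repo, "demo");

	if (!t)
		return -1;

	/* new objects are written into the temporary source from here on */
	tmp_objdir_replace_primary_odb(t, /* will_destroy */ !keep);

	/* ... write objects ... */

	if (keep)
		return tmp_objdir_migrate(t); /* restore the previous source, move objects in */
	return tmp_objdir_destroy(t);         /* restore the previous source, remove the dir */
}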
diff --git a/transport.c b/transport.c
index 6c2801bcbd..c123ac1e38 100644
--- a/transport.c
+++ b/transport.c
@@ -1243,7 +1243,7 @@ struct transport *transport_get(struct remote *remote, const char *url)
ret->smart_options->receivepack = remote->receivepack;
}
- ret->hash_algo = &hash_algos[GIT_HASH_SHA1];
+ ret->hash_algo = &hash_algos[GIT_HASH_SHA1_LEGACY];
return ret;
}
diff --git a/tree-walk.c b/tree-walk.c
index 90655d5237..e449a1320e 100644
--- a/tree-walk.c
+++ b/tree-walk.c
@@ -6,7 +6,7 @@
#include "gettext.h"
#include "hex.h"
#include "object-file.h"
-#include "object-store.h"
+#include "odb.h"
#include "trace2.h"
#include "tree.h"
#include "pathspec.h"
@@ -90,7 +90,7 @@ void *fill_tree_descriptor(struct repository *r,
void *buf = NULL;
if (oid) {
- buf = read_object_with_reference(r, oid, OBJ_TREE, &size, NULL);
+ buf = odb_read_object_peeled(r->objects, oid, OBJ_TREE, &size, NULL);
if (!buf)
die(_("unable to read tree (%s)"), oid_to_hex(oid));
}
@@ -611,7 +611,7 @@ int get_tree_entry(struct repository *r,
unsigned long size;
struct object_id root;
- tree = read_object_with_reference(r, tree_oid, OBJ_TREE, &size, &root);
+ tree = odb_read_object_peeled(r->objects, tree_oid, OBJ_TREE, &size, &root);
if (!tree)
return -1;
@@ -681,10 +681,8 @@ enum get_oid_result get_tree_entry_follow_symlinks(struct repository *r,
void *tree;
struct object_id root;
unsigned long size;
- tree = read_object_with_reference(r,
- &current_tree_oid,
- OBJ_TREE, &size,
- &root);
+ tree = odb_read_object_peeled(r->objects, &current_tree_oid,
+ OBJ_TREE, &size, &root);
if (!tree)
goto done;
@@ -795,9 +793,9 @@ enum get_oid_result get_tree_entry_follow_symlinks(struct repository *r,
*/
retval = DANGLING_SYMLINK;
- contents = repo_read_object_file(r,
- &current_tree_oid, &type,
- &link_len);
+ contents = odb_read_object(r->objects,
+ &current_tree_oid, &type,
+ &link_len);
if (!contents)
goto done;
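
read_object_with_reference() becomes odb_read_object_peeled(), which, as used above, peels tags (and commits, when a tree is requested) down to the requested type. A minimal sketch limited to the calls used in the hunks above (tree_size_demo() is illustrative only):

#include "git-compat-util.h"
#include "gettext.h"
#include "hex.h"
#include "object.h"
#include "odb.h"
#include "repository.h"

/* peel whatever `oid` names down to a tree and report the tree's size */
static unsigned long tree_size_demo(struct repository *r, const struct object_id *oid)
{
	unsigned long size;
	struct object_id root;
	void *tree = odb_read_object_peeled(r->objects, oid, OBJ_TREE, &size, &root);

	if (!tree)
		die(_("unable to read tree (%s)"), oid_to_hex(oid));
	free(tree);
	return size;
}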
diff --git a/tree.c b/tree.c
index b85f56267f..1ef743d90f 100644
--- a/tree.c
+++ b/tree.c
@@ -4,7 +4,7 @@
#include "hex.h"
#include "tree.h"
#include "object-name.h"
-#include "object-store.h"
+#include "odb.h"
#include "commit.h"
#include "alloc.h"
#include "tree-walk.h"
@@ -193,8 +193,8 @@ int parse_tree_gently(struct tree *item, int quiet_on_missing)
if (item->object.parsed)
return 0;
- buffer = repo_read_object_file(the_repository, &item->object.oid,
- &type, &size);
+ buffer = odb_read_object(the_repository->objects, &item->object.oid,
+ &type, &size);
if (!buffer)
return quiet_on_missing ? -1 :
error("Could not read %s",
diff --git a/unpack-trees.c b/unpack-trees.c
index 471837f032..f38c761ab9 100644
--- a/unpack-trees.c
+++ b/unpack-trees.c
@@ -26,7 +26,7 @@
#include "symlinks.h"
#include "trace2.h"
#include "fsmonitor.h"
-#include "object-store.h"
+#include "odb.h"
#include "promisor-remote.h"
#include "entry.h"
#include "parallel-checkout.h"
diff --git a/upload-pack.c b/upload-pack.c
index 26f29b85b5..4f26f6afc7 100644
--- a/upload-pack.c
+++ b/upload-pack.c
@@ -10,7 +10,7 @@
#include "pkt-line.h"
#include "sideband.h"
#include "repository.h"
-#include "object-store.h"
+#include "odb.h"
#include "oid-array.h"
#include "object.h"
#include "commit.h"
@@ -509,7 +509,7 @@ static int got_oid(struct upload_pack_data *data,
{
if (get_oid_hex(hex, oid))
die("git upload-pack: expected SHA1 object, got '%s'", hex);
- if (!has_object(the_repository, oid, 0))
+ if (!odb_has_object(the_repository->objects, oid, 0))
return -1;
return do_got_oid(data, oid);
}
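
Likewise, has_object(repo, oid, flags) becomes odb_has_object(repo->objects, oid, flags); the flag bits themselves are unchanged. A small sketch contrasting a cheap existence check with the more thorough variant used in the walker.c hunks further below (have_it() is an illustrative helper, not part of the patch):

#include "git-compat-util.h"
#include "odb.h"
#include "repository.h"

static int have_it(const struct object_id *oid, int thorough)
{
	unsigned flags = thorough
		? HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR
		: 0;

	return odb_has_object(the_repository->objects, oid, flags);
}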
diff --git a/usage.c b/usage.c
index 38b46bbbfe..81913236a4 100644
--- a/usage.c
+++ b/usage.c
@@ -67,6 +67,8 @@ static NORETURN void usage_builtin(const char *err, va_list params)
static void die_message_builtin(const char *err, va_list params)
{
+ if (!err)
+ return;
trace2_cmd_error_va(err, params);
vreportf(_("fatal: "), err, params);
}
@@ -372,3 +374,15 @@ void bug_fl(const char *file, int line, const char *fmt, ...)
trace2_cmd_error_va(fmt, ap);
va_end(ap);
}
+
+NORETURN void you_still_use_that(const char *command_name)
+{
+ fprintf(stderr,
+ _("'%s' is nominated for removal.\n"
+ "If you still use this command, please add an extra\n"
+ "option, '--i-still-use-this', on the command line\n"
+ "and let us know you still use it by sending an e-mail\n"
+ "to <git@vger.kernel.org>. Thanks.\n"),
+ command_name);
+ die(_("refusing to run without --i-still-use-this"));
+}
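
usage.c picks up two changes: die_message_builtin() now returns quietly when given a NULL format instead of passing it on to vreportf(), and a shared you_still_use_that() helper is added for commands nominated for removal. A hedged sketch of the expected call pattern follows; the option wiring and command name are assumed, not taken from this diff:

#include "git-compat-util.h"
#include "gettext.h"
#include "parse-options.h"

/* hypothetical call site in a command nominated for removal */
static int run_deprecated_demo(int argc, const char **argv, const char *prefix)
{
	int i_still_use_this = 0;
	const char * const usage_str[] = {
		N_("git demo-deprecated [--i-still-use-this]"),
		NULL
	};
	struct option options[] = {
		OPT_BOOL(0, "i-still-use-this", &i_still_use_this,
			 N_("acknowledge that this command is slated for removal")),
		OPT_END(),
	};

	argc = parse_options(argc, argv, prefix, options, usage_str, 0);
	if (!i_still_use_this)
		you_still_use_that("git demo-deprecated"); /* prints the notice, then dies */

	/* ... the command's normal behaviour ... */
	return 0;
}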
diff --git a/userdiff.c b/userdiff.c
index 05776ccd10..fe710a68bf 100644
--- a/userdiff.c
+++ b/userdiff.c
@@ -327,6 +327,10 @@ PATTERNS("python",
"|[-+0-9.e]+[jJlL]?|0[xX]?[0-9a-fA-F]+[lL]?"
"|[-+*/<>%&^|=!]=|//=?|<<=?|>>=?|\\*\\*=?"),
/* -- */
+PATTERNS("r",
+ "^[ \t]*([a-zA-z][a-zA-Z0-9_.]*[ \t]*(<-|=)[ \t]*function.*)$",
+ /* -- */
+ "[^ \t]+"),
PATTERNS("ruby",
"^[ \t]*((class|module|def)[ \t].*)$",
/* -- */
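
The new built-in "r" driver selects R function definitions for hunk headers and treats runs of non-blank characters as word-diff tokens. Purely as illustration (these lines are not part of the patch):

/*
 * Lines the "r" funcname pattern is intended to match:
 *
 *   fit_model <- function(x, y) {          matched: '<-' assignment to a function
 *   summary.myclass = function(object)     matched: '.' in the name, '=' assignment
 *
 * and one it is not:
 *
 *   x <- c(1, 2, 3)                        no "function" after the assignment
 */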
diff --git a/walker.c b/walker.c
index b470d43e54..8073754517 100644
--- a/walker.c
+++ b/walker.c
@@ -5,7 +5,7 @@
#include "hex.h"
#include "walker.h"
#include "repository.h"
-#include "object-store.h"
+#include "odb.h"
#include "commit.h"
#include "strbuf.h"
#include "tree.h"
@@ -14,6 +14,7 @@
#include "blob.h"
#include "refs.h"
#include "progress.h"
+#include "prio-queue.h"
static struct object_id current_commit_oid;
@@ -78,7 +79,7 @@ static int process_tree(struct walker *walker, struct tree *tree)
#define SEEN (1U << 1)
#define TO_SCAN (1U << 2)
-static struct commit_list *complete = NULL;
+static struct prio_queue complete = { compare_commits_by_commit_date };
static int process_commit(struct walker *walker, struct commit *commit)
{
@@ -87,7 +88,10 @@ static int process_commit(struct walker *walker, struct commit *commit)
if (repo_parse_commit(the_repository, commit))
return -1;
- while (complete && complete->item->date >= commit->date) {
+ while (complete.nr) {
+ struct commit *item = prio_queue_peek(&complete);
+ if (item->date < commit->date)
+ break;
pop_most_recent_commit(&complete, COMPLETE);
}
@@ -150,8 +154,8 @@ static int process(struct walker *walker, struct object *obj)
return 0;
obj->flags |= SEEN;
- if (has_object(the_repository, &obj->oid,
- HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR)) {
+ if (odb_has_object(the_repository->objects, &obj->oid,
+ HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR)) {
/* We already have it, so we should scan it now. */
obj->flags |= TO_SCAN;
}
@@ -233,7 +237,7 @@ static int mark_complete(const char *path UNUSED,
if (commit) {
commit->object.flags |= COMPLETE;
- commit_list_insert(commit, &complete);
+ prio_queue_put(&complete, commit);
}
return 0;
}
@@ -302,7 +306,6 @@ int walker_fetch(struct walker *walker, int targets, char **target,
if (!walker->get_recover) {
refs_for_each_ref(get_main_ref_store(the_repository),
mark_complete, NULL);
- commit_list_sort_by_date(&complete);
}
for (i = 0; i < targets; i++) {
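
walker.c replaces the date-sorted commit_list of known-complete commits with a prio_queue keyed on committer date, so insertion is O(log n) and the explicit commit_list_sort_by_date() pass above becomes unnecessary. A stand-alone sketch of the queue discipline, using the prio-queue calls seen above plus prio_queue_get() and clear_prio_queue() (the demo functions are illustrative, not from the patch):

#include "git-compat-util.h"
#include "commit.h"
#include "prio-queue.h"

/* pop every queued commit at least as recent as `cutoff` */
static void drop_newer_than(struct prio_queue *complete, timestamp_t cutoff)
{
	while (complete->nr) {
		struct commit *newest = prio_queue_peek(complete);

		if (newest->date < cutoff)
			break;            /* everything left is older than the cutoff */
		prio_queue_get(complete); /* pops the most recent commit first */
	}
}

static void queue_demo(struct commit *a, struct commit *b, timestamp_t cutoff)
{
	struct prio_queue complete = { compare_commits_by_commit_date };

	prio_queue_put(&complete, a);
	prio_queue_put(&complete, b);
	drop_newer_than(&complete, cutoff);
	clear_prio_queue(&complete);
}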
diff --git a/xdiff-interface.c b/xdiff-interface.c
index 1edcd319e6..0e5d38c960 100644
--- a/xdiff-interface.c
+++ b/xdiff-interface.c
@@ -5,7 +5,7 @@
#include "gettext.h"
#include "config.h"
#include "hex.h"
-#include "object-store.h"
+#include "odb.h"
#include "strbuf.h"
#include "xdiff-interface.h"
#include "xdiff/xtypes.h"
@@ -187,7 +187,7 @@ void read_mmblob(mmfile_t *ptr, const struct object_id *oid)
return;
}
- ptr->ptr = repo_read_object_file(the_repository, oid, &type, &size);
+ ptr->ptr = odb_read_object(the_repository->objects, oid, &type, &size);
if (!ptr->ptr || type != OBJ_BLOB)
die("unable to read blob object %s", oid_to_hex(oid));
ptr->size = size;