summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--.clang-format2
-rw-r--r--.github/workflows/main.yml4
-rw-r--r--.gitignore2
-rw-r--r--.gitlab-ci.yml4
-rw-r--r--.mailmap1
-rw-r--r--Cargo.toml9
-rw-r--r--Documentation/BreakingChanges.adoc47
-rw-r--r--Documentation/RelNotes/2.52.0.adoc41
-rw-r--r--Documentation/config/alias.adoc3
-rw-r--r--Documentation/git-fast-import.adoc5
-rw-r--r--Documentation/git-pack-refs.adoc53
-rw-r--r--Documentation/git-push.adoc199
-rw-r--r--Documentation/git-refs.adoc10
-rw-r--r--Documentation/git-whatchanged.adoc8
-rw-r--r--Documentation/git.adoc3
-rw-r--r--Documentation/gitcredentials.adoc15
-rw-r--r--Documentation/howto/meson.build4
-rw-r--r--Documentation/meson.build8
-rw-r--r--Documentation/pack-refs-options.adoc52
-rw-r--r--Documentation/technical/meson.build4
-rw-r--r--Makefile218
-rw-r--r--builtin/add.c7
-rw-r--r--builtin/backfill.c2
-rw-r--r--builtin/cat-file.c3
-rw-r--r--builtin/config.c20
-rw-r--r--builtin/count-objects.c3
-rw-r--r--builtin/fast-export.c19
-rw-r--r--builtin/fast-import.c73
-rw-r--r--builtin/fsck.c11
-rw-r--r--builtin/gc.c14
-rw-r--r--builtin/grep.c2
-rw-r--r--builtin/index-pack.c10
-rw-r--r--builtin/log.c8
-rw-r--r--builtin/pack-objects.c38
-rw-r--r--builtin/pack-redundant.c8
-rw-r--r--builtin/pack-refs.c54
-rw-r--r--builtin/receive-pack.c2
-rw-r--r--builtin/reflog.c2
-rw-r--r--builtin/refs.c17
-rw-r--r--builtin/repack.c11
-rw-r--r--builtin/unpack-objects.c5
-rw-r--r--builtin/update-index.c29
-rw-r--r--bulk-checkin.c403
-rw-r--r--bulk-checkin.h61
-rw-r--r--cache-tree.c5
-rwxr-xr-xci/install-dependencies.sh8
-rwxr-xr-xci/run-build-and-tests.sh31
-rwxr-xr-xci/test-documentation.sh4
-rw-r--r--connected.c5
-rw-r--r--contrib/contacts/meson.build4
-rw-r--r--contrib/subtree/meson.build4
-rw-r--r--dir.c18
-rw-r--r--fetch-pack.c4
-rw-r--r--git-compat-util.h2
-rw-r--r--git.c91
-rwxr-xr-xgitk-git/gitk283
-rw-r--r--gpg-interface.c17
-rw-r--r--gpg-interface.h15
-rw-r--r--help.c6
-rw-r--r--http-backend.c5
-rw-r--r--http-push.c3
-rw-r--r--http.c5
-rw-r--r--http.h12
-rw-r--r--imap-send.c2
-rw-r--r--meson.build27
-rw-r--r--meson_options.txt2
-rw-r--r--midx.c29
-rw-r--r--midx.h1
-rw-r--r--object-file.c404
-rw-r--r--object-file.h16
-rw-r--r--object-name.c6
-rw-r--r--odb.c50
-rw-r--r--odb.h82
-rw-r--r--pack-bitmap.c4
-rw-r--r--pack-objects.c4
-rw-r--r--pack-refs.c56
-rw-r--r--pack-refs.h23
-rw-r--r--packfile.c283
-rw-r--r--packfile.h125
-rw-r--r--read-cache.c11
-rw-r--r--refs.c5
-rw-r--r--refs.h6
-rw-r--r--refs/files-backend.c10
-rw-r--r--refs/ref-cache.c2
-rw-r--r--refs/refs-internal.h3
-rw-r--r--refs/reftable-backend.c7
-rw-r--r--remote-curl.c14
-rw-r--r--server-info.c3
-rw-r--r--shared.mak1
-rwxr-xr-xsrc/cargo-meson.sh32
-rw-r--r--src/lib.rs1
-rw-r--r--src/meson.build41
-rw-r--r--src/varint.rs92
-rw-r--r--t/helper/test-find-pack.c2
-rw-r--r--t/helper/test-pack-deltas.c10
-rw-r--r--t/helper/test-pack-mtimes.c2
-rw-r--r--t/meson.build2
-rw-r--r--t/pack-refs-tests.sh431
-rwxr-xr-xt/t0014-alias.sh57
-rwxr-xr-xt/t0300-credentials.sh19
-rwxr-xr-xt/t0601-reffiles-pack-refs.sh430
-rwxr-xr-xt/t1300-config.sh349
-rwxr-xr-xt/t1421-reflog-write.sh36
-rwxr-xr-xt/t1463-refs-optimize.sh17
-rwxr-xr-xt/t6302-for-each-ref-filter.sh65
-rwxr-xr-xt/t9305-fast-import-signatures.sh106
-rw-r--r--transport-helper.c2
-rw-r--r--usage.c33
-rw-r--r--varint.c6
-rw-r--r--varint.h4
110 files changed, 2952 insertions, 1977 deletions
diff --git a/.clang-format b/.clang-format
index dcfd0aad60..86b4fe33e5 100644
--- a/.clang-format
+++ b/.clang-format
@@ -149,7 +149,7 @@ SpaceBeforeCaseColon: false
# f();
# }
# }
-SpaceBeforeParens: ControlStatements
+SpaceBeforeParens: ControlStatementsExceptControlMacros
# Don't insert spaces inside empty '()'
SpaceInEmptyParentheses: false
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index d122e79415..393ea4d1cc 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -379,6 +379,8 @@ jobs:
- jobname: linux-breaking-changes
cc: gcc
image: ubuntu:rolling
+ - jobname: fedora-breaking-changes-meson
+ image: fedora:latest
- jobname: linux-leaks
image: ubuntu:rolling
cc: gcc
@@ -396,8 +398,6 @@ jobs:
# Supported until 2025-04-02.
- jobname: linux32
image: i386/ubuntu:focal
- - jobname: pedantic
- image: fedora:latest
# A RHEL 8 compatible distro. Supported until 2029-05-31.
- jobname: almalinux-8
image: almalinux:8
diff --git a/.gitignore b/.gitignore
index 802ce70e48..78a45cb5be 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,4 +1,6 @@
/fuzz_corpora
+/target/
+/Cargo.lock
/GIT-BUILD-DIR
/GIT-BUILD-OPTIONS
/GIT-CFLAGS
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 85401b34a5..f7d57d1ee9 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -45,6 +45,8 @@ test:linux:
- jobname: linux-breaking-changes
image: ubuntu:20.04
CC: gcc
+ - jobname: fedora-breaking-changes-meson
+ image: fedora:latest
- jobname: linux-TEST-vars
image: ubuntu:20.04
CC: gcc
@@ -58,8 +60,6 @@ test:linux:
- jobname: linux-asan-ubsan
image: ubuntu:rolling
CC: clang
- - jobname: pedantic
- image: fedora:latest
- jobname: linux-musl-meson
image: alpine:latest
- jobname: linux32
diff --git a/.mailmap b/.mailmap
index afa21abbaa..7b3198171f 100644
--- a/.mailmap
+++ b/.mailmap
@@ -126,6 +126,7 @@ Jon Loeliger <jdl@jdl.com> <jdl@freescale.org>
Jon Seymour <jon.seymour@gmail.com> <jon@blackcubes.dyndns.org>
Jonathan Nieder <jrnieder@gmail.com> <jrnieder@uchicago.edu>
Jonathan del Strother <jon.delStrother@bestbefore.tv> <maillist@steelskies.com>
+Jonathan Tan <jonathantanmy@fastmail.com> <jonathantanmy@google.com>
Josh Triplett <josh@joshtriplett.org> <josh@freedesktop.org>
Josh Triplett <josh@joshtriplett.org> <josht@us.ibm.com>
Julian Phillips <julian@quantumfyre.co.uk> <jp3@quantumfyre.co.uk>
diff --git a/Cargo.toml b/Cargo.toml
new file mode 100644
index 0000000000..45c9b34981
--- /dev/null
+++ b/Cargo.toml
@@ -0,0 +1,9 @@
+[package]
+name = "gitcore"
+version = "0.1.0"
+edition = "2018"
+
+[lib]
+crate-type = ["staticlib"]
+
+[dependencies]
diff --git a/Documentation/BreakingChanges.adoc b/Documentation/BreakingChanges.adoc
index 0cba20fadb..90b53abcea 100644
--- a/Documentation/BreakingChanges.adoc
+++ b/Documentation/BreakingChanges.adoc
@@ -171,6 +171,51 @@ JGit, libgit2 and Gitoxide need to support it.
matches the default branch name used in new repositories by many of the
big Git forges.
+* Git will require Rust as a mandatory part of the build process. While Git
+ already started to adopt Rust in Git 2.49, all parts written in Rust are
+ optional for the time being. This includes:
++
+ ** The Rust wrapper around libgit.a that is part of "contrib/" and which has
+ been introduced in Git 2.49.
+ ** Subsystems that have an alternative implementation in Rust to test
+ interoperability between our C and Rust codebase.
+ ** Newly written features that are not mission critical for a fully functional
+ Git client.
++
+These changes are meant as test balloons to allow distributors of Git to prepare
+for Rust becoming a mandatory part of the build process. There will be multiple
+milestones for the introduction of Rust:
++
+--
+1. Initially, with Git 2.52, support for Rust will be auto-detected by Meson and
+ disabled in our Makefile so that the project can sort out the initial
+ infrastructure.
+2. In Git 2.53, both build systems will default-enable support for Rust.
+ Consequently, builds will break by default if Rust is not available on the
+ build host. The use of Rust can still be explicitly disabled via build
+ flags.
+3. In Git 3.0, the build options will be removed and support for Rust is
+ mandatory.
+--
++
+You can explicitly ask both Meson and our Makefile-based system to enable Rust
+by saying `meson configure -Drust=enabled` and `make WITH_RUST=YesPlease`,
+respectively.
++
+The Git project will declare the last version before Git 3.0 to be a long-term
+support release. This long-term release will receive important bug fixes for at
+least four release cycles and security fixes for six release cycles. The Git
+project will hand over maintainership of the long-term release to distributors
+in case they need to extend the life of that long-term release even further.
+Details of how this long-term release will be handed over to the community will
+be discussed once the Git project decides to stop officially supporting it.
++
+We will evaluate the impact on downstream distributions before making Rust
+mandatory in Git 3.0. If we see that the impact on downstream distributions
+would be significant, we may decide to defer this change to a subsequent minor
+release. This evaluation will also take into account our own experience with
+how painful it is to keep Rust an optional component.
+
=== Removals
* Support for grafting commits has long been superseded by git-replace(1).
@@ -241,7 +286,7 @@ These features will be removed.
equivalent `git log --raw`. We have nominated the command for
removal, have changed the command to refuse to work unless the
`--i-still-use-this` option is given, and asked the users to report
- when they do so. So far there hasn't been a single complaint.
+ when they do so.
+
The command will be removed.
diff --git a/Documentation/RelNotes/2.52.0.adoc b/Documentation/RelNotes/2.52.0.adoc
index 1e5281188f..7fb1a3ce18 100644
--- a/Documentation/RelNotes/2.52.0.adoc
+++ b/Documentation/RelNotes/2.52.0.adoc
@@ -44,6 +44,9 @@ UI, Workflows & Features
* The stash.index configuration variable can be set to make "git stash
pop/apply" pretend that it was invoked with "--index".
+ * "git fast-import" learned that "--signed-commits=<how>" option that
+ corresponds to that of "git fast-export".
+
Performance, Internal Implementation, Development Support etc.
--------------------------------------------------------------
@@ -83,6 +86,10 @@ Performance, Internal Implementation, Development Support etc.
singleton variable, which has been updated to pass an instance
throughout the callchain.
+ * The work to build on the bulk-checkin infrastructure to create many
+ objects at once in a transaction and to abstract it into the
+ generic object layer continues.
+
* CodingGuidelines now spells out how bitfields are to be written.
* Adjust to the way newer versions of cURL selectivel enables tracing
@@ -102,6 +109,13 @@ Performance, Internal Implementation, Development Support etc.
while the code has been cleaned up to prevent similar bugs in the
future.
+ * The build procedure based on meson learned a target to only build
+ documentation, similar to "make doc".
+ (merge ff4ec8ded0 ps/meson-build-docs later to maint).
+
+ * Dip our toes a bit to (optionally) use Rust implemented helper
+ called from our C code.
+
Fixes since v2.51
-----------------
@@ -259,6 +273,31 @@ including security updates, are included in this release.
* "git last-modified" operating in non-recursive mode used to trigger
a BUG(), which has been corrected.
+ * The use of "git config get" command to learn how ANSI color
+ sequence is for a particular type, e.g., "git config get
+ --type=color --default=reset no.such.thing", isn't very ergonomic.
+ (merge e4dabf4fd6 ps/config-get-color-fixes later to maint).
+
+ * The "do you still use it?" message given by a command that is
+ deeply deprecated and allow us to suggest alternatives has been
+ updated.
+ (merge 54a60e5b38 kh/you-still-use-whatchanged-fix later to maint).
+
+ * Clang-format update to let our control macros formatted the way we
+ had them traditionally, e.g., "for_each_string_list_item()" without
+ space before the parentheses.
+ (merge 3721541d35 jt/clang-format-foreach-wo-space-before-parenthesis later to maint).
+
+ * A few places where an size_t value was cast to curl_off_t without
+ checking has been updated to use the existing helper function.
+ (merge ecc5749578 js/curl-off-t-fixes later to maint).
+
+ * "git reflog write" did not honor the configured user.name/email
+ which has been corrected.
+
+ * Handling of an empty subdirectory of .git/refs/ in the ref-files
+ backend has been corrected.
+
* Other code cleanup, docfix, build fix, etc.
(merge 823d537fa7 kh/doc-git-log-markup-fix later to maint).
(merge cf7efa4f33 rj/t6137-cygwin-fix later to maint).
@@ -284,3 +323,5 @@ including security updates, are included in this release.
(merge ac7096723b jc/doc-includeif-hasconfig-remote-url-fix later to maint).
(merge fafc9b08b8 ag/doc-sendmail-gmail-example-update later to maint).
(merge a66fc22bf9 rs/get-oid-with-flags-cleanup later to maint).
+ (merge e1d062e8ba ps/odb-clean-stale-wrappers later to maint).
+ (merge fdd21ba116 mh/doc-credential-url-prefix later to maint).
diff --git a/Documentation/config/alias.adoc b/Documentation/config/alias.adoc
index 95825354bf..80ce17d2de 100644
--- a/Documentation/config/alias.adoc
+++ b/Documentation/config/alias.adoc
@@ -3,7 +3,8 @@ alias.*::
after defining `alias.last = cat-file commit HEAD`, the invocation
`git last` is equivalent to `git cat-file commit HEAD`. To avoid
confusion and troubles with script usage, aliases that
- hide existing Git commands are ignored. Arguments are split by
+ hide existing Git commands are ignored except for deprecated
+ commands. Arguments are split by
spaces, the usual shell quoting and escaping are supported.
A quote pair or a backslash can be used to quote them.
+
diff --git a/Documentation/git-fast-import.adoc b/Documentation/git-fast-import.adoc
index 6e095b02a1..85ed7a7270 100644
--- a/Documentation/git-fast-import.adoc
+++ b/Documentation/git-fast-import.adoc
@@ -66,6 +66,11 @@ fast-import stream! This option is enabled automatically for
remote-helpers that use the `import` capability, as they are
already trusted to run their own code.
+--signed-commits=(verbatim|warn-verbatim|warn-strip|strip|abort)::
+ Specify how to handle signed commits. Behaves in the same way
+ as the same option in linkgit:git-fast-export[1], except that
+ default is 'verbatim' (instead of 'abort').
+
Options for Frontends
~~~~~~~~~~~~~~~~~~~~~
diff --git a/Documentation/git-pack-refs.adoc b/Documentation/git-pack-refs.adoc
index 42b90051e6..fde9f2f294 100644
--- a/Documentation/git-pack-refs.adoc
+++ b/Documentation/git-pack-refs.adoc
@@ -45,58 +45,7 @@ unpacked.
OPTIONS
-------
---all::
-
-The command by default packs all tags and refs that are already
-packed, and leaves other refs
-alone. This is because branches are expected to be actively
-developed and packing their tips does not help performance.
-This option causes all refs to be packed as well, with the exception
-of hidden refs, broken refs, and symbolic refs. Useful for a repository
-with many branches of historical interests.
-
---no-prune::
-
-The command usually removes loose refs under `$GIT_DIR/refs`
-hierarchy after packing them. This option tells it not to.
-
---auto::
-
-Pack refs as needed depending on the current state of the ref database. The
-behavior depends on the ref format used by the repository and may change in the
-future.
-+
- - "files": Loose references are packed into the `packed-refs` file
- based on the ratio of loose references to the size of the
- `packed-refs` file. The bigger the `packed-refs` file, the more loose
- references need to exist before we repack.
-+
- - "reftable": Tables are compacted such that they form a geometric
- sequence. For two tables N and N+1, where N+1 is newer, this
- maintains the property that N is at least twice as big as N+1. Only
- tables that violate this property are compacted.
-
---include <pattern>::
-
-Pack refs based on a `glob(7)` pattern. Repetitions of this option
-accumulate inclusion patterns. If a ref is both included in `--include` and
-`--exclude`, `--exclude` takes precedence. Using `--include` will preclude all
-tags from being included by default. Symbolic refs and broken refs will never
-be packed. When used with `--all`, it will be a noop. Use `--no-include` to clear
-and reset the list of patterns.
-
---exclude <pattern>::
-
-Do not pack refs matching the given `glob(7)` pattern. Repetitions of this option
-accumulate exclusion patterns. Use `--no-exclude` to clear and reset the list of
-patterns. If a ref is already packed, including it with `--exclude` will not
-unpack it.
-+
-When used with `--all`, pack only loose refs which do not match any of
-the provided `--exclude` patterns.
-+
-When used with `--include`, refs provided to `--include`, minus refs that are
-provided to `--exclude` will be packed.
+include::pack-refs-options.adoc[]
BUGS
diff --git a/Documentation/git-push.adoc b/Documentation/git-push.adoc
index 5f5408e2c0..cc5cadcdfc 100644
--- a/Documentation/git-push.adoc
+++ b/Documentation/git-push.adoc
@@ -55,96 +55,66 @@ OPTIONS[[OPTIONS]]
<refspec>...::
Specify what destination ref to update with what source object.
- The format of a <refspec> parameter is an optional plus
- `+`, followed by the source object <src>, followed
- by a colon `:`, followed by the destination ref <dst>.
-+
-The <src> is often the name of the branch you would want to push, but
-it can be any arbitrary "SHA-1 expression", such as `master~4` or
-`HEAD` (see linkgit:gitrevisions[7]).
-+
-The <dst> tells which ref on the remote side is updated with this
-push. Arbitrary expressions cannot be used here, an actual ref must
-be named.
-If `git push [<repository>]` without any `<refspec>` argument is set to
-update some ref at the destination with `<src>` with
-`remote.<repository>.push` configuration variable, `:<dst>` part can
-be omitted--such a push will update a ref that `<src>` normally updates
-without any `<refspec>` on the command line. Otherwise, missing
-`:<dst>` means to update the same ref as the `<src>`.
-+
-If <dst> doesn't start with `refs/` (e.g. `refs/heads/master`) we will
-try to infer where in `refs/*` on the destination <repository> it
-belongs based on the type of <src> being pushed and whether <dst>
-is ambiguous.
+
---
-* If <dst> unambiguously refers to a ref on the <repository> remote,
- then push to that ref.
-
-* If <src> resolves to a ref starting with refs/heads/ or refs/tags/,
- then prepend that to <dst>.
-
-* Other ambiguity resolutions might be added in the future, but for
- now any other cases will error out with an error indicating what we
- tried, and depending on the `advice.pushUnqualifiedRefname`
- configuration (see linkgit:git-config[1]) suggest what refs/
- namespace you may have wanted to push to.
-
---
-+
-The object referenced by <src> is used to update the <dst> reference
-on the remote side. Whether this is allowed depends on where in
-`refs/*` the <dst> reference lives as described in detail below, in
-those sections "update" means any modifications except deletes, which
-as noted after the next few sections are treated differently.
-+
-The `refs/heads/*` namespace will only accept commit objects, and
-updates only if they can be fast-forwarded.
-+
-The `refs/tags/*` namespace will accept any kind of object (as
-commits, trees and blobs can be tagged), and any updates to them will
-be rejected.
-+
-It's possible to push any type of object to any namespace outside of
-`refs/{tags,heads}/*`. In the case of tags and commits, these will be
-treated as if they were the commits inside `refs/heads/*` for the
-purposes of whether the update is allowed.
-+
-I.e. a fast-forward of commits and tags outside `refs/{tags,heads}/*`
-is allowed, even in cases where what's being fast-forwarded is not a
-commit, but a tag object which happens to point to a new commit which
-is a fast-forward of the commit the last tag (or commit) it's
-replacing. Replacing a tag with an entirely different tag is also
-allowed, if it points to the same commit, as well as pushing a peeled
-tag, i.e. pushing the commit that existing tag object points to, or a
-new tag object which an existing commit points to.
-+
-Tree and blob objects outside of `refs/{tags,heads}/*` will be treated
-the same way as if they were inside `refs/tags/*`, any update of them
-will be rejected.
-+
-All of the rules described above about what's not allowed as an update
-can be overridden by adding an the optional leading `+` to a refspec
-(or using `--force` command line option). The only exception to this
-is that no amount of forcing will make the `refs/heads/*` namespace
-accept a non-commit object. Hooks and configuration can also override
-or amend these rules, see e.g. `receive.denyNonFastForwards` in
-linkgit:git-config[1] and `pre-receive` and `update` in
-linkgit:githooks[5].
-+
-Pushing an empty <src> allows you to delete the <dst> ref from the
-remote repository. Deletions are always accepted without a leading `+`
-in the refspec (or `--force`), except when forbidden by configuration
-or hooks. See `receive.denyDeletes` in linkgit:git-config[1] and
-`pre-receive` and `update` in linkgit:githooks[5].
-+
-The special refspec `:` (or `+:` to allow non-fast-forward updates)
-directs Git to push "matching" branches: for every branch that exists on
-the local side, the remote side is updated if a branch of the same name
-already exists on the remote side.
-+
-`tag <tag>` means the same as `refs/tags/<tag>:refs/tags/<tag>`.
+The format for a refspec is [+]<src>[:<dst>], for example `main`,
+`main:other`, or `HEAD^:refs/heads/main`.
++
+The `<src>` is often the name of the local branch to push, but it can be
+any arbitrary "SHA-1 expression" (see linkgit:gitrevisions[7]).
++
+The `<dst>` determines what ref to update on the remote side. It must be the
+name of a branch, tag, or other ref, not an arbitrary expression.
++
+The `+` is optional and does the same thing as `--force`.
++
+You can write a refspec using the fully expanded form (for
+example `refs/heads/main:refs/heads/main`) which specifies the exact source
+and destination, or with a shorter form (for example `main` or
+`main:other`). Here are the rules for how refspecs are expanded,
+as well as various other special refspec forms:
++
+ * `<src>` without a `:<dst>` means to update the same ref as the
+ `<src>`, unless the `remote.<repository>.push` configuration specifies a
+ different <dst>. For example, if `main` is a branch, then the refspec
+ `main` expands to `main:refs/heads/main`.
+ * If `<dst>` unambiguously refers to a ref on the <repository> remote,
+ then expand it to that ref. For example, if `v1.0` is a tag on the
+ remote, then `HEAD:v1.0` expands to `HEAD:refs/tags/v1.0`.
+ * If `<src>` resolves to a ref starting with `refs/heads/` or `refs/tags/`,
+ then prepend that to <dst>. For example, if `main` is a branch, then
+ `main:other` expands to `main:refs/heads/other`
+ * The special refspec `:` (or `+:` to allow non-fast-forward updates)
+ directs Git to push "matching" branches: for every branch that exists on
+ the local side, the remote side is updated if a branch of the same name
+ already exists on the remote side.
+ * <src> may contain a * to indicate a simple pattern match.
+ This works like a glob that matches any ref matching the pattern.
+ There must be only one * in both the `<src>` and `<dst>`.
+ It will map refs to the destination by replacing the * with the
+ contents matched from the source. For example, `refs/heads/*:refs/heads/*`
+ will push all branches.
+ * A refspec starting with `^` is a negative refspec.
+ This specifies refs to exclude. A ref will be considered to
+ match if it matches at least one positive refspec, and does not
+ match any negative refspec. Negative refspecs can be pattern refspecs.
+ They must only contain a `<src>`.
+ Fully spelled out hex object names are also not supported.
+ For example, `git push origin 'refs/heads/*' '^refs/heads/dev-*'`
+ will push all branches except for those starting with `dev-`
+ * If `<src>` is empty, it deletes the `<dst>` ref from the remote
+ repository. For example, `git push origin :dev` will
+ delete the `dev` branch.
+ * `tag <tag>` expands to `refs/tags/<tag>:refs/tags/<tag>`.
+ This is technically a special syntax for `git push` and not a refspec,
+ since in `git push origin tag v1.0` the arguments `tag` and `v1.0`
+ are separate.
+ * If the refspec can't be expanded unambiguously, error out
+ with an error indicating what was tried, and depending
+ on the `advice.pushUnqualifiedRefname` configuration (see
+ linkgit:git-config[1]) suggest what refs/ namespace you may have
+ wanted to push to.
+
+Not all updates are allowed: see PUSH RULES below for the details.
--all::
--branches::
@@ -335,14 +305,12 @@ allowing a forced update.
-f::
--force::
- Usually, the command refuses to update a remote ref that is
- not an ancestor of the local ref used to overwrite it.
- Also, when `--force-with-lease` option is used, the command refuses
- to update a remote ref whose current value does not match
- what is expected.
+ Usually, `git push` will refuse to update a branch that is not an
+ ancestor of the commit being pushed.
+
-This flag disables these checks, and can cause the remote repository
-to lose commits; use it with care.
+This flag disables that check, the other safety checks in PUSH RULES
+below, and the checks in --force-with-lease. It can cause the remote
+repository to lose commits; use it with care.
+
Note that `--force` applies to all the refs that are pushed, hence
using it with `push.default` set to `matching` or with multiple push
@@ -514,6 +482,45 @@ reason::
refs, no explanation is needed. For a failed ref, the reason for
failure is described.
+PUSH RULES
+----------
+
+As a safety feature, the `git push` command only allows certain kinds of
+updates to prevent you from accidentally losing data on the remote.
+
+Because branches and tags are intended to be used differently, the
+safety rules for pushing to a branch are different from the rules
+for pushing to a tag. In the following rules "update" means any
+modifications except deletions and creations. Deletions and creations
+are always allowed, except when forbidden by configuration or hooks.
+
+1. If the push destination is a **branch** (`refs/heads/*`): only
+ fast-forward updates are allowed, which means the destination must be
+ an ancestor of the source commit. The source must be a commit.
+2. If the push destination is a **tag** (`refs/tags/*`): all updates will
+ be rejected. The source can be any object.
+3. If the push destination is not a branch or tag:
+ * If the source is a tree or blob object, any updates will be rejected
+ * If the source is a tag or commit object, any fast-forward update
+ is allowed, even in cases where what's being fast-forwarded is not a
+ commit, but a tag object which happens to point to a new commit which
+ is a fast-forward of the commit the last tag (or commit) it's
+ replacing. Replacing a tag with an entirely different tag is also
+ allowed, if it points to the same commit, as well as pushing a peeled
+ tag, i.e. pushing the commit that existing tag object points to, or a
+ new tag object which an existing commit points to.
+
+You can override these rules by passing `--force` or by adding the
+optional leading `+` to a refspec. The only exceptions are that no
+amount of forcing will make a branch accept a non-commit object,
+and forcing won't make the remote repository accept a push that it's
+configured to deny.
+
+Hooks and configuration can also override or amend these rules,
+see e.g. `receive.denyNonFastForwards` and `receive.denyDeletes`
+in linkgit:git-config[1] and `pre-receive` and `update` in
+linkgit:githooks[5].
+
NOTE ABOUT FAST-FORWARDS
------------------------
diff --git a/Documentation/git-refs.adoc b/Documentation/git-refs.adoc
index bfa9b3ea2d..fa33680cc7 100644
--- a/Documentation/git-refs.adoc
+++ b/Documentation/git-refs.adoc
@@ -19,6 +19,7 @@ git refs list [--count=<count>] [--shell|--perl|--python|--tcl]
[(--exclude=<pattern>)...] [--start-after=<marker>]
[ --stdin | (<pattern>...)]
git refs exists <ref>
+git refs optimize [--all] [--no-prune] [--auto] [--include <pattern>] [--exclude <pattern>]
DESCRIPTION
-----------
@@ -45,6 +46,11 @@ exists::
failed with an error other than the reference being missing. This does
not verify whether the reference resolves to an actual object.
+optimize::
+ Optimizes references to improve repository performance and reduce disk
+ usage. This subcommand is an alias for linkgit:git-pack-refs[1] and
+ offers identical functionality.
+
OPTIONS
-------
@@ -80,6 +86,10 @@ The following options are specific to 'git refs list':
include::for-each-ref-options.adoc[]
+The following options are specific to 'git refs optimize':
+
+include::pack-refs-options.adoc[]
+
KNOWN LIMITATIONS
-----------------
diff --git a/Documentation/git-whatchanged.adoc b/Documentation/git-whatchanged.adoc
index d21484026f..436e219b7d 100644
--- a/Documentation/git-whatchanged.adoc
+++ b/Documentation/git-whatchanged.adoc
@@ -15,7 +15,7 @@ WARNING
-------
`git whatchanged` has been deprecated and is scheduled for removal in
a future version of Git, as it is merely `git log` with different
-default; `whatchanged` is not even shorter to type than `log --raw`.
+defaults.
DESCRIPTION
-----------
@@ -24,7 +24,11 @@ Shows commit logs and diff output each commit introduces.
New users are encouraged to use linkgit:git-log[1] instead. The
`whatchanged` command is essentially the same as linkgit:git-log[1]
-but defaults to showing the raw format diff output and skipping merges.
+but defaults to showing the raw format diff output and skipping merges:
+
+----
+git log --raw --no-merges
+----
The command is primarily kept for historical reasons; fingers of
many people who learned Git long before `git log` was invented by
diff --git a/Documentation/git.adoc b/Documentation/git.adoc
index 03e9e69d25..ce099e78b8 100644
--- a/Documentation/git.adoc
+++ b/Documentation/git.adoc
@@ -219,7 +219,8 @@ If you just want to run git as if it was started in `<path>` then use
List commands by group. This is an internal/experimental
option and may change or be removed in the future. Supported
groups are: builtins, parseopt (builtin commands that use
- parse-options), main (all commands in libexec directory),
+ parse-options), deprecated (deprecated builtins),
+ main (all commands in libexec directory),
others (all other commands in `$PATH` that have git- prefix),
list-<category> (see categories in command-list.txt),
nohelpers (exclude helper commands), alias and config
diff --git a/Documentation/gitcredentials.adoc b/Documentation/gitcredentials.adoc
index 3337bb475d..60c2cc4ade 100644
--- a/Documentation/gitcredentials.adoc
+++ b/Documentation/gitcredentials.adoc
@@ -150,9 +150,8 @@ pattern in the config file. For example, if you have this in your config file:
username = foo
--------------------------------------
-then we will match: both protocols are the same, both hosts are the same, and
-the "pattern" URL does not care about the path component at all. However, this
-context would not match:
+then we will match: both protocols are the same and both hosts are the same.
+However, this context would not match:
--------------------------------------
[credential "https://kernel.org"]
@@ -166,11 +165,11 @@ match: Git compares the protocols exactly. However, you may use wildcards in
the domain name and other pattern matching techniques as with the `http.<URL>.*`
options.
-If the "pattern" URL does include a path component, then this too must match
-exactly: the context `https://example.com/bar/baz.git` will match a config
-entry for `https://example.com/bar/baz.git` (in addition to matching the config
-entry for `https://example.com`) but will not match a config entry for
-`https://example.com/bar`.
+If the "pattern" URL does include a path component, then this must match
+as a prefix path: the context `https://example.com/bar` will match a config
+entry for `https://example.com/bar/baz.git` but will not match a config entry for
+`https://example.com/other/repo.git` or `https://example.com/barry/repo.git`
+(even though it is a string prefix).
CONFIGURATION OPTIONS
diff --git a/Documentation/howto/meson.build b/Documentation/howto/meson.build
index 81000028c0..ece20244af 100644
--- a/Documentation/howto/meson.build
+++ b/Documentation/howto/meson.build
@@ -29,7 +29,7 @@ howto_index = custom_target(
output: 'howto-index.adoc',
)
-custom_target(
+doc_targets += custom_target(
command: asciidoc_html_options,
input: howto_index,
output: 'howto-index.html',
@@ -51,7 +51,7 @@ foreach howto : howto_sources
capture: true,
)
- custom_target(
+ doc_targets += custom_target(
command: asciidoc_html_options,
input: howto_stripped,
output: fs.stem(howto_stripped.full_path()) + '.html',
diff --git a/Documentation/meson.build b/Documentation/meson.build
index e34965c5b0..44f94cdb7b 100644
--- a/Documentation/meson.build
+++ b/Documentation/meson.build
@@ -377,7 +377,7 @@ foreach manpage, category : manpages
output: fs.stem(manpage) + '.xml',
)
- custom_target(
+ doc_targets += custom_target(
command: [
xmlto,
'-m', '@INPUT0@',
@@ -400,7 +400,7 @@ foreach manpage, category : manpages
endif
if get_option('docs').contains('html')
- custom_target(
+ doc_targets += custom_target(
command: asciidoc_common_options + [
'--backend=' + asciidoc_html,
'--doctype=manpage',
@@ -452,7 +452,7 @@ if get_option('docs').contains('html')
depends: documentation_deps,
)
- custom_target(
+ doc_targets += custom_target(
command: [
xsltproc,
'--xinclude',
@@ -481,7 +481,7 @@ if get_option('docs').contains('html')
]
foreach article : articles
- custom_target(
+ doc_targets += custom_target(
command: asciidoc_common_options + [
'--backend=' + asciidoc_html,
'--out-file=@OUTPUT@',
diff --git a/Documentation/pack-refs-options.adoc b/Documentation/pack-refs-options.adoc
new file mode 100644
index 0000000000..0b11282941
--- /dev/null
+++ b/Documentation/pack-refs-options.adoc
@@ -0,0 +1,52 @@
+--all::
+
+The command by default packs all tags and refs that are already
+packed, and leaves other refs
+alone. This is because branches are expected to be actively
+developed and packing their tips does not help performance.
+This option causes all refs to be packed as well, with the exception
+of hidden refs, broken refs, and symbolic refs. Useful for a repository
+with many branches of historical interest.
+
+--no-prune::
+
+The command usually removes loose refs under `$GIT_DIR/refs`
+hierarchy after packing them. This option tells it not to.
+
+--auto::
+
+Pack refs as needed depending on the current state of the ref database. The
+behavior depends on the ref format used by the repository and may change in the
+future.
++
+ - "files": Loose references are packed into the `packed-refs` file
+ based on the ratio of loose references to the size of the
+ `packed-refs` file. The bigger the `packed-refs` file, the more loose
+ references need to exist before we repack.
++
+ - "reftable": Tables are compacted such that they form a geometric
+ sequence. For two tables N and N+1, where N+1 is newer, this
+ maintains the property that N is at least twice as big as N+1. Only
+ tables that violate this property are compacted.
+
+--include <pattern>::
+
+Pack refs based on a `glob(7)` pattern. Repetitions of this option
+accumulate inclusion patterns. If a ref is both included in `--include` and
+`--exclude`, `--exclude` takes precedence. Using `--include` will preclude all
+tags from being included by default. Symbolic refs and broken refs will never
+be packed. When used with `--all`, it will be a no-op. Use `--no-include` to clear
+and reset the list of patterns.
+
+--exclude <pattern>::
+
+Do not pack refs matching the given `glob(7)` pattern. Repetitions of this option
+accumulate exclusion patterns. Use `--no-exclude` to clear and reset the list of
+patterns. If a ref is already packed, including it with `--exclude` will not
+unpack it.
++
+When used with `--all`, pack only loose refs which do not match any of
+the provided `--exclude` patterns.
++
+When used with `--include`, refs provided to `--include`, minus refs that are
+provided to `--exclude` will be packed.
diff --git a/Documentation/technical/meson.build b/Documentation/technical/meson.build
index a13aafcfbb..858af811a7 100644
--- a/Documentation/technical/meson.build
+++ b/Documentation/technical/meson.build
@@ -46,7 +46,7 @@ api_index = custom_target(
output: 'api-index.adoc',
)
-custom_target(
+doc_targets += custom_target(
command: asciidoc_html_options,
input: api_index,
output: 'api-index.html',
@@ -56,7 +56,7 @@ custom_target(
)
foreach article : api_docs + articles
- custom_target(
+ doc_targets += custom_target(
command: asciidoc_html_options,
input: article,
output: fs.stem(article) + '.html',
diff --git a/Makefile b/Makefile
index 4c95affadb..7ea149598d 100644
--- a/Makefile
+++ b/Makefile
@@ -483,6 +483,14 @@ include shared.mak
# Define LIBPCREDIR=/foo/bar if your PCRE header and library files are
# in /foo/bar/include and /foo/bar/lib directories.
#
+# == Optional Rust support ==
+#
+# Define WITH_RUST if you want to include features and subsystems written in
+# Rust into Git. For now, Rust is still an optional feature of the build
+# process. With Git 3.0 though, Rust will always be enabled.
+#
+# Building Rust code requires Cargo.
+#
# == SHA-1 and SHA-256 defines ==
#
# === SHA-1 backend ===
@@ -683,6 +691,7 @@ OBJECTS =
OTHER_PROGRAMS =
PROGRAM_OBJS =
PROGRAMS =
+RUST_SOURCES =
EXCLUDED_PROGRAMS =
SCRIPT_PERL =
SCRIPT_PYTHON =
@@ -883,7 +892,9 @@ BUILT_INS += git-stage$X
BUILT_INS += git-status$X
BUILT_INS += git-switch$X
BUILT_INS += git-version$X
+ifndef WITH_BREAKING_CHANGES
BUILT_INS += git-whatchanged$X
+endif
# what 'all' will build but not install in gitexecdir
OTHER_PROGRAMS += git$X
@@ -918,6 +929,108 @@ TEST_SHELL_PATH = $(SHELL_PATH)
LIB_FILE = libgit.a
XDIFF_LIB = xdiff/lib.a
REFTABLE_LIB = reftable/libreftable.a
+ifdef DEBUG
+RUST_LIB = target/debug/libgitcore.a
+else
+RUST_LIB = target/release/libgitcore.a
+endif
+
+# xdiff and reftable libs may in turn depend on what is in libgit.a
+GITLIBS = common-main.o $(LIB_FILE) $(XDIFF_LIB) $(REFTABLE_LIB) $(LIB_FILE)
+EXTLIBS =
+
+GIT_USER_AGENT = git/$(GIT_VERSION)
+
+ifeq ($(wildcard sha1collisiondetection/lib/sha1.h),sha1collisiondetection/lib/sha1.h)
+DC_SHA1_SUBMODULE = auto
+endif
+
+# Set CFLAGS, LDFLAGS and other *FLAGS variables. These might be
+# tweaked by config.* below as well as the command-line, both of
+# which'll override these defaults.
+# Older versions of GCC may require adding "-std=gnu99" at the end.
+CFLAGS = -g -O2 -Wall
+LDFLAGS =
+CC_LD_DYNPATH = -Wl,-rpath,
+BASIC_CFLAGS = -I.
+BASIC_LDFLAGS =
+
+# library flags
+ARFLAGS = rcs
+PTHREAD_CFLAGS =
+
+# Rust flags
+CARGO_ARGS =
+ifndef V
+CARGO_ARGS += --quiet
+endif
+ifndef DEBUG
+CARGO_ARGS += --release
+endif
+
+# For the 'sparse' target
+SPARSE_FLAGS ?= -std=gnu99 -D__STDC_NO_VLA__
+SP_EXTRA_FLAGS =
+
+# For informing GIT-BUILD-OPTIONS of the SANITIZE=leak,address targets
+SANITIZE_LEAK =
+SANITIZE_ADDRESS =
+
+# For the 'coccicheck' target
+SPATCH_INCLUDE_FLAGS = --all-includes
+SPATCH_FLAGS =
+SPATCH_TEST_FLAGS =
+
+# If *.o files are present, have "coccicheck" depend on them, with
+# COMPUTE_HEADER_DEPENDENCIES this will speed up the common-case of
+# only needing to re-generate coccicheck results for the users of a
+# given API if it's changed, and not all files in the project. If
+# COMPUTE_HEADER_DEPENDENCIES=no this will be unset too.
+SPATCH_USE_O_DEPENDENCIES = YesPlease
+
+# Set SPATCH_CONCAT_COCCI to concatenate the contrib/cocci/*.cocci
+# files into a single contrib/cocci/ALL.cocci before running
+# "coccicheck".
+#
+# Pros:
+#
+# - Speeds up a one-shot run of "make coccicheck", as we won't have to
+# parse *.[ch] files N times for the N *.cocci rules
+#
+# Cons:
+#
+# - Will make incremental development of *.cocci slower, as
+# e.g. changing strbuf.cocci will re-run all *.cocci.
+#
+# - Makes error and performance analysis harder, as rules will be
+# applied from a monolithic ALL.cocci, rather than
+# e.g. strbuf.cocci. To work around this either undefine this, or
+# generate a specific patch, e.g. this will always use strbuf.cocci,
+# not ALL.cocci:
+#
+# make contrib/coccinelle/strbuf.cocci.patch
+SPATCH_CONCAT_COCCI = YesPlease
+
+# Rebuild 'coccicheck' if $(SPATCH), its flags etc. change
+TRACK_SPATCH_DEFINES =
+TRACK_SPATCH_DEFINES += $(SPATCH)
+TRACK_SPATCH_DEFINES += $(SPATCH_INCLUDE_FLAGS)
+TRACK_SPATCH_DEFINES += $(SPATCH_FLAGS)
+TRACK_SPATCH_DEFINES += $(SPATCH_TEST_FLAGS)
+GIT-SPATCH-DEFINES: FORCE
+ @FLAGS='$(TRACK_SPATCH_DEFINES)'; \
+ if test x"$$FLAGS" != x"`cat GIT-SPATCH-DEFINES 2>/dev/null`" ; then \
+ echo >&2 " * new spatch flags"; \
+ echo "$$FLAGS" >GIT-SPATCH-DEFINES; \
+ fi
+
+include config.mak.uname
+-include config.mak.autogen
+-include config.mak
+
+ifdef DEVELOPER
+include config.mak.dev
+endif
GENERATED_H += command-list.h
GENERATED_H += config-list.h
@@ -974,7 +1087,6 @@ LIB_OBJS += blame.o
LIB_OBJS += blob.o
LIB_OBJS += bloom.o
LIB_OBJS += branch.o
-LIB_OBJS += bulk-checkin.o
LIB_OBJS += bundle-uri.o
LIB_OBJS += bundle.o
LIB_OBJS += cache-tree.o
@@ -1094,6 +1206,7 @@ LIB_OBJS += pack-bitmap.o
LIB_OBJS += pack-check.o
LIB_OBJS += pack-mtimes.o
LIB_OBJS += pack-objects.o
+LIB_OBJS += pack-refs.o
LIB_OBJS += pack-revindex.o
LIB_OBJS += pack-write.o
LIB_OBJS += packfile.o
@@ -1196,7 +1309,9 @@ LIB_OBJS += urlmatch.o
LIB_OBJS += usage.o
LIB_OBJS += userdiff.o
LIB_OBJS += utf8.o
+ifndef WITH_RUST
LIB_OBJS += varint.o
+endif
LIB_OBJS += version.o
LIB_OBJS += versioncmp.o
LIB_OBJS += walker.o
@@ -1388,93 +1503,8 @@ CLAR_TEST_OBJS += $(UNIT_TEST_DIR)/unit-test.o
UNIT_TEST_OBJS += $(UNIT_TEST_DIR)/test-lib.o
-# xdiff and reftable libs may in turn depend on what is in libgit.a
-GITLIBS = common-main.o $(LIB_FILE) $(XDIFF_LIB) $(REFTABLE_LIB) $(LIB_FILE)
-EXTLIBS =
-
-GIT_USER_AGENT = git/$(GIT_VERSION)
-
-ifeq ($(wildcard sha1collisiondetection/lib/sha1.h),sha1collisiondetection/lib/sha1.h)
-DC_SHA1_SUBMODULE = auto
-endif
-
-# Set CFLAGS, LDFLAGS and other *FLAGS variables. These might be
-# tweaked by config.* below as well as the command-line, both of
-# which'll override these defaults.
-# Older versions of GCC may require adding "-std=gnu99" at the end.
-CFLAGS = -g -O2 -Wall
-LDFLAGS =
-CC_LD_DYNPATH = -Wl,-rpath,
-BASIC_CFLAGS = -I.
-BASIC_LDFLAGS =
-
-# library flags
-ARFLAGS = rcs
-PTHREAD_CFLAGS =
-
-# For the 'sparse' target
-SPARSE_FLAGS ?= -std=gnu99 -D__STDC_NO_VLA__
-SP_EXTRA_FLAGS =
-
-# For informing GIT-BUILD-OPTIONS of the SANITIZE=leak,address targets
-SANITIZE_LEAK =
-SANITIZE_ADDRESS =
-
-# For the 'coccicheck' target
-SPATCH_INCLUDE_FLAGS = --all-includes
-SPATCH_FLAGS =
-SPATCH_TEST_FLAGS =
-
-# If *.o files are present, have "coccicheck" depend on them, with
-# COMPUTE_HEADER_DEPENDENCIES this will speed up the common-case of
-# only needing to re-generate coccicheck results for the users of a
-# given API if it's changed, and not all files in the project. If
-# COMPUTE_HEADER_DEPENDENCIES=no this will be unset too.
-SPATCH_USE_O_DEPENDENCIES = YesPlease
-
-# Set SPATCH_CONCAT_COCCI to concatenate the contrib/cocci/*.cocci
-# files into a single contrib/cocci/ALL.cocci before running
-# "coccicheck".
-#
-# Pros:
-#
-# - Speeds up a one-shot run of "make coccicheck", as we won't have to
-# parse *.[ch] files N times for the N *.cocci rules
-#
-# Cons:
-#
-# - Will make incremental development of *.cocci slower, as
-# e.g. changing strbuf.cocci will re-run all *.cocci.
-#
-# - Makes error and performance analysis harder, as rules will be
-# applied from a monolithic ALL.cocci, rather than
-# e.g. strbuf.cocci. To work around this either undefine this, or
-# generate a specific patch, e.g. this will always use strbuf.cocci,
-# not ALL.cocci:
-#
-# make contrib/coccinelle/strbuf.cocci.patch
-SPATCH_CONCAT_COCCI = YesPlease
-
-# Rebuild 'coccicheck' if $(SPATCH), its flags etc. change
-TRACK_SPATCH_DEFINES =
-TRACK_SPATCH_DEFINES += $(SPATCH)
-TRACK_SPATCH_DEFINES += $(SPATCH_INCLUDE_FLAGS)
-TRACK_SPATCH_DEFINES += $(SPATCH_FLAGS)
-TRACK_SPATCH_DEFINES += $(SPATCH_TEST_FLAGS)
-GIT-SPATCH-DEFINES: FORCE
- @FLAGS='$(TRACK_SPATCH_DEFINES)'; \
- if test x"$$FLAGS" != x"`cat GIT-SPATCH-DEFINES 2>/dev/null`" ; then \
- echo >&2 " * new spatch flags"; \
- echo "$$FLAGS" >GIT-SPATCH-DEFINES; \
- fi
-
-include config.mak.uname
--include config.mak.autogen
--include config.mak
-
-ifdef DEVELOPER
-include config.mak.dev
-endif
+RUST_SOURCES += src/lib.rs
+RUST_SOURCES += src/varint.rs
GIT-VERSION-FILE: FORCE
@OLD=$$(cat $@ 2>/dev/null || :) && \
@@ -1505,6 +1535,11 @@ endif
ALL_CFLAGS = $(DEVELOPER_CFLAGS) $(CPPFLAGS) $(CFLAGS) $(CFLAGS_APPEND)
ALL_LDFLAGS = $(LDFLAGS) $(LDFLAGS_APPEND)
+ifdef WITH_RUST
+BASIC_CFLAGS += -DWITH_RUST
+GITLIBS += $(RUST_LIB)
+endif
+
ifdef SANITIZE
SANITIZERS := $(foreach flag,$(subst $(comma),$(space),$(SANITIZE)),$(flag))
BASIC_CFLAGS += -fsanitize=$(SANITIZE) -fno-sanitize-recover=$(SANITIZE)
@@ -2919,6 +2954,12 @@ scalar$X: scalar.o GIT-LDFLAGS $(GITLIBS)
$(LIB_FILE): $(LIB_OBJS)
$(QUIET_AR)$(RM) $@ && $(AR) $(ARFLAGS) $@ $^
+$(RUST_LIB): Cargo.toml $(RUST_SOURCES)
+ $(QUIET_CARGO)cargo build $(CARGO_ARGS)
+
+.PHONY: rust
+rust: $(RUST_LIB)
+
$(XDIFF_LIB): $(XDIFF_OBJS)
$(QUIET_AR)$(RM) $@ && $(AR) $(ARFLAGS) $@ $^
@@ -3769,6 +3810,7 @@ clean: profile-clean coverage-clean cocciclean
$(RM) $(FUZZ_PROGRAMS)
$(RM) $(SP_OBJ)
$(RM) $(HCC)
+ $(RM) -r Cargo.lock target/
$(RM) version-def.h
$(RM) -r $(dep_dirs) $(compdb_dir) compile_commands.json
$(RM) $(test_bindir_programs)
diff --git a/builtin/add.c b/builtin/add.c
index 4cd3d183f9..32709794b3 100644
--- a/builtin/add.c
+++ b/builtin/add.c
@@ -14,13 +14,14 @@
#include "gettext.h"
#include "pathspec.h"
#include "run-command.h"
+#include "object-file.h"
+#include "odb.h"
#include "parse-options.h"
#include "path.h"
#include "preload-index.h"
#include "diff.h"
#include "read-cache.h"
#include "revision.h"
-#include "bulk-checkin.h"
#include "strvec.h"
#include "submodule.h"
#include "add-interactive.h"
@@ -575,7 +576,7 @@ int cmd_add(int argc,
string_list_clear(&only_match_skip_worktree, 0);
}
- transaction = begin_odb_transaction(repo->objects);
+ transaction = odb_transaction_begin(repo->objects);
ps_matched = xcalloc(pathspec.nr, 1);
if (add_renormalize)
@@ -594,7 +595,7 @@ int cmd_add(int argc,
if (chmod_arg && pathspec.nr)
exit_status |= chmod_pathspec(repo, &pathspec, chmod_arg[0], show_only);
- end_odb_transaction(transaction);
+ odb_transaction_commit(transaction);
finish:
if (write_locked_index(repo->index, &lock_file,
diff --git a/builtin/backfill.c b/builtin/backfill.c
index 80056abe47..e80fc1b694 100644
--- a/builtin/backfill.c
+++ b/builtin/backfill.c
@@ -53,7 +53,7 @@ static void download_batch(struct backfill_context *ctx)
* We likely have a new packfile. Add it to the packed list to
* avoid possible duplicate downloads of the same objects.
*/
- reprepare_packed_git(ctx->repo);
+ odb_reprepare(ctx->repo->objects);
}
static int fill_missing_blobs(const char *path UNUSED,
diff --git a/builtin/cat-file.c b/builtin/cat-file.c
index fce0b06451..ee6715fa52 100644
--- a/builtin/cat-file.c
+++ b/builtin/cat-file.c
@@ -852,9 +852,10 @@ static void batch_each_object(struct batch_options *opt,
if (bitmap && !for_each_bitmapped_object(bitmap, &opt->objects_filter,
batch_one_object_bitmapped, &payload)) {
+ struct packfile_store *packs = the_repository->objects->packfiles;
struct packed_git *pack;
- for (pack = get_all_packs(the_repository); pack; pack = pack->next) {
+ for (pack = packfile_store_get_all_packs(packs); pack; pack = pack->next) {
if (bitmap_index_contains_pack(bitmap, pack) ||
open_pack_index(pack))
continue;
diff --git a/builtin/config.c b/builtin/config.c
index 2348a99dd4..75852bd79d 100644
--- a/builtin/config.c
+++ b/builtin/config.c
@@ -547,24 +547,31 @@ static int git_get_color_config(const char *var, const char *value,
return 0;
}
-static void get_color(const struct config_location_options *opts,
+static int get_color(const struct config_location_options *opts,
const char *var, const char *def_color)
{
struct get_color_config_data data = {
.get_color_slot = var,
.parsed_color[0] = '\0',
};
+ int ret;
config_with_options(git_get_color_config, &data,
&opts->source, the_repository,
&opts->options);
if (!data.get_color_found && def_color) {
- if (color_parse(def_color, data.parsed_color) < 0)
- die(_("unable to parse default color value"));
+ if (color_parse(def_color, data.parsed_color) < 0) {
+ ret = error(_("unable to parse default color value"));
+ goto out;
+ }
}
+ ret = 0;
+
+out:
fputs(data.parsed_color, stdout);
+ return ret;
}
struct get_colorbool_config_data {
@@ -913,10 +920,13 @@ static int cmd_config_get(int argc, const char **argv, const char *prefix,
location_options_init(&location_opts, prefix);
display_options_init(&display_opts);
- setup_auto_pager("config", 1);
+ if (display_opts.type != TYPE_COLOR)
+ setup_auto_pager("config", 1);
if (url)
ret = get_urlmatch(&location_opts, &display_opts, argv[0], url);
+ else if (display_opts.type == TYPE_COLOR && !strlen(argv[0]) && display_opts.default_value)
+ ret = get_color(&location_opts, "", display_opts.default_value);
else
ret = get_value(&location_opts, &display_opts, argv[0], value_pattern,
get_value_flags, flags);
@@ -1391,7 +1401,7 @@ static int cmd_config_actions(int argc, const char **argv, const char *prefix)
}
else if (actions == ACTION_GET_COLOR) {
check_argc(argc, 1, 2);
- get_color(&location_opts, argv[0], argv[1]);
+ ret = get_color(&location_opts, argv[0], argv[1]);
}
else if (actions == ACTION_GET_COLORBOOL) {
check_argc(argc, 1, 2);
diff --git a/builtin/count-objects.c b/builtin/count-objects.c
index a61d3b46aa..f2f407c2a7 100644
--- a/builtin/count-objects.c
+++ b/builtin/count-objects.c
@@ -122,6 +122,7 @@ int cmd_count_objects(int argc,
count_loose, count_cruft, NULL, NULL);
if (verbose) {
+ struct packfile_store *packs = the_repository->objects->packfiles;
struct packed_git *p;
unsigned long num_pack = 0;
off_t size_pack = 0;
@@ -129,7 +130,7 @@ int cmd_count_objects(int argc,
struct strbuf pack_buf = STRBUF_INIT;
struct strbuf garbage_buf = STRBUF_INIT;
- for (p = get_all_packs(the_repository); p; p = p->next) {
+ for (p = packfile_store_get_all_packs(packs); p; p = p->next) {
if (!p->pack_local)
continue;
if (open_pack_index(p))
diff --git a/builtin/fast-export.c b/builtin/fast-export.c
index c06ee0b213..dc2486f9a8 100644
--- a/builtin/fast-export.c
+++ b/builtin/fast-export.c
@@ -37,8 +37,6 @@ static const char *const fast_export_usage[] = {
NULL
};
-enum sign_mode { SIGN_ABORT, SIGN_VERBATIM, SIGN_STRIP, SIGN_WARN_VERBATIM, SIGN_WARN_STRIP };
-
static int progress;
static enum sign_mode signed_tag_mode = SIGN_ABORT;
static enum sign_mode signed_commit_mode = SIGN_STRIP;
@@ -59,23 +57,16 @@ static struct hashmap anonymized_seeds;
static struct revision_sources revision_sources;
static int parse_opt_sign_mode(const struct option *opt,
- const char *arg, int unset)
+ const char *arg, int unset)
{
enum sign_mode *val = opt->value;
+
if (unset)
return 0;
- else if (!strcmp(arg, "abort"))
- *val = SIGN_ABORT;
- else if (!strcmp(arg, "verbatim") || !strcmp(arg, "ignore"))
- *val = SIGN_VERBATIM;
- else if (!strcmp(arg, "warn-verbatim") || !strcmp(arg, "warn"))
- *val = SIGN_WARN_VERBATIM;
- else if (!strcmp(arg, "warn-strip"))
- *val = SIGN_WARN_STRIP;
- else if (!strcmp(arg, "strip"))
- *val = SIGN_STRIP;
- else
+
+ if (parse_sign_mode(arg, val))
return error("Unknown %s mode: %s", opt->long_name, arg);
+
return 0;
}
diff --git a/builtin/fast-import.c b/builtin/fast-import.c
index 2c35f9345d..606c6aea82 100644
--- a/builtin/fast-import.c
+++ b/builtin/fast-import.c
@@ -188,6 +188,8 @@ static int global_argc;
static const char **global_argv;
static const char *global_prefix;
+static enum sign_mode signed_commit_mode = SIGN_VERBATIM;
+
/* Memory pools */
static struct mem_pool fi_mem_pool = {
.block_alloc = 2*1024*1024 - sizeof(struct mp_block),
@@ -897,11 +899,11 @@ static void end_packfile(void)
idx_name = keep_pack(create_index());
/* Register the packfile with core git's machinery. */
- new_p = add_packed_git(pack_data->repo, idx_name, strlen(idx_name), 1);
+ new_p = packfile_store_load_pack(pack_data->repo->objects->packfiles,
+ idx_name, 1);
if (!new_p)
die("core git rejected index %s", idx_name);
all_packs[pack_id] = new_p;
- install_packed_git(the_repository, new_p);
free(idx_name);
/* Print the boundary */
@@ -952,6 +954,7 @@ static int store_object(
struct object_id *oidout,
uintmax_t mark)
{
+ struct packfile_store *packs = the_repository->objects->packfiles;
void *out, *delta;
struct object_entry *e;
unsigned char hdr[96];
@@ -975,7 +978,7 @@ static int store_object(
if (e->idx.offset) {
duplicate_count_by_type[type]++;
return 1;
- } else if (find_oid_pack(&oid, get_all_packs(the_repository))) {
+ } else if (find_oid_pack(&oid, packfile_store_get_all_packs(packs))) {
e->type = type;
e->pack_id = MAX_PACK_ID;
e->idx.offset = 1; /* just not zero! */
@@ -1092,6 +1095,7 @@ static void truncate_pack(struct hashfile_checkpoint *checkpoint)
static void stream_blob(uintmax_t len, struct object_id *oidout, uintmax_t mark)
{
+ struct packfile_store *packs = the_repository->objects->packfiles;
size_t in_sz = 64 * 1024, out_sz = 64 * 1024;
unsigned char *in_buf = xmalloc(in_sz);
unsigned char *out_buf = xmalloc(out_sz);
@@ -1175,7 +1179,7 @@ static void stream_blob(uintmax_t len, struct object_id *oidout, uintmax_t mark)
duplicate_count_by_type[OBJ_BLOB]++;
truncate_pack(&checkpoint);
- } else if (find_oid_pack(&oid, get_all_packs(the_repository))) {
+ } else if (find_oid_pack(&oid, packfile_store_get_all_packs(packs))) {
e->type = OBJ_BLOB;
e->pack_id = MAX_PACK_ID;
e->idx.offset = 1; /* just not zero! */
@@ -2752,6 +2756,15 @@ static void parse_one_signature(struct signature_data *sig, const char *v)
parse_data(&sig->data, 0, NULL);
}
+static void discard_one_signature(void)
+{
+ struct strbuf data = STRBUF_INIT;
+
+ read_next_command();
+ parse_data(&data, 0, NULL);
+ strbuf_release(&data);
+}
+
static void add_gpgsig_to_commit(struct strbuf *commit_data,
const char *header,
struct signature_data *sig)
@@ -2785,6 +2798,22 @@ static void store_signature(struct signature_data *stored_sig,
}
}
+static void import_one_signature(struct signature_data *sig_sha1,
+ struct signature_data *sig_sha256,
+ const char *v)
+{
+ struct signature_data sig = { NULL, NULL, STRBUF_INIT };
+
+ parse_one_signature(&sig, v);
+
+ if (!strcmp(sig.hash_algo, "sha1"))
+ store_signature(sig_sha1, &sig, "SHA-1");
+ else if (!strcmp(sig.hash_algo, "sha256"))
+ store_signature(sig_sha256, &sig, "SHA-256");
+ else
+ die(_("parse_one_signature() returned unknown hash algo"));
+}
+
static void parse_new_commit(const char *arg)
{
static struct strbuf msg = STRBUF_INIT;
@@ -2817,19 +2846,32 @@ static void parse_new_commit(const char *arg)
if (!committer)
die("Expected committer but didn't get one");
- /* Process signatures (up to 2: one "sha1" and one "sha256") */
while (skip_prefix(command_buf.buf, "gpgsig ", &v)) {
- struct signature_data sig = { NULL, NULL, STRBUF_INIT };
-
- parse_one_signature(&sig, v);
+ switch (signed_commit_mode) {
+
+ /* First, modes that don't need the signature to be parsed */
+ case SIGN_ABORT:
+ die("encountered signed commit; use "
+ "--signed-commits=<mode> to handle it");
+ case SIGN_WARN_STRIP:
+ warning(_("stripping a commit signature"));
+ /* fallthru */
+ case SIGN_STRIP:
+ discard_one_signature();
+ break;
- if (!strcmp(sig.hash_algo, "sha1"))
- store_signature(&sig_sha1, &sig, "SHA-1");
- else if (!strcmp(sig.hash_algo, "sha256"))
- store_signature(&sig_sha256, &sig, "SHA-256");
- else
- BUG("parse_one_signature() returned unknown hash algo");
+ /* Second, modes that parse the signature */
+ case SIGN_WARN_VERBATIM:
+ warning(_("importing a commit signature verbatim"));
+ /* fallthru */
+ case SIGN_VERBATIM:
+ import_one_signature(&sig_sha1, &sig_sha256, v);
+ break;
+ /* Third, BUG */
+ default:
+ BUG("invalid signed_commit_mode value %d", signed_commit_mode);
+ }
read_next_command();
}
@@ -3501,6 +3543,9 @@ static int parse_one_option(const char *option)
option_active_branches(option);
} else if (skip_prefix(option, "export-pack-edges=", &option)) {
option_export_pack_edges(option);
+ } else if (skip_prefix(option, "signed-commits=", &option)) {
+ if (parse_sign_mode(option, &signed_commit_mode))
+ usagef(_("unknown --signed-commits mode '%s'"), option);
} else if (!strcmp(option, "quiet")) {
show_stats = 0;
quiet = 1;
diff --git a/builtin/fsck.c b/builtin/fsck.c
index d2eb9d4fbe..8ee95e0d67 100644
--- a/builtin/fsck.c
+++ b/builtin/fsck.c
@@ -867,19 +867,20 @@ static int mark_packed_for_connectivity(const struct object_id *oid,
static int check_pack_rev_indexes(struct repository *r, int show_progress)
{
+ struct packfile_store *packs = r->objects->packfiles;
struct progress *progress = NULL;
uint32_t pack_count = 0;
int res = 0;
if (show_progress) {
- for (struct packed_git *p = get_all_packs(r); p; p = p->next)
+ for (struct packed_git *p = packfile_store_get_all_packs(packs); p; p = p->next)
pack_count++;
progress = start_delayed_progress(the_repository,
"Verifying reverse pack-indexes", pack_count);
pack_count = 0;
}
- for (struct packed_git *p = get_all_packs(r); p; p = p->next) {
+ for (struct packed_git *p = packfile_store_get_all_packs(packs); p; p = p->next) {
int load_error = load_pack_revindex_from_disk(p);
if (load_error < 0) {
@@ -999,6 +1000,8 @@ int cmd_fsck(int argc,
for_each_packed_object(the_repository,
mark_packed_for_connectivity, NULL, 0);
} else {
+ struct packfile_store *packs = the_repository->objects->packfiles;
+
odb_prepare_alternates(the_repository->objects);
for (source = the_repository->objects->sources; source; source = source->next)
fsck_source(source);
@@ -1009,7 +1012,7 @@ int cmd_fsck(int argc,
struct progress *progress = NULL;
if (show_progress) {
- for (p = get_all_packs(the_repository); p;
+ for (p = packfile_store_get_all_packs(packs); p;
p = p->next) {
if (open_pack_index(p))
continue;
@@ -1019,7 +1022,7 @@ int cmd_fsck(int argc,
progress = start_progress(the_repository,
_("Checking objects"), total);
}
- for (p = get_all_packs(the_repository); p;
+ for (p = packfile_store_get_all_packs(packs); p;
p = p->next) {
/* verify gives error messages itself */
if (verify_pack(the_repository,
diff --git a/builtin/gc.c b/builtin/gc.c
index 03ae4926b2..e19e13d978 100644
--- a/builtin/gc.c
+++ b/builtin/gc.c
@@ -487,9 +487,10 @@ static int too_many_loose_objects(struct gc_config *cfg)
static struct packed_git *find_base_packs(struct string_list *packs,
unsigned long limit)
{
+ struct packfile_store *packfiles = the_repository->objects->packfiles;
struct packed_git *p, *base = NULL;
- for (p = get_all_packs(the_repository); p; p = p->next) {
+ for (p = packfile_store_get_all_packs(packfiles); p; p = p->next) {
if (!p->pack_local || p->is_cruft)
continue;
if (limit) {
@@ -508,13 +509,14 @@ static struct packed_git *find_base_packs(struct string_list *packs,
static int too_many_packs(struct gc_config *cfg)
{
+ struct packfile_store *packs = the_repository->objects->packfiles;
struct packed_git *p;
int cnt;
if (cfg->gc_auto_pack_limit <= 0)
return 0;
- for (cnt = 0, p = get_all_packs(the_repository); p; p = p->next) {
+ for (cnt = 0, p = packfile_store_get_all_packs(packs); p; p = p->next) {
if (!p->pack_local)
continue;
if (p->pack_keep)
@@ -1042,7 +1044,7 @@ int cmd_gc(int argc,
die(FAILED_RUN, "rerere");
report_garbage = report_pack_garbage;
- reprepare_packed_git(the_repository);
+ odb_reprepare(the_repository->objects);
if (pack_garbage.nr > 0) {
close_object_store(the_repository->objects);
clean_pack_garbage();
@@ -1423,7 +1425,7 @@ static int incremental_repack_auto_condition(struct gc_config *cfg UNUSED)
if (incremental_repack_auto_limit < 0)
return 1;
- for (p = get_packed_git(the_repository);
+ for (p = packfile_store_get_packs(the_repository->objects->packfiles);
count < incremental_repack_auto_limit && p;
p = p->next) {
if (!p->multi_pack_index)
@@ -1491,8 +1493,8 @@ static off_t get_auto_pack_size(void)
struct packed_git *p;
struct repository *r = the_repository;
- reprepare_packed_git(r);
- for (p = get_all_packs(r); p; p = p->next) {
+ odb_reprepare(r->objects);
+ for (p = packfile_store_get_all_packs(r->objects->packfiles); p; p = p->next) {
if (p->pack_size > max_size) {
second_largest_size = max_size;
max_size = p->pack_size;
diff --git a/builtin/grep.c b/builtin/grep.c
index 1d97eb2a2a..13841fbf00 100644
--- a/builtin/grep.c
+++ b/builtin/grep.c
@@ -1214,7 +1214,7 @@ int cmd_grep(int argc,
if (recurse_submodules)
repo_read_gitmodules(the_repository, 1);
if (startup_info->have_repository)
- (void)get_packed_git(the_repository);
+ (void)packfile_store_get_packs(the_repository->objects->packfiles);
start_threads(&opt);
} else {
diff --git a/builtin/index-pack.c b/builtin/index-pack.c
index f91c301bba..2b78ba7fe4 100644
--- a/builtin/index-pack.c
+++ b/builtin/index-pack.c
@@ -1640,13 +1640,9 @@ static void final(const char *final_pack_name, const char *curr_pack_name,
rename_tmp_packfile(&final_index_name, curr_index_name, &index_name,
hash, "idx", 1);
- if (do_fsck_object) {
- struct packed_git *p;
- p = add_packed_git(the_repository, final_index_name,
- strlen(final_index_name), 0);
- if (p)
- install_packed_git(the_repository, p);
- }
+ if (do_fsck_object)
+ packfile_store_load_pack(the_repository->objects->packfiles,
+ final_index_name, 0);
if (!from_stdin) {
printf("%s\n", hash_to_hex(hash));
diff --git a/builtin/log.c b/builtin/log.c
index 5f552d14c0..8aa1777940 100644
--- a/builtin/log.c
+++ b/builtin/log.c
@@ -543,7 +543,13 @@ int cmd_whatchanged(int argc,
cmd_log_init(argc, argv, prefix, &rev, &opt, &cfg);
if (!cfg.i_still_use_this)
- you_still_use_that("git whatchanged");
+ you_still_use_that("git whatchanged",
+ _("\n"
+ "hint: You can replace 'git whatchanged <opts>' with:\n"
+ "hint:\tgit log <opts> --raw --no-merges\n"
+ "hint: Or make an alias:\n"
+ "hint:\tgit config set --global alias.whatchanged 'log --raw --no-merges'\n"
+ "\n"));
if (!rev.diffopt.output_format)
rev.diffopt.output_format = DIFF_FORMAT_RAW;
diff --git a/builtin/pack-objects.c b/builtin/pack-objects.c
index 5856b5f6bf..5bdc44fb2d 100644
--- a/builtin/pack-objects.c
+++ b/builtin/pack-objects.c
@@ -1748,12 +1748,12 @@ static int want_object_in_pack_mtime(const struct object_id *oid,
}
}
- list_for_each(pos, get_packed_git_mru(the_repository)) {
+ list_for_each(pos, packfile_store_get_packs_mru(the_repository->objects->packfiles)) {
struct packed_git *p = list_entry(pos, struct packed_git, mru);
want = want_object_in_pack_one(p, oid, exclude, found_pack, found_offset, found_mtime);
if (!exclude && want > 0)
list_move(&p->mru,
- get_packed_git_mru(the_repository));
+ packfile_store_get_packs_mru(the_repository->objects->packfiles));
if (want != -1)
return want;
}
@@ -3774,7 +3774,7 @@ static void show_object_pack_hint(struct object *object, const char *name,
enum stdin_packs_mode mode = *(enum stdin_packs_mode *)data;
if (mode == STDIN_PACKS_MODE_FOLLOW) {
if (object->type == OBJ_BLOB &&
- !has_object(the_repository, &object->oid, 0))
+ !odb_has_object(the_repository->objects, &object->oid, 0))
return;
add_object_entry(&object->oid, object->type, name, 0);
} else {
@@ -3831,6 +3831,7 @@ static int pack_mtime_cmp(const void *_a, const void *_b)
static void read_packs_list_from_stdin(struct rev_info *revs)
{
+ struct packfile_store *packs = the_repository->objects->packfiles;
struct strbuf buf = STRBUF_INIT;
struct string_list include_packs = STRING_LIST_INIT_DUP;
struct string_list exclude_packs = STRING_LIST_INIT_DUP;
@@ -3855,7 +3856,7 @@ static void read_packs_list_from_stdin(struct rev_info *revs)
string_list_sort(&exclude_packs);
string_list_remove_duplicates(&exclude_packs, 0);
- for (p = get_all_packs(the_repository); p; p = p->next) {
+ for (p = packfile_store_get_all_packs(packs); p; p = p->next) {
const char *pack_name = pack_basename(p);
if ((item = string_list_lookup(&include_packs, pack_name)))
@@ -4076,6 +4077,7 @@ static void enumerate_cruft_objects(void)
static void enumerate_and_traverse_cruft_objects(struct string_list *fresh_packs)
{
+ struct packfile_store *packs = the_repository->objects->packfiles;
struct packed_git *p;
struct rev_info revs;
int ret;
@@ -4105,7 +4107,7 @@ static void enumerate_and_traverse_cruft_objects(struct string_list *fresh_packs
* Re-mark only the fresh packs as kept so that objects in
* unknown packs do not halt the reachability traversal early.
*/
- for (p = get_all_packs(the_repository); p; p = p->next)
+ for (p = packfile_store_get_all_packs(packs); p; p = p->next)
p->pack_keep_in_core = 0;
mark_pack_kept_in_core(fresh_packs, 1);
@@ -4122,6 +4124,7 @@ static void enumerate_and_traverse_cruft_objects(struct string_list *fresh_packs
static void read_cruft_objects(void)
{
+ struct packfile_store *packs = the_repository->objects->packfiles;
struct strbuf buf = STRBUF_INIT;
struct string_list discard_packs = STRING_LIST_INIT_DUP;
struct string_list fresh_packs = STRING_LIST_INIT_DUP;
@@ -4142,7 +4145,7 @@ static void read_cruft_objects(void)
string_list_sort(&discard_packs);
string_list_sort(&fresh_packs);
- for (p = get_all_packs(the_repository); p; p = p->next) {
+ for (p = packfile_store_get_all_packs(packs); p; p = p->next) {
const char *pack_name = pack_basename(p);
struct string_list_item *item;
@@ -4390,11 +4393,12 @@ static void add_unreachable_loose_objects(struct rev_info *revs)
static int has_sha1_pack_kept_or_nonlocal(const struct object_id *oid)
{
+ struct packfile_store *packs = the_repository->objects->packfiles;
static struct packed_git *last_found = (void *)1;
struct packed_git *p;
p = (last_found != (void *)1) ? last_found :
- get_all_packs(the_repository);
+ packfile_store_get_all_packs(packs);
while (p) {
if ((!p->pack_local || p->pack_keep ||
@@ -4404,7 +4408,7 @@ static int has_sha1_pack_kept_or_nonlocal(const struct object_id *oid)
return 1;
}
if (p == last_found)
- p = get_all_packs(the_repository);
+ p = packfile_store_get_all_packs(packs);
else
p = p->next;
if (p == last_found)
@@ -4436,12 +4440,13 @@ static int loosened_object_can_be_discarded(const struct object_id *oid,
static void loosen_unused_packed_objects(void)
{
+ struct packfile_store *packs = the_repository->objects->packfiles;
struct packed_git *p;
uint32_t i;
uint32_t loosened_objects_nr = 0;
struct object_id oid;
- for (p = get_all_packs(the_repository); p; p = p->next) {
+ for (p = packfile_store_get_all_packs(packs); p; p = p->next) {
if (!p->pack_local || p->pack_keep || p->pack_keep_in_core)
continue;
@@ -4591,8 +4596,8 @@ static int add_objects_by_path(const char *path,
/* Skip objects that do not exist locally. */
if ((exclude_promisor_objects || arg_missing_action != MA_ERROR) &&
- oid_object_info_extended(the_repository, oid, &oi,
- OBJECT_INFO_FOR_PREFETCH) < 0)
+ odb_read_object_info_extended(the_repository->objects, oid, &oi,
+ OBJECT_INFO_FOR_PREFETCH) < 0)
continue;
exclude = is_oid_uninteresting(the_repository, oid);
@@ -4742,12 +4747,13 @@ static void get_object_list(struct rev_info *revs, struct strvec *argv)
static void add_extra_kept_packs(const struct string_list *names)
{
+ struct packfile_store *packs = the_repository->objects->packfiles;
struct packed_git *p;
if (!names->nr)
return;
- for (p = get_all_packs(the_repository); p; p = p->next) {
+ for (p = packfile_store_get_all_packs(packs); p; p = p->next) {
const char *name = basename(p->pack_name);
int i;
@@ -5185,8 +5191,10 @@ int cmd_pack_objects(int argc,
add_extra_kept_packs(&keep_pack_list);
if (ignore_packed_keep_on_disk) {
+ struct packfile_store *packs = the_repository->objects->packfiles;
struct packed_git *p;
- for (p = get_all_packs(the_repository); p; p = p->next)
+
+ for (p = packfile_store_get_all_packs(packs); p; p = p->next)
if (p->pack_local && p->pack_keep)
break;
if (!p) /* no keep-able packs found */
@@ -5198,8 +5206,10 @@ int cmd_pack_objects(int argc,
* want to unset "local" based on looking at packs, as
* it also covers non-local objects
*/
+ struct packfile_store *packs = the_repository->objects->packfiles;
struct packed_git *p;
- for (p = get_all_packs(the_repository); p; p = p->next) {
+
+ for (p = packfile_store_get_all_packs(packs); p; p = p->next) {
if (!p->pack_local) {
have_non_local_packs = 1;
break;
diff --git a/builtin/pack-redundant.c b/builtin/pack-redundant.c
index fe81c293e3..80743d8806 100644
--- a/builtin/pack-redundant.c
+++ b/builtin/pack-redundant.c
@@ -566,7 +566,8 @@ static struct pack_list * add_pack(struct packed_git *p)
static struct pack_list * add_pack_file(const char *filename)
{
- struct packed_git *p = get_all_packs(the_repository);
+ struct packfile_store *packs = the_repository->objects->packfiles;
+ struct packed_git *p = packfile_store_get_all_packs(packs);
if (strlen(filename) < 40)
die("Bad pack filename: %s", filename);
@@ -581,7 +582,8 @@ static struct pack_list * add_pack_file(const char *filename)
static void load_all(void)
{
- struct packed_git *p = get_all_packs(the_repository);
+ struct packfile_store *packs = the_repository->objects->packfiles;
+ struct packed_git *p = packfile_store_get_all_packs(packs);
while (p) {
add_pack(p);
@@ -626,7 +628,7 @@ int cmd_pack_redundant(int argc, const char **argv, const char *prefix UNUSED, s
}
if (!i_still_use_this)
- you_still_use_that("git pack-redundant");
+ you_still_use_that("git pack-redundant", NULL);
if (load_all_packs)
load_all();
diff --git a/builtin/pack-refs.c b/builtin/pack-refs.c
index 5e28d0f9e8..3446b84cda 100644
--- a/builtin/pack-refs.c
+++ b/builtin/pack-refs.c
@@ -1,60 +1,16 @@
#include "builtin.h"
-#include "config.h"
-#include "environment.h"
#include "gettext.h"
-#include "parse-options.h"
-#include "refs.h"
-#include "revision.h"
-
-static char const * const pack_refs_usage[] = {
- N_("git pack-refs [--all] [--no-prune] [--auto] [--include <pattern>] [--exclude <pattern>]"),
- NULL
-};
+#include "pack-refs.h"
int cmd_pack_refs(int argc,
const char **argv,
const char *prefix,
struct repository *repo)
{
- struct ref_exclusions excludes = REF_EXCLUSIONS_INIT;
- struct string_list included_refs = STRING_LIST_INIT_NODUP;
- struct pack_refs_opts pack_refs_opts = {
- .exclusions = &excludes,
- .includes = &included_refs,
- .flags = PACK_REFS_PRUNE,
- };
- struct string_list option_excluded_refs = STRING_LIST_INIT_NODUP;
- struct string_list_item *item;
- int pack_all = 0;
- int ret;
-
- struct option opts[] = {
- OPT_BOOL(0, "all", &pack_all, N_("pack everything")),
- OPT_BIT(0, "prune", &pack_refs_opts.flags, N_("prune loose refs (default)"), PACK_REFS_PRUNE),
- OPT_BIT(0, "auto", &pack_refs_opts.flags, N_("auto-pack refs as needed"), PACK_REFS_AUTO),
- OPT_STRING_LIST(0, "include", pack_refs_opts.includes, N_("pattern"),
- N_("references to include")),
- OPT_STRING_LIST(0, "exclude", &option_excluded_refs, N_("pattern"),
- N_("references to exclude")),
- OPT_END(),
+ static char const * const pack_refs_usage[] = {
+ N_("git pack-refs " PACK_REFS_OPTS),
+ NULL
};
- repo_config(repo, git_default_config, NULL);
- if (parse_options(argc, argv, prefix, opts, pack_refs_usage, 0))
- usage_with_options(pack_refs_usage, opts);
-
- for_each_string_list_item(item, &option_excluded_refs)
- add_ref_exclusion(pack_refs_opts.exclusions, item->string);
-
- if (pack_all)
- string_list_append(pack_refs_opts.includes, "*");
-
- if (!pack_refs_opts.includes->nr)
- string_list_append(pack_refs_opts.includes, "refs/tags/*");
-
- ret = refs_pack_refs(get_main_ref_store(repo), &pack_refs_opts);
- clear_ref_exclusions(&excludes);
- string_list_clear(&included_refs, 0);
- string_list_clear(&option_excluded_refs, 0);
- return ret;
+ return pack_refs_core(argc, argv, prefix, repo, pack_refs_usage);
}
diff --git a/builtin/receive-pack.c b/builtin/receive-pack.c
index 1113137a6f..c9288a9c7e 100644
--- a/builtin/receive-pack.c
+++ b/builtin/receive-pack.c
@@ -2389,7 +2389,7 @@ static const char *unpack(int err_fd, struct shallow_info *si)
status = finish_command(&child);
if (status)
return "index-pack abnormal exit";
- reprepare_packed_git(the_repository);
+ odb_reprepare(the_repository->objects);
}
return NULL;
}
diff --git a/builtin/reflog.c b/builtin/reflog.c
index c8f6b93d60..dcbfe89339 100644
--- a/builtin/reflog.c
+++ b/builtin/reflog.c
@@ -418,6 +418,8 @@ static int cmd_reflog_write(int argc, const char **argv, const char *prefix,
const char *ref, *message;
int ret;
+ repo_config(repo, git_ident_config, NULL);
+
argc = parse_options(argc, argv, prefix, options, reflog_write_usage, 0);
if (argc != 4)
usage_with_options(reflog_write_usage, options);
diff --git a/builtin/refs.c b/builtin/refs.c
index 91548783b7..3064f888b2 100644
--- a/builtin/refs.c
+++ b/builtin/refs.c
@@ -2,6 +2,7 @@
#include "builtin.h"
#include "config.h"
#include "fsck.h"
+#include "pack-refs.h"
#include "parse-options.h"
#include "refs.h"
#include "strbuf.h"
@@ -18,6 +19,9 @@
#define REFS_EXISTS_USAGE \
N_("git refs exists <ref>")
+#define REFS_OPTIMIZE_USAGE \
+ N_("git refs optimize " PACK_REFS_OPTS)
+
static int cmd_refs_migrate(int argc, const char **argv, const char *prefix,
struct repository *repo UNUSED)
{
@@ -159,6 +163,17 @@ out:
return ret;
}
+static int cmd_refs_optimize(int argc, const char **argv, const char *prefix,
+ struct repository *repo)
+{
+ static char const * const refs_optimize_usage[] = {
+ REFS_OPTIMIZE_USAGE,
+ NULL
+ };
+
+ return pack_refs_core(argc, argv, prefix, repo, refs_optimize_usage);
+}
+
int cmd_refs(int argc,
const char **argv,
const char *prefix,
@@ -169,6 +184,7 @@ int cmd_refs(int argc,
REFS_VERIFY_USAGE,
"git refs list " COMMON_USAGE_FOR_EACH_REF,
REFS_EXISTS_USAGE,
+ REFS_OPTIMIZE_USAGE,
NULL,
};
parse_opt_subcommand_fn *fn = NULL;
@@ -177,6 +193,7 @@ int cmd_refs(int argc,
OPT_SUBCOMMAND("verify", &fn, cmd_refs_verify),
OPT_SUBCOMMAND("list", &fn, cmd_refs_list),
OPT_SUBCOMMAND("exists", &fn, cmd_refs_exists),
+ OPT_SUBCOMMAND("optimize", &fn, cmd_refs_optimize),
OPT_END(),
};
diff --git a/builtin/repack.c b/builtin/repack.c
index c490a51e91..e8730808c5 100644
--- a/builtin/repack.c
+++ b/builtin/repack.c
@@ -265,10 +265,11 @@ static void existing_packs_release(struct existing_packs *existing)
static void collect_pack_filenames(struct existing_packs *existing,
const struct string_list *extra_keep)
{
+ struct packfile_store *packs = the_repository->objects->packfiles;
struct packed_git *p;
struct strbuf buf = STRBUF_INIT;
- for (p = get_all_packs(the_repository); p; p = p->next) {
+ for (p = packfile_store_get_all_packs(packs); p; p = p->next) {
int i;
const char *base;
@@ -497,10 +498,11 @@ static void init_pack_geometry(struct pack_geometry *geometry,
struct existing_packs *existing,
const struct pack_objects_args *args)
{
+ struct packfile_store *packs = the_repository->objects->packfiles;
struct packed_git *p;
struct strbuf buf = STRBUF_INIT;
- for (p = get_all_packs(the_repository); p; p = p->next) {
+ for (p = packfile_store_get_all_packs(packs); p; p = p->next) {
if (args->local && !p->pack_local)
/*
* When asked to only repack local packfiles we skip
@@ -1137,11 +1139,12 @@ static int write_filtered_pack(const struct pack_objects_args *args,
static void combine_small_cruft_packs(FILE *in, size_t combine_cruft_below_size,
struct existing_packs *existing)
{
+ struct packfile_store *packs = the_repository->objects->packfiles;
struct packed_git *p;
struct strbuf buf = STRBUF_INIT;
size_t i;
- for (p = get_all_packs(the_repository); p; p = p->next) {
+ for (p = packfile_store_get_all_packs(packs); p; p = p->next) {
if (!(p->is_cruft && p->pack_local))
continue;
@@ -1685,7 +1688,7 @@ int cmd_repack(int argc,
goto cleanup;
}
- reprepare_packed_git(the_repository);
+ odb_reprepare(the_repository->objects);
if (delete_redundant) {
int opts = 0;
diff --git a/builtin/unpack-objects.c b/builtin/unpack-objects.c
index 28124b324d..ef79e43715 100644
--- a/builtin/unpack-objects.c
+++ b/builtin/unpack-objects.c
@@ -2,7 +2,6 @@
#define DISABLE_SIGN_COMPARE_WARNINGS
#include "builtin.h"
-#include "bulk-checkin.h"
#include "config.h"
#include "environment.h"
#include "gettext.h"
@@ -600,12 +599,12 @@ static void unpack_all(void)
progress = start_progress(the_repository,
_("Unpacking objects"), nr_objects);
CALLOC_ARRAY(obj_list, nr_objects);
- transaction = begin_odb_transaction(the_repository->objects);
+ transaction = odb_transaction_begin(the_repository->objects);
for (i = 0; i < nr_objects; i++) {
unpack_one(i);
display_progress(progress, i + 1);
}
- end_odb_transaction(transaction);
+ odb_transaction_commit(transaction);
stop_progress(&progress);
if (delta_list)
diff --git a/builtin/update-index.c b/builtin/update-index.c
index 2ba2d29c95..8a5907767b 100644
--- a/builtin/update-index.c
+++ b/builtin/update-index.c
@@ -8,7 +8,6 @@
#define DISABLE_SIGN_COMPARE_WARNINGS
#include "builtin.h"
-#include "bulk-checkin.h"
#include "config.h"
#include "environment.h"
#include "gettext.h"
@@ -19,6 +18,7 @@
#include "cache-tree.h"
#include "tree-walk.h"
#include "object-file.h"
+#include "odb.h"
#include "refs.h"
#include "resolve-undo.h"
#include "parse-options.h"
@@ -70,14 +70,6 @@ static void report(const char *fmt, ...)
if (!verbose)
return;
- /*
- * It is possible, though unlikely, that a caller could use the verbose
- * output to synchronize with addition of objects to the object
- * database. The current implementation of ODB transactions leaves
- * objects invisible while a transaction is active, so flush the
- * transaction here before reporting a change made by update-index.
- */
- flush_odb_transaction(the_repository->objects->transaction);
va_start(vp, fmt);
vprintf(fmt, vp);
putchar('\n');
@@ -1131,7 +1123,7 @@ int cmd_update_index(int argc,
* Allow the object layer to optimize adding multiple objects in
* a batch.
*/
- transaction = begin_odb_transaction(the_repository->objects);
+ transaction = odb_transaction_begin(the_repository->objects);
while (ctx.argc) {
if (parseopt_state != PARSE_OPT_DONE)
parseopt_state = parse_options_step(&ctx, options,
@@ -1150,6 +1142,21 @@ int cmd_update_index(int argc,
const char *path = ctx.argv[0];
char *p;
+ /*
+ * It is possible, though unlikely, that a caller could
+ * use the verbose output to synchronize with addition
+ * of objects to the object database. The current
+ * implementation of ODB transactions leaves objects
+ * invisible while a transaction is active, so end the
+ * transaction here early before processing the next
+ * update. All further updates are performed outside of
+ * a transaction.
+ */
+ if (transaction && verbose) {
+ odb_transaction_commit(transaction);
+ transaction = NULL;
+ }
+
setup_work_tree();
p = prefix_path(prefix, prefix_length, path);
update_one(p);
@@ -1214,7 +1221,7 @@ int cmd_update_index(int argc,
/*
* By now we have added all of the new objects
*/
- end_odb_transaction(transaction);
+ odb_transaction_commit(transaction);
if (split_index > 0) {
if (repo_config_get_split_index(the_repository) == 0)
diff --git a/bulk-checkin.c b/bulk-checkin.c
deleted file mode 100644
index 124c493067..0000000000
--- a/bulk-checkin.c
+++ /dev/null
@@ -1,403 +0,0 @@
-/*
- * Copyright (c) 2011, Google Inc.
- */
-
-#define USE_THE_REPOSITORY_VARIABLE
-
-#include "git-compat-util.h"
-#include "bulk-checkin.h"
-#include "environment.h"
-#include "gettext.h"
-#include "hex.h"
-#include "lockfile.h"
-#include "repository.h"
-#include "csum-file.h"
-#include "pack.h"
-#include "strbuf.h"
-#include "tmp-objdir.h"
-#include "packfile.h"
-#include "object-file.h"
-#include "odb.h"
-
-struct bulk_checkin_packfile {
- char *pack_tmp_name;
- struct hashfile *f;
- off_t offset;
- struct pack_idx_option pack_idx_opts;
-
- struct pack_idx_entry **written;
- uint32_t alloc_written;
- uint32_t nr_written;
-};
-
-struct odb_transaction {
- struct object_database *odb;
-
- int nesting;
- struct tmp_objdir *objdir;
- struct bulk_checkin_packfile packfile;
-};
-
-static void finish_tmp_packfile(struct odb_transaction *transaction,
- struct strbuf *basename,
- unsigned char hash[])
-{
- struct bulk_checkin_packfile *state = &transaction->packfile;
- struct repository *repo = transaction->odb->repo;
- char *idx_tmp_name = NULL;
-
- stage_tmp_packfiles(repo, basename, state->pack_tmp_name,
- state->written, state->nr_written, NULL,
- &state->pack_idx_opts, hash, &idx_tmp_name);
- rename_tmp_packfile_idx(repo, basename, &idx_tmp_name);
-
- free(idx_tmp_name);
-}
-
-static void flush_bulk_checkin_packfile(struct odb_transaction *transaction)
-{
- struct bulk_checkin_packfile *state = &transaction->packfile;
- struct repository *repo = transaction->odb->repo;
- unsigned char hash[GIT_MAX_RAWSZ];
- struct strbuf packname = STRBUF_INIT;
-
- if (!state->f)
- return;
-
- if (state->nr_written == 0) {
- close(state->f->fd);
- free_hashfile(state->f);
- unlink(state->pack_tmp_name);
- goto clear_exit;
- } else if (state->nr_written == 1) {
- finalize_hashfile(state->f, hash, FSYNC_COMPONENT_PACK,
- CSUM_HASH_IN_STREAM | CSUM_FSYNC | CSUM_CLOSE);
- } else {
- int fd = finalize_hashfile(state->f, hash, FSYNC_COMPONENT_PACK, 0);
- fixup_pack_header_footer(repo->hash_algo, fd, hash, state->pack_tmp_name,
- state->nr_written, hash,
- state->offset);
- close(fd);
- }
-
- strbuf_addf(&packname, "%s/pack/pack-%s.",
- repo_get_object_directory(transaction->odb->repo),
- hash_to_hex_algop(hash, repo->hash_algo));
-
- finish_tmp_packfile(transaction, &packname, hash);
- for (uint32_t i = 0; i < state->nr_written; i++)
- free(state->written[i]);
-
-clear_exit:
- free(state->pack_tmp_name);
- free(state->written);
- memset(state, 0, sizeof(*state));
-
- strbuf_release(&packname);
- /* Make objects we just wrote available to ourselves */
- reprepare_packed_git(repo);
-}
-
-/*
- * Cleanup after batch-mode fsync_object_files.
- */
-static void flush_batch_fsync(struct odb_transaction *transaction)
-{
- struct strbuf temp_path = STRBUF_INIT;
- struct tempfile *temp;
-
- if (!transaction->objdir)
- return;
-
- /*
- * Issue a full hardware flush against a temporary file to ensure
- * that all objects are durable before any renames occur. The code in
- * fsync_loose_object_bulk_checkin has already issued a writeout
- * request, but it has not flushed any writeback cache in the storage
- * hardware or any filesystem logs. This fsync call acts as a barrier
- * to ensure that the data in each new object file is durable before
- * the final name is visible.
- */
- strbuf_addf(&temp_path, "%s/bulk_fsync_XXXXXX",
- repo_get_object_directory(transaction->odb->repo));
- temp = xmks_tempfile(temp_path.buf);
- fsync_or_die(get_tempfile_fd(temp), get_tempfile_path(temp));
- delete_tempfile(&temp);
- strbuf_release(&temp_path);
-
- /*
- * Make the object files visible in the primary ODB after their data is
- * fully durable.
- */
- tmp_objdir_migrate(transaction->objdir);
- transaction->objdir = NULL;
-}
-
-static int already_written(struct odb_transaction *transaction,
- struct object_id *oid)
-{
- /* The object may already exist in the repository */
- if (odb_has_object(transaction->odb, oid,
- HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR))
- return 1;
-
- /* Might want to keep the list sorted */
- for (uint32_t i = 0; i < transaction->packfile.nr_written; i++)
- if (oideq(&transaction->packfile.written[i]->oid, oid))
- return 1;
-
- /* This is a new object we need to keep */
- return 0;
-}
-
-/*
- * Read the contents from fd for size bytes, streaming it to the
- * packfile in state while updating the hash in ctx. Signal a failure
- * by returning a negative value when the resulting pack would exceed
- * the pack size limit and this is not the first object in the pack,
- * so that the caller can discard what we wrote from the current pack
- * by truncating it and opening a new one. The caller will then call
- * us again after rewinding the input fd.
- *
- * The already_hashed_to pointer is kept untouched by the caller to
- * make sure we do not hash the same byte when we are called
- * again. This way, the caller does not have to checkpoint its hash
- * status before calling us just in case we ask it to call us again
- * with a new pack.
- */
-static int stream_blob_to_pack(struct bulk_checkin_packfile *state,
- struct git_hash_ctx *ctx, off_t *already_hashed_to,
- int fd, size_t size, const char *path,
- unsigned flags)
-{
- git_zstream s;
- unsigned char ibuf[16384];
- unsigned char obuf[16384];
- unsigned hdrlen;
- int status = Z_OK;
- int write_object = (flags & INDEX_WRITE_OBJECT);
- off_t offset = 0;
-
- git_deflate_init(&s, pack_compression_level);
-
- hdrlen = encode_in_pack_object_header(obuf, sizeof(obuf), OBJ_BLOB, size);
- s.next_out = obuf + hdrlen;
- s.avail_out = sizeof(obuf) - hdrlen;
-
- while (status != Z_STREAM_END) {
- if (size && !s.avail_in) {
- size_t rsize = size < sizeof(ibuf) ? size : sizeof(ibuf);
- ssize_t read_result = read_in_full(fd, ibuf, rsize);
- if (read_result < 0)
- die_errno("failed to read from '%s'", path);
- if ((size_t)read_result != rsize)
- die("failed to read %u bytes from '%s'",
- (unsigned)rsize, path);
- offset += rsize;
- if (*already_hashed_to < offset) {
- size_t hsize = offset - *already_hashed_to;
- if (rsize < hsize)
- hsize = rsize;
- if (hsize)
- git_hash_update(ctx, ibuf, hsize);
- *already_hashed_to = offset;
- }
- s.next_in = ibuf;
- s.avail_in = rsize;
- size -= rsize;
- }
-
- status = git_deflate(&s, size ? 0 : Z_FINISH);
-
- if (!s.avail_out || status == Z_STREAM_END) {
- if (write_object) {
- size_t written = s.next_out - obuf;
-
- /* would we bust the size limit? */
- if (state->nr_written &&
- pack_size_limit_cfg &&
- pack_size_limit_cfg < state->offset + written) {
- git_deflate_abort(&s);
- return -1;
- }
-
- hashwrite(state->f, obuf, written);
- state->offset += written;
- }
- s.next_out = obuf;
- s.avail_out = sizeof(obuf);
- }
-
- switch (status) {
- case Z_OK:
- case Z_BUF_ERROR:
- case Z_STREAM_END:
- continue;
- default:
- die("unexpected deflate failure: %d", status);
- }
- }
- git_deflate_end(&s);
- return 0;
-}
-
-/* Lazily create backing packfile for the state */
-static void prepare_to_stream(struct odb_transaction *transaction,
- unsigned flags)
-{
- struct bulk_checkin_packfile *state = &transaction->packfile;
- if (!(flags & INDEX_WRITE_OBJECT) || state->f)
- return;
-
- state->f = create_tmp_packfile(transaction->odb->repo,
- &state->pack_tmp_name);
- reset_pack_idx_option(&state->pack_idx_opts);
-
- /* Pretend we are going to write only one object */
- state->offset = write_pack_header(state->f, 1);
- if (!state->offset)
- die_errno("unable to write pack header");
-}
-
-int index_blob_bulk_checkin(struct odb_transaction *transaction,
- struct object_id *result_oid, int fd, size_t size,
- const char *path, unsigned flags)
-{
- struct bulk_checkin_packfile *state = &transaction->packfile;
- off_t seekback, already_hashed_to;
- struct git_hash_ctx ctx;
- unsigned char obuf[16384];
- unsigned header_len;
- struct hashfile_checkpoint checkpoint;
- struct pack_idx_entry *idx = NULL;
-
- seekback = lseek(fd, 0, SEEK_CUR);
- if (seekback == (off_t) -1)
- return error("cannot find the current offset");
-
- header_len = format_object_header((char *)obuf, sizeof(obuf),
- OBJ_BLOB, size);
- transaction->odb->repo->hash_algo->init_fn(&ctx);
- git_hash_update(&ctx, obuf, header_len);
-
- /* Note: idx is non-NULL when we are writing */
- if ((flags & INDEX_WRITE_OBJECT) != 0) {
- CALLOC_ARRAY(idx, 1);
-
- prepare_to_stream(transaction, flags);
- hashfile_checkpoint_init(state->f, &checkpoint);
- }
-
- already_hashed_to = 0;
-
- while (1) {
- prepare_to_stream(transaction, flags);
- if (idx) {
- hashfile_checkpoint(state->f, &checkpoint);
- idx->offset = state->offset;
- crc32_begin(state->f);
- }
- if (!stream_blob_to_pack(state, &ctx, &already_hashed_to,
- fd, size, path, flags))
- break;
- /*
- * Writing this object to the current pack will make
- * it too big; we need to truncate it, start a new
- * pack, and write into it.
- */
- if (!idx)
- BUG("should not happen");
- hashfile_truncate(state->f, &checkpoint);
- state->offset = checkpoint.offset;
- flush_bulk_checkin_packfile(transaction);
- if (lseek(fd, seekback, SEEK_SET) == (off_t) -1)
- return error("cannot seek back");
- }
- git_hash_final_oid(result_oid, &ctx);
- if (!idx)
- return 0;
-
- idx->crc32 = crc32_end(state->f);
- if (already_written(transaction, result_oid)) {
- hashfile_truncate(state->f, &checkpoint);
- state->offset = checkpoint.offset;
- free(idx);
- } else {
- oidcpy(&idx->oid, result_oid);
- ALLOC_GROW(state->written,
- state->nr_written + 1,
- state->alloc_written);
- state->written[state->nr_written++] = idx;
- }
- return 0;
-}
-
-void prepare_loose_object_bulk_checkin(struct odb_transaction *transaction)
-{
- /*
- * We lazily create the temporary object directory
- * the first time an object might be added, since
- * callers may not know whether any objects will be
- * added at the time they call begin_odb_transaction.
- */
- if (!transaction || transaction->objdir)
- return;
-
- transaction->objdir = tmp_objdir_create(transaction->odb->repo, "bulk-fsync");
- if (transaction->objdir)
- tmp_objdir_replace_primary_odb(transaction->objdir, 0);
-}
-
-void fsync_loose_object_bulk_checkin(struct odb_transaction *transaction,
- int fd, const char *filename)
-{
- /*
- * If we have an active ODB transaction, we issue a call that
- * cleans the filesystem page cache but avoids a hardware flush
- * command. Later on we will issue a single hardware flush
- * before renaming the objects to their final names as part of
- * flush_batch_fsync.
- */
- if (!transaction || !transaction->objdir ||
- git_fsync(fd, FSYNC_WRITEOUT_ONLY) < 0) {
- if (errno == ENOSYS)
- warning(_("core.fsyncMethod = batch is unsupported on this platform"));
- fsync_or_die(fd, filename);
- }
-}
-
-struct odb_transaction *begin_odb_transaction(struct object_database *odb)
-{
- if (!odb->transaction) {
- CALLOC_ARRAY(odb->transaction, 1);
- odb->transaction->odb = odb;
- }
-
- odb->transaction->nesting += 1;
-
- return odb->transaction;
-}
-
-void flush_odb_transaction(struct odb_transaction *transaction)
-{
- if (!transaction)
- return;
-
- flush_batch_fsync(transaction);
- flush_bulk_checkin_packfile(transaction);
-}
-
-void end_odb_transaction(struct odb_transaction *transaction)
-{
- if (!transaction || transaction->nesting == 0)
- BUG("Unbalanced ODB transaction nesting");
-
- transaction->nesting -= 1;
-
- if (transaction->nesting)
- return;
-
- flush_odb_transaction(transaction);
- transaction->odb->transaction = NULL;
- free(transaction);
-}
diff --git a/bulk-checkin.h b/bulk-checkin.h
deleted file mode 100644
index ac8887f476..0000000000
--- a/bulk-checkin.h
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Copyright (c) 2011, Google Inc.
- */
-#ifndef BULK_CHECKIN_H
-#define BULK_CHECKIN_H
-
-#include "object.h"
-#include "odb.h"
-
-struct odb_transaction;
-
-void prepare_loose_object_bulk_checkin(struct odb_transaction *transaction);
-void fsync_loose_object_bulk_checkin(struct odb_transaction *transaction,
- int fd, const char *filename);
-
-/*
- * This writes the specified object to a packfile. Objects written here
- * during the same transaction are written to the same packfile. The
- * packfile is not flushed until the transaction is flushed. The caller
- * is expected to ensure a valid transaction is setup for objects to be
- * recorded to.
- *
- * This also bypasses the usual "convert-to-git" dance, and that is on
- * purpose. We could write a streaming version of the converting
- * functions and insert that before feeding the data to fast-import
- * (or equivalent in-core API described above). However, that is
- * somewhat complicated, as we do not know the size of the filter
- * result, which we need to know beforehand when writing a git object.
- * Since the primary motivation for trying to stream from the working
- * tree file and to avoid mmaping it in core is to deal with large
- * binary blobs, they generally do not want to get any conversion, and
- * callers should avoid this code path when filters are requested.
- */
-int index_blob_bulk_checkin(struct odb_transaction *transaction,
- struct object_id *oid, int fd, size_t size,
- const char *path, unsigned flags);
-
-/*
- * Tell the object database to optimize for adding
- * multiple objects. end_odb_transaction must be called
- * to make new objects visible. Transactions can be nested,
- * and objects are only visible after the outermost transaction
- * is complete or the transaction is flushed.
- */
-struct odb_transaction *begin_odb_transaction(struct object_database *odb);
-
-/*
- * Make any objects that are currently part of a pending object
- * database transaction visible. It is valid to call this function
- * even if no transaction is active.
- */
-void flush_odb_transaction(struct odb_transaction *transaction);
-
-/*
- * Tell the object database to make any objects from the
- * current transaction visible if this is the final nested
- * transaction.
- */
-void end_odb_transaction(struct odb_transaction *transaction);
-
-#endif
diff --git a/cache-tree.c b/cache-tree.c
index d225554eed..2aba47060e 100644
--- a/cache-tree.c
+++ b/cache-tree.c
@@ -8,7 +8,6 @@
#include "tree.h"
#include "tree-walk.h"
#include "cache-tree.h"
-#include "bulk-checkin.h"
#include "object-file.h"
#include "odb.h"
#include "read-cache-ll.h"
@@ -490,10 +489,10 @@ int cache_tree_update(struct index_state *istate, int flags)
trace_performance_enter();
trace2_region_enter("cache_tree", "update", the_repository);
- transaction = begin_odb_transaction(the_repository->objects);
+ transaction = odb_transaction_begin(the_repository->objects);
i = update_one(istate->cache_tree, istate->cache, istate->cache_nr,
"", 0, &skip, flags);
- end_odb_transaction(transaction);
+ odb_transaction_commit(transaction);
trace2_region_leave("cache_tree", "update", the_repository);
trace_performance_leave("cache_tree_update");
if (i < 0)
diff --git a/ci/install-dependencies.sh b/ci/install-dependencies.sh
index d061a47293..0d3aa496fc 100755
--- a/ci/install-dependencies.sh
+++ b/ci/install-dependencies.sh
@@ -30,8 +30,12 @@ alpine-*)
bash cvs gnupg perl-cgi perl-dbd-sqlite perl-io-tty >/dev/null
;;
fedora-*|almalinux-*)
+ case "$jobname" in
+ *-meson)
+ MESON_DEPS="meson ninja";;
+ esac
dnf -yq update >/dev/null &&
- dnf -yq install shadow-utils sudo make gcc findutils diffutils perl python3 gawk gettext zlib-devel expat-devel openssl-devel curl-devel pcre2-devel >/dev/null
+ dnf -yq install shadow-utils sudo make pkg-config gcc findutils diffutils perl python3 gawk gettext zlib-devel expat-devel openssl-devel curl-devel pcre2-devel $MESON_DEPS cargo >/dev/null
;;
ubuntu-*|i386/ubuntu-*|debian-*)
# Required so that apt doesn't wait for user input on certain packages.
@@ -58,7 +62,7 @@ ubuntu-*|i386/ubuntu-*|debian-*)
make libssl-dev libcurl4-openssl-dev libexpat-dev wget sudo default-jre \
tcl tk gettext zlib1g-dev perl-modules liberror-perl libauthen-sasl-perl \
libemail-valid-perl libio-pty-perl libio-socket-ssl-perl libnet-smtp-ssl-perl libdbd-sqlite3-perl libcgi-pm-perl \
- libsecret-1-dev libpcre2-dev meson ninja-build pkg-config \
+ libsecret-1-dev libpcre2-dev meson ninja-build pkg-config cargo \
${CC_PACKAGE:-${CC:-gcc}} $PYTHON_PACKAGE
case "$distro" in
diff --git a/ci/run-build-and-tests.sh b/ci/run-build-and-tests.sh
index a21834043f..8bda62b921 100755
--- a/ci/run-build-and-tests.sh
+++ b/ci/run-build-and-tests.sh
@@ -5,11 +5,12 @@
. ${0%/*}/lib.sh
-run_tests=t
-
case "$jobname" in
-linux-breaking-changes)
+fedora-breaking-changes-musl|linux-breaking-changes)
export WITH_BREAKING_CHANGES=YesPlease
+ export WITH_RUST=YesPlease
+ MESONFLAGS="$MESONFLAGS -Dbreaking_changes=true"
+ MESONFLAGS="$MESONFLAGS -Drust=enabled"
;;
linux-TEST-vars)
export OPENSSL_SHA1_UNSAFE=YesPlease
@@ -35,12 +36,6 @@ linux-sha256)
linux-reftable|linux-reftable-leaks|osx-reftable)
export GIT_TEST_DEFAULT_REF_FORMAT=reftable
;;
-pedantic)
- # Don't run the tests; we only care about whether Git can be
- # built.
- export DEVOPTS=pedantic
- run_tests=
- ;;
esac
case "$jobname" in
@@ -53,21 +48,15 @@ case "$jobname" in
-Dtest_output_directory="${TEST_OUTPUT_DIRECTORY:-$(pwd)/t}" \
$MESONFLAGS
group "Build" meson compile -C build --
- if test -n "$run_tests"
- then
- group "Run tests" meson test -C build --print-errorlogs --test-args="$GIT_TEST_OPTS" || (
- ./t/aggregate-results.sh "${TEST_OUTPUT_DIRECTORY:-t}/test-results"
- handle_failed_tests
- )
- fi
+ group "Run tests" meson test -C build --print-errorlogs --test-args="$GIT_TEST_OPTS" || (
+ ./t/aggregate-results.sh "${TEST_OUTPUT_DIRECTORY:-t}/test-results"
+ handle_failed_tests
+ )
;;
*)
group Build make
- if test -n "$run_tests"
- then
- group "Run tests" make test ||
- handle_failed_tests
- fi
+ group "Run tests" make test ||
+ handle_failed_tests
;;
esac
diff --git a/ci/test-documentation.sh b/ci/test-documentation.sh
index 49f87f50fd..5e4fd8fbd7 100755
--- a/ci/test-documentation.sh
+++ b/ci/test-documentation.sh
@@ -48,13 +48,13 @@ check_unignored_build_artifacts
# Build docs with Meson and AsciiDoc
meson setup build-asciidoc -Ddocs=html,man -Ddocs_backend=asciidoc
-meson compile -C build-asciidoc
+meson compile -C build-asciidoc docs
check_docs build-asciidoc AsciiDoc
rm -rf build-asciidoc
# Build docs with Meson and AsciiDoctor
meson setup build-asciidoctor -Ddocs=html,man -Ddocs_backend=asciidoctor
-meson compile -C build-asciidoctor
+meson compile -C build-asciidoctor docs
check_docs build-asciidoctor Asciidoctor
rm -rf build-asciidoctor
diff --git a/connected.c b/connected.c
index 18c13245d8..b288a18b17 100644
--- a/connected.c
+++ b/connected.c
@@ -72,11 +72,12 @@ int check_connected(oid_iterate_fn fn, void *cb_data,
* Before checking for promisor packs, be sure we have the
* latest pack-files loaded into memory.
*/
- reprepare_packed_git(the_repository);
+ odb_reprepare(the_repository->objects);
do {
+ struct packfile_store *packs = the_repository->objects->packfiles;
struct packed_git *p;
- for (p = get_all_packs(the_repository); p; p = p->next) {
+ for (p = packfile_store_get_all_packs(packs); p; p = p->next) {
if (!p->pack_promisor)
continue;
if (find_pack_entry_one(oid, p))
diff --git a/contrib/contacts/meson.build b/contrib/contacts/meson.build
index 73d82dfe52..c8fdb35ed9 100644
--- a/contrib/contacts/meson.build
+++ b/contrib/contacts/meson.build
@@ -20,7 +20,7 @@ if get_option('docs').contains('man')
output: 'git-contacts.xml',
)
- custom_target(
+ doc_targets += custom_target(
command: [
xmlto,
'-m', '@INPUT@',
@@ -39,7 +39,7 @@ if get_option('docs').contains('man')
endif
if get_option('docs').contains('html')
- custom_target(
+ doc_targets += custom_target(
command: asciidoc_common_options + [
'--backend=' + asciidoc_html,
'--doctype=manpage',
diff --git a/contrib/subtree/meson.build b/contrib/subtree/meson.build
index 98dd8e0c8e..46cdbcc30c 100644
--- a/contrib/subtree/meson.build
+++ b/contrib/subtree/meson.build
@@ -38,7 +38,7 @@ if get_option('docs').contains('man')
output: 'git-subtree.xml',
)
- custom_target(
+ doc_targets += custom_target(
command: [
xmlto,
'-m', '@INPUT@',
@@ -57,7 +57,7 @@ if get_option('docs').contains('man')
endif
if get_option('docs').contains('html')
- custom_target(
+ doc_targets += custom_target(
command: asciidoc_common_options + [
'--backend=' + asciidoc_html,
'--doctype=manpage',
diff --git a/dir.c b/dir.c
index 71108ac79b..0a67a99cb3 100644
--- a/dir.c
+++ b/dir.c
@@ -3579,7 +3579,8 @@ static void write_one_dir(struct untracked_cache_dir *untracked,
struct stat_data stat_data;
struct strbuf *out = &wd->out;
unsigned char intbuf[16];
- unsigned int intlen, value;
+ unsigned int value;
+ uint8_t intlen;
int i = wd->index++;
/*
@@ -3632,7 +3633,7 @@ void write_untracked_extension(struct strbuf *out, struct untracked_cache *untra
struct ondisk_untracked_cache *ouc;
struct write_data wd;
unsigned char varbuf[16];
- int varint_len;
+ uint8_t varint_len;
const unsigned hashsz = the_hash_algo->rawsz;
CALLOC_ARRAY(ouc, 1);
@@ -3738,7 +3739,7 @@ static int read_one_dir(struct untracked_cache_dir **untracked_,
struct untracked_cache_dir ud, *untracked;
const unsigned char *data = rd->data, *end = rd->end;
const unsigned char *eos;
- unsigned int value;
+ uint64_t value;
int i;
memset(&ud, 0, sizeof(ud));
@@ -3830,7 +3831,8 @@ struct untracked_cache *read_untracked_extension(const void *data, unsigned long
struct read_data rd;
const unsigned char *next = data, *end = (const unsigned char *)data + sz;
const char *ident;
- int ident_len;
+ uint64_t ident_len;
+ uint64_t varint_len;
ssize_t len;
const char *exclude_per_dir;
const unsigned hashsz = the_hash_algo->rawsz;
@@ -3867,8 +3869,8 @@ struct untracked_cache *read_untracked_extension(const void *data, unsigned long
if (next >= end)
goto done2;
- len = decode_varint(&next);
- if (next > end || len == 0)
+ varint_len = decode_varint(&next);
+ if (next > end || varint_len == 0)
goto done2;
rd.valid = ewah_new();
@@ -3877,9 +3879,9 @@ struct untracked_cache *read_untracked_extension(const void *data, unsigned long
rd.data = next;
rd.end = end;
rd.index = 0;
- ALLOC_ARRAY(rd.ucd, len);
+ ALLOC_ARRAY(rd.ucd, varint_len);
- if (read_one_dir(&uc->root, &rd) || rd.index != len)
+ if (read_one_dir(&uc->root, &rd) || rd.index != varint_len)
goto done;
next = rd.data;
diff --git a/fetch-pack.c b/fetch-pack.c
index 6ed5662951..fe7a84bf2f 100644
--- a/fetch-pack.c
+++ b/fetch-pack.c
@@ -1983,7 +1983,7 @@ static void update_shallow(struct fetch_pack_args *args,
* remote is shallow, but this is a clone, there are
* no objects in repo to worry about. Accept any
* shallow points that exist in the pack (iow in repo
- * after get_pack() and reprepare_packed_git())
+ * after get_pack() and odb_reprepare())
*/
struct oid_array extra = OID_ARRAY_INIT;
struct object_id *oid = si->shallow->oid;
@@ -2108,7 +2108,7 @@ struct ref *fetch_pack(struct fetch_pack_args *args,
ref_cpy = do_fetch_pack(args, fd, ref, sought, nr_sought,
&si, pack_lockfiles);
}
- reprepare_packed_git(the_repository);
+ odb_reprepare(the_repository->objects);
if (!args->cloning && args->deepen) {
struct check_connected_options opt = CHECK_CONNECTED_INIT;
diff --git a/git-compat-util.h b/git-compat-util.h
index 9408f463e3..398e0fac4f 100644
--- a/git-compat-util.h
+++ b/git-compat-util.h
@@ -460,7 +460,7 @@ void warning_errno(const char *err, ...) __attribute__((format (printf, 1, 2)));
void show_usage_if_asked(int ac, const char **av, const char *err);
-NORETURN void you_still_use_that(const char *command_name);
+NORETURN void you_still_use_that(const char *command_name, const char *hint);
#ifndef NO_OPENSSL
#ifdef APPLE_COMMON_CRYPTO
diff --git a/git.c b/git.c
index d020eef021..c5fad56813 100644
--- a/git.c
+++ b/git.c
@@ -28,6 +28,7 @@
#define NEED_WORK_TREE (1<<3)
#define DELAY_PAGER_CONFIG (1<<4)
#define NO_PARSEOPT (1<<5) /* parse-options is not used */
+#define DEPRECATED (1<<6)
struct cmd_struct {
const char *cmd;
@@ -51,7 +52,9 @@ const char git_more_info_string[] =
static int use_pager = -1;
-static void list_builtins(struct string_list *list, unsigned int exclude_option);
+static void list_builtins(struct string_list *list,
+ unsigned int include_option,
+ unsigned int exclude_option);
static void exclude_helpers_from_list(struct string_list *list)
{
@@ -88,7 +91,7 @@ static int list_cmds(const char *spec)
int len = sep - spec;
if (match_token(spec, len, "builtins"))
- list_builtins(&list, 0);
+ list_builtins(&list, 0, 0);
else if (match_token(spec, len, "main"))
list_all_main_cmds(&list);
else if (match_token(spec, len, "others"))
@@ -99,6 +102,8 @@ static int list_cmds(const char *spec)
list_aliases(&list);
else if (match_token(spec, len, "config"))
list_cmds_by_config(&list);
+ else if (match_token(spec, len, "deprecated"))
+ list_builtins(&list, DEPRECATED, 0);
else if (len > 5 && !strncmp(spec, "list-", 5)) {
struct strbuf sb = STRBUF_INIT;
@@ -322,7 +327,7 @@ static int handle_options(const char ***argv, int *argc, int *envchanged)
if (!strcmp(cmd, "parseopt")) {
struct string_list list = STRING_LIST_INIT_DUP;
- list_builtins(&list, NO_PARSEOPT);
+ list_builtins(&list, 0, NO_PARSEOPT);
for (size_t i = 0; i < list.nr; i++)
printf("%s ", list.items[i].string);
string_list_clear(&list, 0);
@@ -360,7 +365,7 @@ static int handle_options(const char ***argv, int *argc, int *envchanged)
return (*argv) - orig_argv;
}
-static int handle_alias(struct strvec *args)
+static int handle_alias(struct strvec *args, struct string_list *expanded_aliases)
{
int envchanged = 0, ret = 0, saved_errno = errno;
int count, option_count;
@@ -371,6 +376,8 @@ static int handle_alias(struct strvec *args)
alias_command = args->v[0];
alias_string = alias_lookup(alias_command);
if (alias_string) {
+ struct string_list_item *seen;
+
if (args->nr == 2 && !strcmp(args->v[1], "-h"))
fprintf_ln(stderr, _("'%s' is aliased to '%s'"),
alias_command, alias_string);
@@ -418,6 +425,25 @@ static int handle_alias(struct strvec *args)
if (!strcmp(alias_command, new_argv[0]))
die(_("recursive alias: %s"), alias_command);
+ string_list_append(expanded_aliases, alias_command);
+ seen = unsorted_string_list_lookup(expanded_aliases,
+ new_argv[0]);
+
+ if (seen) {
+ struct strbuf sb = STRBUF_INIT;
+ for (size_t i = 0; i < expanded_aliases->nr; i++) {
+ struct string_list_item *item = &expanded_aliases->items[i];
+
+ strbuf_addf(&sb, "\n %s", item->string);
+ if (item == seen)
+ strbuf_addstr(&sb, " <==");
+ else if (i == expanded_aliases->nr - 1)
+ strbuf_addstr(&sb, " ==>");
+ }
+ die(_("alias loop detected: expansion of '%s' does"
+ " not terminate:%s"), expanded_aliases->items[0].string, sb.buf);
+ }
+
trace_argv_printf(new_argv,
"trace: alias expansion: %s =>",
alias_command);
@@ -591,7 +617,7 @@ static struct cmd_struct commands[] = {
{ "notes", cmd_notes, RUN_SETUP },
{ "pack-objects", cmd_pack_objects, RUN_SETUP },
#ifndef WITH_BREAKING_CHANGES
- { "pack-redundant", cmd_pack_redundant, RUN_SETUP | NO_PARSEOPT },
+ { "pack-redundant", cmd_pack_redundant, RUN_SETUP | NO_PARSEOPT | DEPRECATED },
#endif
{ "pack-refs", cmd_pack_refs, RUN_SETUP },
{ "patch-id", cmd_patch_id, RUN_SETUP_GENTLY | NO_PARSEOPT },
@@ -649,7 +675,7 @@ static struct cmd_struct commands[] = {
{ "verify-tag", cmd_verify_tag, RUN_SETUP },
{ "version", cmd_version },
#ifndef WITH_BREAKING_CHANGES
- { "whatchanged", cmd_whatchanged, RUN_SETUP },
+ { "whatchanged", cmd_whatchanged, RUN_SETUP | DEPRECATED },
#endif
{ "worktree", cmd_worktree, RUN_SETUP },
{ "write-tree", cmd_write_tree, RUN_SETUP },
@@ -670,11 +696,16 @@ int is_builtin(const char *s)
return !!get_builtin(s);
}
-static void list_builtins(struct string_list *out, unsigned int exclude_option)
+static void list_builtins(struct string_list *out,
+ unsigned int include_option,
+ unsigned int exclude_option)
{
+ if (include_option && exclude_option)
+ BUG("'include_option' and 'exclude_option' are mutually exclusive");
for (size_t i = 0; i < ARRAY_SIZE(commands); i++) {
- if (exclude_option &&
- (commands[i].option & exclude_option))
+ if (include_option && !(commands[i].option & include_option))
+ continue;
+ if (exclude_option && (commands[i].option & exclude_option))
continue;
string_list_append(out, commands[i].cmd);
}
@@ -795,14 +826,30 @@ static void execv_dashed_external(const char **argv)
exit(128);
}
+static int is_deprecated_command(const char *cmd)
+{
+ struct cmd_struct *builtin = get_builtin(cmd);
+ return builtin && (builtin->option & DEPRECATED);
+}
+
static int run_argv(struct strvec *args)
{
int done_alias = 0;
- struct string_list cmd_list = STRING_LIST_INIT_DUP;
- struct string_list_item *seen;
+ struct string_list expanded_aliases = STRING_LIST_INIT_DUP;
while (1) {
/*
+ * Allow deprecated commands to be overridden by aliases. This
+ * creates a seamless path forward for people who want to keep
+ * using the name after it is gone, but want to skip the
+ * deprecation complaint in the meantime.
+ */
+ if (is_deprecated_command(args->v[0]) &&
+ handle_alias(args, &expanded_aliases)) {
+ done_alias = 1;
+ continue;
+ }
+ /*
* If we tried alias and futzed with our environment,
* it no longer is safe to invoke builtins directly in
* general. We have to spawn them as dashed externals.
@@ -851,35 +898,17 @@ static int run_argv(struct strvec *args)
/* .. then try the external ones */
execv_dashed_external(args->v);
- seen = unsorted_string_list_lookup(&cmd_list, args->v[0]);
- if (seen) {
- struct strbuf sb = STRBUF_INIT;
- for (size_t i = 0; i < cmd_list.nr; i++) {
- struct string_list_item *item = &cmd_list.items[i];
-
- strbuf_addf(&sb, "\n %s", item->string);
- if (item == seen)
- strbuf_addstr(&sb, " <==");
- else if (i == cmd_list.nr - 1)
- strbuf_addstr(&sb, " ==>");
- }
- die(_("alias loop detected: expansion of '%s' does"
- " not terminate:%s"), cmd_list.items[0].string, sb.buf);
- }
-
- string_list_append(&cmd_list, args->v[0]);
-
/*
* It could be an alias -- this works around the insanity
* of overriding "git log" with "git show" by having
* alias.log = show
*/
- if (!handle_alias(args))
+ if (!handle_alias(args, &expanded_aliases))
break;
done_alias = 1;
}
- string_list_clear(&cmd_list, 0);
+ string_list_clear(&expanded_aliases, 0);
return done_alias;
}
diff --git a/gitk-git/gitk b/gitk-git/gitk
index 6e4d71d585..c02db0194d 100755
--- a/gitk-git/gitk
+++ b/gitk-git/gitk
@@ -2215,6 +2215,7 @@ proc setoptions {} {
}
proc setttkstyle {} {
+ global theme
eval font configure TkDefaultFont [fontflags mainfont]
eval font configure TkTextFont [fontflags textfont]
eval font configure TkHeadingFont [fontflags mainfont]
@@ -2224,6 +2225,10 @@ proc setttkstyle {} {
eval font configure TkIconFont [fontflags uifont]
eval font configure TkMenuFont [fontflags uifont]
eval font configure TkSmallCaptionFont [fontflags uifont]
+
+ if {[catch {ttk::style theme use $theme} err]} {
+ set theme [ttk::style theme use]
+ }
}
# Make a menu and submenus.
@@ -2376,7 +2381,6 @@ proc makewindow {} {
global highlight_files gdttype
global searchstring sstring
global bgcolor fgcolor bglist fglist diffcolors diffbgcolors selectbgcolor
- global uifgcolor uifgdisabledcolor
global filesepbgcolor filesepfgcolor
global mergecolors foundbgcolor currentsearchhitbgcolor
global headctxmenu progresscanv progressitem progresscoords statusw
@@ -2495,40 +2499,18 @@ proc makewindow {} {
set sha1entry .tf.bar.sha1
set entries $sha1entry
set sha1but .tf.bar.sha1label
- button $sha1but -text "[mc "Commit ID:"] " -state disabled -relief flat \
+ ttk::button $sha1but -text "[mc "Commit ID:"] " -state disabled \
-command gotocommit -width 8
- $sha1but conf -disabledforeground [$sha1but cget -foreground]
pack .tf.bar.sha1label -side left
ttk::entry $sha1entry -width $hashlength -font textfont -textvariable sha1string
trace add variable sha1string write sha1change
pack $sha1entry -side left -pady 2
- set bm_left_data {
- #define left_width 16
- #define left_height 16
- static unsigned char left_bits[] = {
- 0x00, 0x00, 0xc0, 0x01, 0xe0, 0x00, 0x70, 0x00, 0x38, 0x00, 0x1c, 0x00,
- 0x0e, 0x00, 0xff, 0x7f, 0xff, 0x7f, 0xff, 0x7f, 0x0e, 0x00, 0x1c, 0x00,
- 0x38, 0x00, 0x70, 0x00, 0xe0, 0x00, 0xc0, 0x01};
- }
- set bm_right_data {
- #define right_width 16
- #define right_height 16
- static unsigned char right_bits[] = {
- 0x00, 0x00, 0xc0, 0x01, 0x80, 0x03, 0x00, 0x07, 0x00, 0x0e, 0x00, 0x1c,
- 0x00, 0x38, 0xff, 0x7f, 0xff, 0x7f, 0xff, 0x7f, 0x00, 0x38, 0x00, 0x1c,
- 0x00, 0x0e, 0x00, 0x07, 0x80, 0x03, 0xc0, 0x01};
- }
- image create bitmap bm-left -data $bm_left_data -foreground $uifgcolor
- image create bitmap bm-left-gray -data $bm_left_data -foreground $uifgdisabledcolor
- image create bitmap bm-right -data $bm_right_data -foreground $uifgcolor
- image create bitmap bm-right-gray -data $bm_right_data -foreground $uifgdisabledcolor
-
- ttk::button .tf.bar.leftbut -command goback -state disabled -width 26
- .tf.bar.leftbut configure -image [list bm-left disabled bm-left-gray]
+ ttk::button .tf.bar.leftbut -command goback -state disabled
+ .tf.bar.leftbut configure -text \u2190 -width 3
pack .tf.bar.leftbut -side left -fill y
- ttk::button .tf.bar.rightbut -command goforw -state disabled -width 26
- .tf.bar.rightbut configure -image [list bm-right disabled bm-right-gray]
+ ttk::button .tf.bar.rightbut -command goforw -state disabled
+ .tf.bar.rightbut configure -text \u2192 -width 3
pack .tf.bar.rightbut -side left -fill y
ttk::label .tf.bar.rowlabel -text [mc "Row"]
@@ -2559,31 +2541,8 @@ proc makewindow {} {
# build up the bottom bar of upper window
ttk::label .tf.lbar.flabel -text "[mc "Find"] "
- set bm_down_data {
- #define down_width 16
- #define down_height 16
- static unsigned char down_bits[] = {
- 0x80, 0x01, 0x80, 0x01, 0x80, 0x01, 0x80, 0x01,
- 0x80, 0x01, 0x80, 0x01, 0x80, 0x01, 0x80, 0x01,
- 0x87, 0xe1, 0x8e, 0x71, 0x9c, 0x39, 0xb8, 0x1d,
- 0xf0, 0x0f, 0xe0, 0x07, 0xc0, 0x03, 0x80, 0x01};
- }
- image create bitmap bm-down -data $bm_down_data -foreground $uifgcolor
- ttk::button .tf.lbar.fnext -width 26 -command {dofind 1 1}
- .tf.lbar.fnext configure -image bm-down
-
- set bm_up_data {
- #define up_width 16
- #define up_height 16
- static unsigned char up_bits[] = {
- 0x80, 0x01, 0xc0, 0x03, 0xe0, 0x07, 0xf0, 0x0f,
- 0xb8, 0x1d, 0x9c, 0x39, 0x8e, 0x71, 0x87, 0xe1,
- 0x80, 0x01, 0x80, 0x01, 0x80, 0x01, 0x80, 0x01,
- 0x80, 0x01, 0x80, 0x01, 0x80, 0x01, 0x80, 0x01};
- }
- image create bitmap bm-up -data $bm_up_data -foreground $uifgcolor
- ttk::button .tf.lbar.fprev -width 26 -command {dofind -1 1}
- .tf.lbar.fprev configure -image bm-up
+ ttk::button .tf.lbar.fnext -command {dofind 1 1} -text \u2193 -width 3
+ ttk::button .tf.lbar.fprev -command {dofind -1 1} -text \u2191 -width 3
ttk::label .tf.lbar.flab2 -text " [mc "commit"] "
@@ -2656,7 +2615,7 @@ proc makewindow {} {
ttk::label .bleft.mid.labeldiffcontext -text " [mc "Lines of context"]: "
pack .bleft.mid.diff .bleft.mid.old .bleft.mid.new -side left -ipadx $wgap
- spinbox .bleft.mid.diffcontext -width 5 \
+ ttk::spinbox .bleft.mid.diffcontext -width 5 \
-from 0 -increment 1 -to 10000000 \
-validate all -validatecommand "diffcontextvalidate %P" \
-textvariable diffcontextstring
@@ -8910,9 +8869,9 @@ proc sha1change {n1 n2 op} {
}
if {[$sha1but cget -state] == $state} return
if {$state == "normal"} {
- $sha1but conf -state normal -relief raised -text "[mc "Goto:"] "
+ $sha1but conf -state normal -text "[mc "Goto:"] "
} else {
- $sha1but conf -state disabled -relief flat -text "[mc "Commit ID:"] "
+ $sha1but conf -state disabled -text "[mc "Commit ID:"] "
}
}
@@ -10294,7 +10253,9 @@ proc refill_reflist {} {
if {![string match "remotes/*" $n] && [string match $reflistfilter $n]} {
if {[commitinview $headids($n) $curview]} {
lappend localrefs [list $n H]
- if {[info exists upstreamofref($n)] && [commitinview $headids($upstreamofref($n)) $curview]} {
+ if {[info exists upstreamofref($n)] && \
+ [info exists headids($upstreamofref($n))] && \
+ [commitinview $headids($upstreamofref($n)) $curview]} {
lappend trackedremoterefs [list $upstreamofref($n) R]
}
} else {
@@ -11608,9 +11569,10 @@ proc mkfontdisp {font top which} {
set fontpref($font) [set $font]
ttk::button $top.${font}but -text $which \
-command [list choosefont $font $which]
- ttk::label $top.$font -relief flat -font $font \
- -text $fontattr($font,family) -justify left
+ ttk::label $top.$font -font $font \
+ -text $fontattr($font,family)
grid x $top.${font}but $top.$font -sticky w
+ grid configure $top.$font -sticky ew
}
proc centertext {w} {
@@ -11690,48 +11652,52 @@ proc prefspage_general {notebook} {
ttk::label $page.ldisp -text [mc "Commit list display options"] -font mainfontbold
grid $page.ldisp - -sticky w -pady 10
+
ttk::label $page.spacer -text " "
ttk::label $page.maxwidthl -text [mc "Maximum graph width (lines)"]
- spinbox $page.maxwidth -from 0 -to 100 -width 4 -textvariable maxwidth
+ ttk::spinbox $page.maxwidth -from 0 -to 100 -width 4 -textvariable maxwidth
grid $page.spacer $page.maxwidthl $page.maxwidth -sticky w
#xgettext:no-tcl-format
ttk::label $page.maxpctl -text [mc "Maximum graph width (% of pane)"]
- spinbox $page.maxpct -from 1 -to 100 -width 4 -textvariable maxgraphpct
+ ttk::spinbox $page.maxpct -from 1 -to 100 -width 4 -textvariable maxgraphpct
grid x $page.maxpctl $page.maxpct -sticky w
+
ttk::checkbutton $page.showlocal -text [mc "Show local changes"] \
-variable showlocalchanges
grid x $page.showlocal -sticky w
+
ttk::checkbutton $page.hideremotes -text [mc "Hide remote refs"] \
-variable hideremotes
grid x $page.hideremotes -sticky w
ttk::entry $page.refstohide -textvariable refstohide
- ttk::frame $page.refstohidef
- ttk::label $page.refstohidef.l -text [mc "Refs to hide (space-separated globs)" ]
- pack $page.refstohidef.l -side left
- pack configure $page.refstohidef.l -padx 10
- grid x $page.refstohidef $page.refstohide -sticky ew
+ ttk::label $page.refstohidel -text [mc "Refs to hide (space-separated globs)"]
+ grid x $page.refstohidel $page.refstohide -sticky ew
+ grid configure $page.refstohide -padx {0 5}
ttk::checkbutton $page.autocopy -text [mc "Copy commit ID to clipboard"] \
-variable autocopy
grid x $page.autocopy -sticky w
+
if {[haveselectionclipboard]} {
ttk::checkbutton $page.autoselect -text [mc "Copy commit ID to X11 selection"] \
-variable autoselect
grid x $page.autoselect -sticky w
}
- spinbox $page.autosellen -from 1 -to $hashlength -width 4 -textvariable autosellen
+ ttk::spinbox $page.autosellen -from 1 -to $hashlength -width 4 -textvariable autosellen
ttk::label $page.autosellenl -text [mc "Length of commit ID to copy"]
grid x $page.autosellenl $page.autosellen -sticky w
+
ttk::label $page.kscroll1 -text [mc "Wheel scrolling multiplier"]
- spinbox $page.kscroll -from 1 -to 20 -width 4 -textvariable kscroll
+ ttk::spinbox $page.kscroll -from 1 -to 20 -width 4 -textvariable kscroll
grid x $page.kscroll1 $page.kscroll -sticky w
ttk::label $page.ddisp -text [mc "Diff display options"] -font mainfontbold
grid $page.ddisp - -sticky w -pady 10
+
ttk::label $page.tabstopl -text [mc "Tab spacing"]
- spinbox $page.tabstop -from 1 -to 20 -width 4 -textvariable tabstop
+ ttk::spinbox $page.tabstop -from 1 -to 20 -width 4 -textvariable tabstop
grid x $page.tabstopl $page.tabstop -sticky w
ttk::label $page.wrapcommentl -text [mc "Wrap comment text"]
@@ -11745,12 +11711,15 @@ proc prefspage_general {notebook} {
ttk::checkbutton $page.ntag -text [mc "Display nearby tags/heads"] \
-variable showneartags
grid x $page.ntag -sticky w
+
ttk::label $page.maxrefsl -text [mc "Maximum # tags/heads to show"]
- spinbox $page.maxrefs -from 1 -to 1000 -width 4 -textvariable maxrefs
+ ttk::spinbox $page.maxrefs -from 1 -to 1000 -width 4 -textvariable maxrefs
grid x $page.maxrefsl $page.maxrefs -sticky w
+
ttk::checkbutton $page.ldiff -text [mc "Limit diffs to listed paths"] \
-variable limitdiffs
grid x $page.ldiff -sticky w
+
ttk::checkbutton $page.lattr -text [mc "Support per-file encodings"] \
-variable perfile_attrs
grid x $page.lattr -sticky w
@@ -11759,76 +11728,109 @@ proc prefspage_general {notebook} {
ttk::frame $page.extdifff
ttk::label $page.extdifff.l -text [mc "External diff tool" ]
ttk::button $page.extdifff.b -text [mc "Choose..."] -command choose_extdiff
- pack $page.extdifff.l $page.extdifff.b -side left
- pack configure $page.extdifff.l -padx 10
+ pack $page.extdifff.l -side left
+ pack $page.extdifff.b -side right -padx {0 5}
grid x $page.extdifff $page.extdifft -sticky ew
+ grid configure $page.extdifft -padx {0 5}
ttk::entry $page.webbrowser -textvariable web_browser
- ttk::frame $page.webbrowserf
- ttk::label $page.webbrowserf.l -text [mc "Web browser" ]
- pack $page.webbrowserf.l -side left
- pack configure $page.webbrowserf.l -padx 10
- grid x $page.webbrowserf $page.webbrowser -sticky ew
+ ttk::label $page.webbrowserl -text [mc "Web browser" ]
+ grid x $page.webbrowserl $page.webbrowser -sticky ew
+ grid configure $page.webbrowser -padx {0 5}
+
+ grid columnconfigure $page 2 -weight 1
return $page
}
proc prefspage_colors {notebook} {
- global uicolor bgcolor fgcolor ctext diffcolors selectbgcolor markbgcolor
+ global bgcolor fgcolor ctext diffcolors selectbgcolor markbgcolor
global diffbgcolors
+ global themeloader
set page [create_prefs_page $notebook.colors]
+ ttk::label $page.themesel -font mainfontbold \
+ -text [mc "Themes - change requires restart"]
+ grid $page.themesel - -sticky w -pady 10
+
+ ttk::label $page.themelabel -text [mc "Theme to use after restart"]
+ makedroplist $page.theme theme {*}[lsort [ttk::style theme names]]
+ grid x $page.themelabel $page.theme -sticky w
+
+ ttk::entry $page.tloadvar -textvariable themeloader
+ ttk::frame $page.tloadframe
+ ttk::label $page.tloadframe.l -text [mc "Theme definition file"]
+ ttk::button $page.tloadframe.b -text [mc "Choose..."] \
+ -command [list choose_themeloader $page]
+ pack $page.tloadframe.l -side left
+ pack $page.tloadframe.b -side right -padx {0 5}
+ pack configure $page.tloadframe.l -padx 0
+ grid x $page.tloadframe $page.tloadvar -sticky ew
+ grid configure $page.tloadvar -padx {0 5}
+
+ ttk::label $page.themelabel2 -text \
+ [mc "The theme definition file may affect all themes."]
+ ttk::button $page.themebut2 -text [mc "Apply theme"] \
+ -command [list updatetheme $page]
+ grid x $page.themebut2 $page.themelabel2 -sticky w
+
ttk::label $page.cdisp -text [mc "Colors: press to choose"] -font mainfontbold
grid $page.cdisp - -sticky w -pady 10
- label $page.ui -padx 40 -relief sunk -background $uicolor
- ttk::button $page.uibut -text [mc "Interface"] \
- -command [list choosecolor uicolor {} $page [mc "interface"]]
- grid x $page.uibut $page.ui -sticky w
label $page.bg -padx 40 -relief sunk -background $bgcolor
ttk::button $page.bgbut -text [mc "Background"] \
-command [list choosecolor bgcolor {} $page [mc "background"]]
grid x $page.bgbut $page.bg -sticky w
+
label $page.fg -padx 40 -relief sunk -background $fgcolor
ttk::button $page.fgbut -text [mc "Foreground"] \
-command [list choosecolor fgcolor {} $page [mc "foreground"]]
grid x $page.fgbut $page.fg -sticky w
+
label $page.diffold -padx 40 -relief sunk -background [lindex $diffcolors 0]
ttk::button $page.diffoldbut -text [mc "Diff: old lines"] \
-command [list choosecolor diffcolors 0 $page [mc "diff old lines"]]
grid x $page.diffoldbut $page.diffold -sticky w
+
label $page.diffoldbg -padx 40 -relief sunk -background [lindex $diffbgcolors 0]
ttk::button $page.diffoldbgbut -text [mc "Diff: old lines bg"] \
-command [list choosecolor diffbgcolors 0 $page [mc "diff old lines bg"]]
grid x $page.diffoldbgbut $page.diffoldbg -sticky w
+
label $page.diffnew -padx 40 -relief sunk -background [lindex $diffcolors 1]
ttk::button $page.diffnewbut -text [mc "Diff: new lines"] \
-command [list choosecolor diffcolors 1 $page [mc "diff new lines"]]
grid x $page.diffnewbut $page.diffnew -sticky w
+
label $page.diffnewbg -padx 40 -relief sunk -background [lindex $diffbgcolors 1]
ttk::button $page.diffnewbgbut -text [mc "Diff: new lines bg"] \
-command [list choosecolor diffbgcolors 1 $page [mc "diff new lines bg"]]
grid x $page.diffnewbgbut $page.diffnewbg -sticky w
+
label $page.hunksep -padx 40 -relief sunk -background [lindex $diffcolors 2]
ttk::button $page.hunksepbut -text [mc "Diff: hunk header"] \
-command [list choosecolor diffcolors 2 $page [mc "diff hunk header"]]
grid x $page.hunksepbut $page.hunksep -sticky w
+
label $page.markbgsep -padx 40 -relief sunk -background $markbgcolor
ttk::button $page.markbgbut -text [mc "Marked line bg"] \
-command [list choosecolor markbgcolor {} $page [mc "marked line background"]]
grid x $page.markbgbut $page.markbgsep -sticky w
+
label $page.selbgsep -padx 40 -relief sunk -background $selectbgcolor
ttk::button $page.selbgbut -text [mc "Select bg"] \
-command [list choosecolor selectbgcolor {} $page [mc "background"]]
grid x $page.selbgbut $page.selbgsep -sticky w
+
+ grid columnconfigure $page 2 -weight 1
+
return $page
}
proc prefspage_set_colorswatches {page} {
- global uicolor bgcolor fgcolor ctext diffcolors selectbgcolor markbgcolor
+ global bgcolor fgcolor ctext diffcolors selectbgcolor markbgcolor
global diffbgcolors
- $page.ui configure -background $uicolor
$page.bg configure -background $bgcolor
$page.fg configure -background $fgcolor
$page.diffold configure -background [lindex $diffcolors 0]
@@ -11847,6 +11849,7 @@ proc prefspage_fonts {notebook} {
mkfontdisp mainfont $page [mc "Main font"]
mkfontdisp textfont $page [mc "Diff display font"]
mkfontdisp uifont $page [mc "User interface font"]
+ grid columnconfigure $page 2 -weight 1
return $page
}
@@ -11881,7 +11884,7 @@ proc doprefs {} {
grid rowconfigure $notebook 1 -weight 1
raise [lindex $pages 0]
- grid $notebook -sticky news -padx 2 -pady 2
+ grid $notebook -sticky news -padx 3 -pady 3
grid rowconfigure $top 0 -weight 1
grid columnconfigure $top 0 -weight 1
@@ -11890,12 +11893,13 @@ proc doprefs {} {
ttk::button $top.buts.can -text [mc "Cancel"] -command prefscan -default normal
bind $top <Key-Return> prefsok
bind $top <Key-Escape> prefscan
- grid $top.buts.ok $top.buts.can
- grid columnconfigure $top.buts 0 -weight 1 -uniform a
- grid columnconfigure $top.buts 1 -weight 1 -uniform a
- grid $top.buts - - -pady 10 -sticky ew
- grid columnconfigure $top 2 -weight 1
+ grid $top.buts.ok $top.buts.can -padx 20
+ grid $top.buts -sticky w -pady 10
bind $top <Visibility> [list focus $top.buts.ok]
+
+    # let the geometry manager determine the size, then set it as the minimum
+ update idletasks
+ wm minsize $top [winfo reqwidth $top] [winfo reqheight $top]
}
proc choose_extdiff {} {
@@ -11907,6 +11911,51 @@ proc choose_extdiff {} {
}
}
+proc run_themeloader {f} {
+ if {![info exists ::_themefiles_seen]} {
+ set ::_themefiles_seen [dict create]
+ }
+
+ set fn [file normalize $f]
+ if {![dict exists $::_themefiles_seen $fn]} {
+ if {[catch {source $fn} err]} {
+ error_popup "could not interpret: $fn\n$err"
+ dict set ::_themefiles_seen $fn 0
+ } else {
+ dict set ::_themefiles_seen $fn 1
+ }
+ }
+ return [dict get $::_themefiles_seen $fn]
+}
+
+proc updatetheme {prefspage {dotheme 1}} {
+ global theme
+ global themeloader
+ if {$themeloader ne {}} {
+ if {![run_themeloader $themeloader]} {
+ set themeloader {}
+ return
+ } else {
+ $prefspage.theme configure -values \
+ [lsort [ttk::style theme names]]
+ }
+ }
+ if {$dotheme} {
+ ttk::style theme use $theme
+ set_gui_colors
+ prefspage_set_colorswatches $prefspage
+ }
+}
+
+proc choose_themeloader {prefspage} {
+ global themeloader
+ set tfile [tk_getOpenFile -title [mc "Gitk: select theme definition"] -multiple false]
+ if {$tfile ne {}} {
+ set themeloader $tfile
+ updatetheme $prefspage 0
+ }
+}
+
proc choosecolor {v vi prefspage x} {
global $v
@@ -11930,21 +11979,6 @@ proc setselbg {c} {
allcanvs itemconf secsel -fill $c
}
-# This sets the background color and the color scheme for the whole UI.
-# For some reason, tk_setPalette chooses a nasty dark red for selectColor
-# if we don't specify one ourselves, which makes the checkbuttons and
-# radiobuttons look bad. This chooses white for selectColor if the
-# background color is light, or black if it is dark.
-proc setui {c} {
- if {[tk windowingsystem] eq "win32"} { return }
- set bg [winfo rgb . $c]
- set selc black
- if {[lindex $bg 0] + 1.5 * [lindex $bg 1] + 0.5 * [lindex $bg 2] > 100000} {
- set selc white
- }
- tk_setPalette background $c selectColor $selc
-}
-
proc setbg {c} {
global bglist
@@ -11969,10 +12003,9 @@ proc setfg {c} {
}
proc set_gui_colors {} {
- global uicolor bgcolor fgcolor ctext diffcolors selectbgcolor markbgcolor
+ global bgcolor fgcolor ctext diffcolors selectbgcolor markbgcolor
global diffbgcolors
- setui $uicolor
setbg $bgcolor
setfg $fgcolor
$ctext tag conf d0 -foreground [lindex $diffcolors 0]
@@ -11994,6 +12027,7 @@ proc prefscan {} {
catch {destroy $prefstop}
unset prefstop
fontcan
+ setttkstyle
set_gui_colors
}
@@ -12460,11 +12494,13 @@ namespace import ::msgcat::mc
# on OSX bring the current Wish process window to front
if {[tk windowingsystem] eq "aqua"} {
- safe_exec [list osascript -e [format {
- tell application "System Events"
- set frontmost of processes whose unix id is %d to true
- end tell
- } [pid] ]]
+ catch {
+ safe_exec [list osascript -e [format {
+ tell application "System Events"
+ set frontmost of processes whose unix id is %d to true
+ end tell
+ } [pid] ]]
+ }
}
# Unset GIT_TRACE var if set
@@ -12569,17 +12605,11 @@ if {[tk windowingsystem] eq "aqua"} {
set colors {"#00ff00" red blue magenta darkgrey brown orange}
if {[tk windowingsystem] eq "win32"} {
- set uicolor SystemButtonFace
- set uifgcolor SystemButtonText
- set uifgdisabledcolor SystemDisabledText
set bgcolor SystemWindow
set fgcolor SystemWindowText
set selectbgcolor SystemHighlight
set web_browser "cmd /c start"
} else {
- set uicolor grey85
- set uifgcolor black
- set uifgdisabledcolor "#999"
set bgcolor white
set fgcolor black
set selectbgcolor gray85
@@ -12619,6 +12649,12 @@ set circleoutlinecolor $fgcolor
set foundbgcolor yellow
set currentsearchhitbgcolor orange
+set theme [ttk::style theme use]
+set themeloader {}
+set uicolor {}
+set uifgcolor {}
+set uifgdisabledcolor {}
+
# button for popping up context menus
if {[tk windowingsystem] eq "aqua" && [package vcompare $::tcl_version 8.7] < 0} {
set ctxbut <Button-2>
@@ -12702,6 +12738,8 @@ set config_variables {
tagfgcolor
tagoutlinecolor
textfont
+ theme
+ themeloader
uicolor
uifgcolor
uifgdisabledcolor
@@ -12801,7 +12839,13 @@ set nullid "0000000000000000000000000000000000000000"
set nullid2 "0000000000000000000000000000000000000001"
set nullfile "/dev/null"
-setttkstyle
+if {[file exists $themeloader]} {
+ if {![run_themeloader $themeloader]} {
+ puts stderr "Could not interpret themeloader: $themeloader"
+ exit 1
+ }
+}
+
set appname "gitk"
set runq {}
@@ -12917,6 +12961,7 @@ if {[tk windowingsystem] eq "win32"} {
focus -force .
}
+setttkstyle
set_gui_colors
getcommits {}
diff --git a/gpg-interface.c b/gpg-interface.c
index 06e7fb5060..2f4f0e32cb 100644
--- a/gpg-interface.c
+++ b/gpg-interface.c
@@ -1125,3 +1125,20 @@ out:
FREE_AND_NULL(ssh_signing_key_file);
return ret;
}
+
+/*
+ * Map a textual option value to its `enum sign_mode`.
+ *
+ * "ignore" and "warn" are accepted as synonyms for "verbatim" and
+ * "warn-verbatim" respectively.
+ *
+ * Returns 0 on success (with *mode set), -1 for an unknown value.
+ */
+int parse_sign_mode(const char *arg, enum sign_mode *mode)
+{
+ if (!strcmp(arg, "abort"))
+ *mode = SIGN_ABORT;
+ else if (!strcmp(arg, "verbatim") || !strcmp(arg, "ignore"))
+ *mode = SIGN_VERBATIM;
+ else if (!strcmp(arg, "warn-verbatim") || !strcmp(arg, "warn"))
+ *mode = SIGN_WARN_VERBATIM;
+ else if (!strcmp(arg, "warn-strip"))
+ *mode = SIGN_WARN_STRIP;
+ else if (!strcmp(arg, "strip"))
+ *mode = SIGN_STRIP;
+ else
+ return -1;
+ return 0;
+}
diff --git a/gpg-interface.h b/gpg-interface.h
index 60ddf8bbfa..50487aa148 100644
--- a/gpg-interface.h
+++ b/gpg-interface.h
@@ -104,4 +104,19 @@ int check_signature(struct signature_check *sigc,
void print_signature_buffer(const struct signature_check *sigc,
unsigned flags);
+/* Modes for --signed-tags=<mode> and --signed-commits=<mode> options. */
+enum sign_mode {
+ SIGN_ABORT, /* abort when a signature is encountered */
+ SIGN_WARN_VERBATIM, /* keep the signature, but emit a warning */
+ SIGN_VERBATIM, /* keep the signature as-is */
+ SIGN_WARN_STRIP, /* strip the signature and emit a warning */
+ SIGN_STRIP, /* strip the signature silently */
+};
+
+/*
+ * Return 0 if `arg` can be parsed into an `enum sign_mode`. Return -1
+ * otherwise.
+ */
+int parse_sign_mode(const char *arg, enum sign_mode *mode);
+
#endif
diff --git a/help.c b/help.c
index bb20498cfd..5854dd4a7e 100644
--- a/help.c
+++ b/help.c
@@ -791,6 +791,12 @@ void get_version_info(struct strbuf *buf, int show_build_options)
strbuf_addf(buf, "shell-path: %s\n", SHELL_PATH);
/* NEEDSWORK: also save and output GIT-BUILD_OPTIONS? */
+#if defined WITH_RUST
+ strbuf_addstr(buf, "rust: enabled\n");
+#else
+ strbuf_addstr(buf, "rust: disabled\n");
+#endif
+
if (fsmonitor_ipc__is_supported())
strbuf_addstr(buf, "feature: fsmonitor--daemon\n");
#if defined LIBCURL_VERSION
diff --git a/http-backend.c b/http-backend.c
index d5dfe762bb..9084058f1e 100644
--- a/http-backend.c
+++ b/http-backend.c
@@ -603,18 +603,19 @@ static void get_head(struct strbuf *hdr, char *arg UNUSED)
static void get_info_packs(struct strbuf *hdr, char *arg UNUSED)
{
size_t objdirlen = strlen(repo_get_object_directory(the_repository));
+ struct packfile_store *packs = the_repository->objects->packfiles;
struct strbuf buf = STRBUF_INIT;
struct packed_git *p;
size_t cnt = 0;
select_getanyfile(hdr);
- for (p = get_all_packs(the_repository); p; p = p->next) {
+ for (p = packfile_store_get_all_packs(packs); p; p = p->next) {
if (p->pack_local)
cnt++;
}
strbuf_grow(&buf, cnt * 53 + 2);
- for (p = get_all_packs(the_repository); p; p = p->next) {
+ for (p = packfile_store_get_all_packs(packs); p; p = p->next) {
if (p->pack_local)
strbuf_addf(&buf, "P %s\n", p->pack_name + objdirlen + 6);
}
diff --git a/http-push.c b/http-push.c
index 4c43ba3bc7..a1c01e3b9b 100644
--- a/http-push.c
+++ b/http-push.c
@@ -208,7 +208,8 @@ static void curl_setup_http(CURL *curl, const char *url,
curl_easy_setopt(curl, CURLOPT_UPLOAD, 1L);
curl_easy_setopt(curl, CURLOPT_URL, url);
curl_easy_setopt(curl, CURLOPT_INFILE, buffer);
- curl_easy_setopt(curl, CURLOPT_INFILESIZE, buffer->buf.len);
+ curl_easy_setopt(curl, CURLOPT_INFILESIZE_LARGE,
+ cast_size_t_to_curl_off_t(buffer->buf.len));
curl_easy_setopt(curl, CURLOPT_READFUNCTION, fread_buffer);
curl_easy_setopt(curl, CURLOPT_SEEKFUNCTION, seek_buffer);
curl_easy_setopt(curl, CURLOPT_SEEKDATA, buffer);
diff --git a/http.c b/http.c
index a7d55dcbba..7e3af1e72f 100644
--- a/http.c
+++ b/http.c
@@ -2416,6 +2416,7 @@ static char *fetch_pack_index(unsigned char *hash, const char *base_url)
static int fetch_and_setup_pack_index(struct packed_git **packs_head,
unsigned char *sha1, const char *base_url)
{
+ struct packfile_store *packs = the_repository->objects->packfiles;
struct packed_git *new_pack, *p;
char *tmp_idx = NULL;
int ret;
@@ -2424,7 +2425,7 @@ static int fetch_and_setup_pack_index(struct packed_git **packs_head,
* If we already have the pack locally, no need to fetch its index or
* even add it to list; we already have all of its objects.
*/
- for (p = get_all_packs(the_repository); p; p = p->next) {
+ for (p = packfile_store_get_all_packs(packs); p; p = p->next) {
if (hasheq(p->hash, sha1, the_repository->hash_algo))
return 0;
}
@@ -2549,7 +2550,7 @@ void http_install_packfile(struct packed_git *p,
lst = &((*lst)->next);
*lst = (*lst)->next;
- install_packed_git(the_repository, p);
+ packfile_store_add_pack(the_repository->objects->packfiles, p);
}
struct http_pack_request *new_http_pack_request(
diff --git a/http.h b/http.h
index 36202139f4..553e16205c 100644
--- a/http.h
+++ b/http.h
@@ -8,6 +8,7 @@ struct packed_git;
#include <curl/curl.h>
#include <curl/easy.h>
+#include "gettext.h"
#include "strbuf.h"
#include "remote.h"
@@ -95,6 +96,15 @@ static inline int missing__target(int code, int result)
#define missing_target(a) missing__target((a)->http_code, (a)->curl_result)
+/*
+ * Convert a size_t to curl_off_t for use with the CURLOPT_*_LARGE
+ * options, dying if the value does not fit in the signed curl_off_t
+ * range on this platform.
+ */
+static inline curl_off_t cast_size_t_to_curl_off_t(size_t a)
+{
+ uintmax_t size = a;
+ if (size > maximum_signed_value_of_type(curl_off_t))
+ die(_("number too large to represent as curl_off_t "
+ "on this platform: %"PRIuMAX), (uintmax_t)a);
+ return (curl_off_t)a;
+}
+
/*
* Normalize curl results to handle CURL_FAILONERROR (or lack thereof). Failing
* http codes have their "result" converted to CURLE_HTTP_RETURNED_ERROR, and
@@ -210,7 +220,7 @@ int finish_http_pack_request(struct http_pack_request *preq);
void release_http_pack_request(struct http_pack_request *preq);
/*
- * Remove p from the given list, and invoke install_packed_git() on it.
+ * Remove p from the given list, and invoke packfile_store_add_pack() on it.
*
* This is a convenience function for users that have obtained a list of packs
* from http_get_info_packs() and have chosen a specific pack to fetch.
diff --git a/imap-send.c b/imap-send.c
index 4bd5b8aa0d..26dda7f328 100644
--- a/imap-send.c
+++ b/imap-send.c
@@ -1721,7 +1721,7 @@ static int curl_append_msgs_to_imap(struct imap_server_conf *server,
lf_to_crlf(&msgbuf.buf);
curl_easy_setopt(curl, CURLOPT_INFILESIZE_LARGE,
- (curl_off_t)(msgbuf.buf.len-prev_len));
+ cast_size_t_to_curl_off_t(msgbuf.buf.len-prev_len));
res = curl_easy_perform(curl);
diff --git a/meson.build b/meson.build
index b3dfcc0497..ec55d6a5fd 100644
--- a/meson.build
+++ b/meson.build
@@ -220,7 +220,7 @@ project('git', 'c',
# learned to define __STDC_VERSION__ with C11 and later. We thus require
# GNU C99 and fall back to C11. Meson only learned to handle the fallback
# with version 1.3.0, so on older versions we use GNU C99 unconditionally.
- default_options: meson.version().version_compare('>=1.3.0') ? ['c_std=gnu99,c11'] : ['c_std=gnu99'],
+ default_options: meson.version().version_compare('>=1.3.0') ? ['rust_std=2018', 'c_std=gnu99,c11'] : ['rust_std=2018', 'c_std=gnu99'],
)
fs = import('fs')
@@ -287,7 +287,6 @@ libgit_sources = [
'blob.c',
'bloom.c',
'branch.c',
- 'bulk-checkin.c',
'bundle-uri.c',
'bundle.c',
'cache-tree.c',
@@ -407,6 +406,7 @@ libgit_sources = [
'pack-check.c',
'pack-mtimes.c',
'pack-objects.c',
+ 'pack-refs.c',
'pack-revindex.c',
'pack-write.c',
'packfile.c',
@@ -522,7 +522,6 @@ libgit_sources = [
'usage.c',
'userdiff.c',
'utf8.c',
- 'varint.c',
'version.c',
'versioncmp.c',
'walker.c',
@@ -1703,6 +1702,17 @@ version_def_h = custom_target(
)
libgit_sources += version_def_h
+cargo = find_program('cargo', dirs: program_path, native: true, required: get_option('rust'))
+rust_option = get_option('rust').disable_auto_if(not cargo.found())
+if rust_option.allowed()
+ subdir('src')
+ libgit_c_args += '-DWITH_RUST'
+else
+ libgit_sources += [
+ 'varint.c',
+ ]
+endif
+
libgit = declare_dependency(
link_with: static_library('git',
sources: libgit_sources,
@@ -2101,11 +2111,20 @@ endif
subdir('bin-wrappers')
if get_option('docs') != []
+ doc_targets = []
subdir('Documentation')
+else
+ docs_backend = 'none'
endif
subdir('contrib')
+# Note that the target is intentionally configured after including the
+# 'contrib' directory, as some tools there also have their own manpages.
+if get_option('docs') != []
+ alias_target('docs', doc_targets)
+endif
+
exclude_from_check_headers = [
'compat/',
'unicode-width.h',
@@ -2240,10 +2259,12 @@ summary({
'pcre2': pcre2,
'perl': perl_features_enabled,
'python': target_python.found(),
+ 'rust': rust_option.allowed(),
}, section: 'Auto-detected features', bool_yn: true)
summary({
'csprng': csprng_backend,
+ 'docs': docs_backend,
'https': https_backend,
'sha1': sha1_backend,
'sha1_unsafe': sha1_unsafe_backend,
diff --git a/meson_options.txt b/meson_options.txt
index 1668f260a1..143dee9237 100644
--- a/meson_options.txt
+++ b/meson_options.txt
@@ -71,6 +71,8 @@ option('zlib_backend', type: 'combo', choices: ['auto', 'zlib', 'zlib-ng'], valu
# Build tweaks.
option('breaking_changes', type: 'boolean', value: false,
description: 'Enable upcoming breaking changes.')
+option('rust', type: 'feature', value: 'auto',
+ description: 'Enable building with Rust.')
option('macos_use_homebrew_gettext', type: 'boolean', value: true,
description: 'Use gettext from Homebrew instead of the slightly-broken system-provided one.')
diff --git a/midx.c b/midx.c
index 7726c13d7e..1d6269f957 100644
--- a/midx.c
+++ b/midx.c
@@ -93,6 +93,12 @@ static int midx_read_object_offsets(const unsigned char *chunk_start,
return 0;
}
+/*
+ * Return the multi-pack-index of the given object source, first making
+ * sure the packfile store has been prepared so source->midx is loaded.
+ */
+struct multi_pack_index *get_multi_pack_index(struct odb_source *source)
+{
+ packfile_store_prepare(source->odb->packfiles);
+ return source->midx;
+}
+
static struct multi_pack_index *load_multi_pack_index_one(struct odb_source *source,
const char *midx_name)
{
@@ -443,7 +449,6 @@ int prepare_midx_pack(struct multi_pack_index *m,
{
struct repository *r = m->source->odb->repo;
struct strbuf pack_name = STRBUF_INIT;
- struct strbuf key = STRBUF_INIT;
struct packed_git *p;
pack_int_id = midx_for_pack(&m, pack_int_id);
@@ -455,25 +460,11 @@ int prepare_midx_pack(struct multi_pack_index *m,
strbuf_addf(&pack_name, "%s/pack/%s", m->source->path,
m->pack_names[pack_int_id]);
-
- /* pack_map holds the ".pack" name, but we have the .idx */
- strbuf_addbuf(&key, &pack_name);
- strbuf_strip_suffix(&key, ".idx");
- strbuf_addstr(&key, ".pack");
- p = hashmap_get_entry_from_hash(&r->objects->pack_map,
- strhash(key.buf), key.buf,
- struct packed_git, packmap_ent);
- if (!p) {
- p = add_packed_git(r, pack_name.buf, pack_name.len,
- m->source->local);
- if (p) {
- install_packed_git(r, p);
- list_add_tail(&p->mru, &r->objects->packed_git_mru);
- }
- }
-
+ p = packfile_store_load_pack(r->objects->packfiles,
+ pack_name.buf, m->source->local);
+ if (p)
+ list_add_tail(&p->mru, &r->objects->packfiles->mru);
strbuf_release(&pack_name);
- strbuf_release(&key);
if (!p) {
m->packs[pack_int_id] = MIDX_PACK_ERROR;
diff --git a/midx.h b/midx.h
index e241d2d690..6e54d73503 100644
--- a/midx.h
+++ b/midx.h
@@ -94,6 +94,7 @@ void get_midx_chain_filename(struct odb_source *source, struct strbuf *out);
void get_split_midx_filename_ext(struct odb_source *source, struct strbuf *buf,
const unsigned char *hash, const char *ext);
+struct multi_pack_index *get_multi_pack_index(struct odb_source *source);
struct multi_pack_index *load_multi_pack_index(struct odb_source *source);
int prepare_midx_pack(struct multi_pack_index *m, uint32_t pack_int_id);
struct packed_git *nth_midxed_pack(struct multi_pack_index *m,
diff --git a/object-file.c b/object-file.c
index bc15af4245..4675c8ed6b 100644
--- a/object-file.c
+++ b/object-file.c
@@ -10,7 +10,6 @@
#define USE_THE_REPOSITORY_VARIABLE
#include "git-compat-util.h"
-#include "bulk-checkin.h"
#include "convert.h"
#include "dir.h"
#include "environment.h"
@@ -28,6 +27,8 @@
#include "read-cache-ll.h"
#include "setup.h"
#include "streaming.h"
+#include "tempfile.h"
+#include "tmp-objdir.h"
/* The maximum size for an object header. */
#define MAX_HEADER_LEN 32
@@ -666,6 +667,93 @@ void hash_object_file(const struct git_hash_algo *algo, const void *buf,
write_object_file_prepare(algo, buf, len, type, oid, hdr, &hdrlen);
}
+/*
+ * State for the packfile that accumulates objects written during an
+ * ODB transaction.  The temporary pack is finalized and indexed when
+ * the transaction is flushed.
+ */
+struct transaction_packfile {
+ char *pack_tmp_name; /* path of the temporary packfile */
+ struct hashfile *f; /* open hashfile for the pack data */
+ off_t offset; /* current write offset in the pack */
+ struct pack_idx_option pack_idx_opts;
+
+ struct pack_idx_entry **written; /* objects queued for the index */
+ uint32_t alloc_written;
+ uint32_t nr_written;
+};
+
+/*
+ * A pending object-database transaction: a temporary object directory
+ * used for batched loose-object fsyncs, plus the transaction packfile.
+ */
+struct odb_transaction {
+ struct object_database *odb;
+
+ struct tmp_objdir *objdir;
+ struct transaction_packfile packfile;
+};
+
+/*
+ * Set up the temporary object directory that batches loose-object
+ * writes for the transaction.  No-op when there is no transaction or
+ * the directory already exists.
+ */
+static void prepare_loose_object_transaction(struct odb_transaction *transaction)
+{
+ /*
+ * We lazily create the temporary object directory
+ * the first time an object might be added, since
+ * callers may not know whether any objects will be
+ * added at the time they call object_file_transaction_begin.
+ */
+ if (!transaction || transaction->objdir)
+ return;
+
+ transaction->objdir = tmp_objdir_create(transaction->odb->repo, "bulk-fsync");
+ if (transaction->objdir)
+ tmp_objdir_replace_primary_odb(transaction->objdir, 0);
+}
+
+/*
+ * Sync a freshly written loose object file.  Within an active
+ * transaction only a cheap writeout is requested; the single hardware
+ * flush is deferred to flush_loose_object_transaction.  Otherwise fall
+ * back to a full fsync_or_die.
+ *
+ * NOTE(review): when there is no transaction/objdir the ENOSYS check
+ * reads errno without git_fsync() having run — presumably harmless
+ * (worst case an extra warning), but worth confirming.
+ */
+static void fsync_loose_object_transaction(struct odb_transaction *transaction,
+ int fd, const char *filename)
+{
+ /*
+ * If we have an active ODB transaction, we issue a call that
+ * cleans the filesystem page cache but avoids a hardware flush
+ * command. Later on we will issue a single hardware flush
+ * before renaming the objects to their final names as part of
+ * flush_batch_fsync.
+ */
+ if (!transaction || !transaction->objdir ||
+ git_fsync(fd, FSYNC_WRITEOUT_ONLY) < 0) {
+ if (errno == ENOSYS)
+ warning(_("core.fsyncMethod = batch is unsupported on this platform"));
+ fsync_or_die(fd, filename);
+ }
+}
+
+/*
+ * Cleanup after batch-mode fsync_object_files: issue the single
+ * deferred hardware flush, then migrate the objects from the
+ * temporary directory into the primary ODB.
+ */
+static void flush_loose_object_transaction(struct odb_transaction *transaction)
+{
+ struct strbuf temp_path = STRBUF_INIT;
+ struct tempfile *temp;
+
+ if (!transaction->objdir)
+ return;
+
+ /*
+ * Issue a full hardware flush against a temporary file to ensure
+ * that all objects are durable before any renames occur. The code in
+ * fsync_loose_object_transaction has already issued a writeout
+ * request, but it has not flushed any writeback cache in the storage
+ * hardware or any filesystem logs. This fsync call acts as a barrier
+ * to ensure that the data in each new object file is durable before
+ * the final name is visible.
+ */
+ strbuf_addf(&temp_path, "%s/bulk_fsync_XXXXXX",
+ repo_get_object_directory(transaction->odb->repo));
+ temp = xmks_tempfile(temp_path.buf);
+ fsync_or_die(get_tempfile_fd(temp), get_tempfile_path(temp));
+ delete_tempfile(&temp);
+ strbuf_release(&temp_path);
+
+ /*
+ * Make the object files visible in the primary ODB after their data is
+ * fully durable.
+ */
+ tmp_objdir_migrate(transaction->objdir);
+ transaction->objdir = NULL;
+}
+
/* Finalize a file on disk, and close it. */
static void close_loose_object(struct odb_source *source,
int fd, const char *filename)
@@ -674,7 +762,7 @@ static void close_loose_object(struct odb_source *source,
goto out;
if (batch_fsync_enabled(FSYNC_COMPONENT_LOOSE_OBJECT))
- fsync_loose_object_bulk_checkin(source->odb->transaction, fd, filename);
+ fsync_loose_object_transaction(source->odb->transaction, fd, filename);
else if (fsync_object_files > 0)
fsync_or_die(fd, filename);
else
@@ -852,7 +940,7 @@ static int write_loose_object(struct odb_source *source,
static struct strbuf filename = STRBUF_INIT;
if (batch_fsync_enabled(FSYNC_COMPONENT_LOOSE_OBJECT))
- prepare_loose_object_bulk_checkin(source->odb->transaction);
+ prepare_loose_object_transaction(source->odb->transaction);
odb_loose_path(source, &filename, oid);
@@ -941,7 +1029,7 @@ int stream_loose_object(struct odb_source *source,
int hdrlen;
if (batch_fsync_enabled(FSYNC_COMPONENT_LOOSE_OBJECT))
- prepare_loose_object_bulk_checkin(source->odb->transaction);
+ prepare_loose_object_transaction(source->odb->transaction);
/* Since oid is not determined, save tmp file to odb path. */
strbuf_addf(&filename, "%s/", source->path);
@@ -1243,6 +1331,274 @@ static int index_core(struct index_state *istate,
return ret;
}
+/*
+ * Return 1 if the object is already available — either present in the
+ * repository (re-checking packs, possibly fetching from a promisor) or
+ * already queued in the current transaction packfile — and 0 if it is
+ * a new object that must be kept.
+ */
+static int already_written(struct odb_transaction *transaction,
+ struct object_id *oid)
+{
+ /* The object may already exist in the repository */
+ if (odb_has_object(transaction->odb, oid,
+ HAS_OBJECT_RECHECK_PACKED | HAS_OBJECT_FETCH_PROMISOR))
+ return 1;
+
+ /* Might want to keep the list sorted */
+ for (uint32_t i = 0; i < transaction->packfile.nr_written; i++)
+ if (oideq(&transaction->packfile.written[i]->oid, oid))
+ return 1;
+
+ /* This is a new object we need to keep */
+ return 0;
+}
+
+/* Lazily create backing packfile for the state */
+static void prepare_packfile_transaction(struct odb_transaction *transaction,
+ unsigned flags)
+{
+ struct transaction_packfile *state = &transaction->packfile;
+ /* Nothing to do unless we actually write objects, or if already open. */
+ if (!(flags & INDEX_WRITE_OBJECT) || state->f)
+ return;
+
+ state->f = create_tmp_packfile(transaction->odb->repo,
+ &state->pack_tmp_name);
+ reset_pack_idx_option(&state->pack_idx_opts);
+
+ /* Pretend we are going to write only one object */
+ /* (the real count is fixed up later by flush_packfile_transaction) */
+ state->offset = write_pack_header(state->f, 1);
+ if (!state->offset)
+ die_errno("unable to write pack header");
+}
+
+/*
+ * Read the contents from fd for size bytes, streaming it to the
+ * packfile in state while updating the hash in ctx. Signal a failure
+ * by returning a negative value when the resulting pack would exceed
+ * the pack size limit and this is not the first object in the pack,
+ * so that the caller can discard what we wrote from the current pack
+ * by truncating it and opening a new one. The caller will then call
+ * us again after rewinding the input fd.
+ *
+ * The already_hashed_to pointer is kept untouched by the caller to
+ * make sure we do not hash the same byte when we are called
+ * again. This way, the caller does not have to checkpoint its hash
+ * status before calling us just in case we ask it to call us again
+ * with a new pack.
+ */
+static int stream_blob_to_pack(struct transaction_packfile *state,
+ struct git_hash_ctx *ctx, off_t *already_hashed_to,
+ int fd, size_t size, const char *path,
+ unsigned flags)
+{
+ git_zstream s;
+ unsigned char ibuf[16384];
+ unsigned char obuf[16384];
+ unsigned hdrlen;
+ int status = Z_OK;
+ int write_object = (flags & INDEX_WRITE_OBJECT);
+ off_t offset = 0;
+
+ git_deflate_init(&s, pack_compression_level);
+
+ /* The in-pack object header precedes the deflated payload in obuf. */
+ hdrlen = encode_in_pack_object_header(obuf, sizeof(obuf), OBJ_BLOB, size);
+ s.next_out = obuf + hdrlen;
+ s.avail_out = sizeof(obuf) - hdrlen;
+
+ while (status != Z_STREAM_END) {
+ if (size && !s.avail_in) {
+ size_t rsize = size < sizeof(ibuf) ? size : sizeof(ibuf);
+ ssize_t read_result = read_in_full(fd, ibuf, rsize);
+ if (read_result < 0)
+ die_errno("failed to read from '%s'", path);
+ if ((size_t)read_result != rsize)
+ die("failed to read %u bytes from '%s'",
+ (unsigned)rsize, path);
+ offset += rsize;
+ /* Hash only bytes not already hashed on a previous attempt. */
+ if (*already_hashed_to < offset) {
+ size_t hsize = offset - *already_hashed_to;
+ if (rsize < hsize)
+ hsize = rsize;
+ if (hsize)
+ git_hash_update(ctx, ibuf, hsize);
+ *already_hashed_to = offset;
+ }
+ s.next_in = ibuf;
+ s.avail_in = rsize;
+ size -= rsize;
+ }
+
+ status = git_deflate(&s, size ? 0 : Z_FINISH);
+
+ if (!s.avail_out || status == Z_STREAM_END) {
+ if (write_object) {
+ size_t written = s.next_out - obuf;
+
+ /* would we bust the size limit? */
+ if (state->nr_written &&
+ pack_size_limit_cfg &&
+ pack_size_limit_cfg < state->offset + written) {
+ git_deflate_abort(&s);
+ return -1;
+ }
+
+ hashwrite(state->f, obuf, written);
+ state->offset += written;
+ }
+ s.next_out = obuf;
+ s.avail_out = sizeof(obuf);
+ }
+
+ /* Note: "continue" below continues the enclosing while loop. */
+ switch (status) {
+ case Z_OK:
+ case Z_BUF_ERROR:
+ case Z_STREAM_END:
+ continue;
+ default:
+ die("unexpected deflate failure: %d", status);
+ }
+ }
+ git_deflate_end(&s);
+ return 0;
+}
+
+/*
+ * Finalize the transaction packfile: discard it when empty, otherwise
+ * fix up the pack header/footer as needed, write the pack index, and
+ * move pack and index into place under their final hash-based names.
+ * Afterwards the ODB is reprepared so the new objects become visible.
+ */
+static void flush_packfile_transaction(struct odb_transaction *transaction)
+{
+ struct transaction_packfile *state = &transaction->packfile;
+ struct repository *repo = transaction->odb->repo;
+ unsigned char hash[GIT_MAX_RAWSZ];
+ struct strbuf packname = STRBUF_INIT;
+ char *idx_tmp_name = NULL;
+
+ if (!state->f)
+ return;
+
+ if (state->nr_written == 0) {
+ /* No objects were written; drop the temporary pack. */
+ close(state->f->fd);
+ free_hashfile(state->f);
+ unlink(state->pack_tmp_name);
+ goto clear_exit;
+ } else if (state->nr_written == 1) {
+ /* Header already claims one object; just seal the pack. */
+ finalize_hashfile(state->f, hash, FSYNC_COMPONENT_PACK,
+ CSUM_HASH_IN_STREAM | CSUM_FSYNC | CSUM_CLOSE);
+ } else {
+ /* Header claimed one object; rewrite count and trailer. */
+ int fd = finalize_hashfile(state->f, hash, FSYNC_COMPONENT_PACK, 0);
+ fixup_pack_header_footer(repo->hash_algo, fd, hash, state->pack_tmp_name,
+ state->nr_written, hash,
+ state->offset);
+ close(fd);
+ }
+
+ strbuf_addf(&packname, "%s/pack/pack-%s.",
+ repo_get_object_directory(transaction->odb->repo),
+ hash_to_hex_algop(hash, repo->hash_algo));
+
+ stage_tmp_packfiles(repo, &packname, state->pack_tmp_name,
+ state->written, state->nr_written, NULL,
+ &state->pack_idx_opts, hash, &idx_tmp_name);
+ rename_tmp_packfile_idx(repo, &packname, &idx_tmp_name);
+
+ for (uint32_t i = 0; i < state->nr_written; i++)
+ free(state->written[i]);
+
+clear_exit:
+ free(idx_tmp_name);
+ free(state->pack_tmp_name);
+ free(state->written);
+ memset(state, 0, sizeof(*state));
+
+ strbuf_release(&packname);
+ /* Make objects we just wrote available to ourselves */
+ odb_reprepare(repo->objects);
+}
+
+/*
+ * This writes the specified object to a packfile. Objects written here
+ * during the same transaction are written to the same packfile. The
+ * packfile is not flushed until the transaction is flushed. The caller
+ * is expected to ensure a valid transaction is setup for objects to be
+ * recorded to.
+ *
+ * This also bypasses the usual "convert-to-git" dance, and that is on
+ * purpose. We could write a streaming version of the converting
+ * functions and insert that before feeding the data to fast-import
+ * (or equivalent in-core API described above). However, that is
+ * somewhat complicated, as we do not know the size of the filter
+ * result, which we need to know beforehand when writing a git object.
+ * Since the primary motivation for trying to stream from the working
+ * tree file and to avoid mmaping it in core is to deal with large
+ * binary blobs, they generally do not want to get any conversion, and
+ * callers should avoid this code path when filters are requested.
+ */
+static int index_blob_packfile_transaction(struct odb_transaction *transaction,
+ struct object_id *result_oid, int fd,
+ size_t size, const char *path,
+ unsigned flags)
+{
+ struct transaction_packfile *state = &transaction->packfile;
+ off_t seekback, already_hashed_to;
+ struct git_hash_ctx ctx;
+ unsigned char obuf[16384];
+ unsigned header_len;
+ struct hashfile_checkpoint checkpoint;
+ struct pack_idx_entry *idx = NULL;
+
+ /* Remember where we started so we can rewind after a pack split. */
+ seekback = lseek(fd, 0, SEEK_CUR);
+ if (seekback == (off_t)-1)
+ return error("cannot find the current offset");
+
+ /* The object hash covers the loose-object header, not the pack header. */
+ header_len = format_object_header((char *)obuf, sizeof(obuf),
+ OBJ_BLOB, size);
+ transaction->odb->repo->hash_algo->init_fn(&ctx);
+ git_hash_update(&ctx, obuf, header_len);
+
+ /* Note: idx is non-NULL when we are writing */
+ if ((flags & INDEX_WRITE_OBJECT) != 0) {
+ CALLOC_ARRAY(idx, 1);
+
+ prepare_packfile_transaction(transaction, flags);
+ hashfile_checkpoint_init(state->f, &checkpoint);
+ }
+
+ already_hashed_to = 0;
+
+ while (1) {
+ prepare_packfile_transaction(transaction, flags);
+ if (idx) {
+ hashfile_checkpoint(state->f, &checkpoint);
+ idx->offset = state->offset;
+ crc32_begin(state->f);
+ }
+ if (!stream_blob_to_pack(state, &ctx, &already_hashed_to,
+ fd, size, path, flags))
+ break;
+ /*
+ * Writing this object to the current pack will make
+ * it too big; we need to truncate it, start a new
+ * pack, and write into it.
+ */
+ if (!idx)
+ BUG("should not happen");
+ hashfile_truncate(state->f, &checkpoint);
+ state->offset = checkpoint.offset;
+ flush_packfile_transaction(transaction);
+ if (lseek(fd, seekback, SEEK_SET) == (off_t)-1)
+ return error("cannot seek back");
+ }
+ git_hash_final_oid(result_oid, &ctx);
+ if (!idx)
+ return 0;
+
+ idx->crc32 = crc32_end(state->f);
+ if (already_written(transaction, result_oid)) {
+ /* Duplicate object: roll back the bytes we just streamed. */
+ hashfile_truncate(state->f, &checkpoint);
+ state->offset = checkpoint.offset;
+ free(idx);
+ } else {
+ oidcpy(&idx->oid, result_oid);
+ ALLOC_GROW(state->written,
+ state->nr_written + 1,
+ state->alloc_written);
+ state->written[state->nr_written++] = idx;
+ }
+ return 0;
+}
+
int index_fd(struct index_state *istate, struct object_id *oid,
int fd, struct stat *st,
enum object_type type, const char *path, unsigned flags)
@@ -1266,11 +1622,12 @@ int index_fd(struct index_state *istate, struct object_id *oid,
} else {
struct odb_transaction *transaction;
- transaction = begin_odb_transaction(the_repository->objects);
- ret = index_blob_bulk_checkin(transaction,
- oid, fd, xsize_t(st->st_size),
- path, flags);
- end_odb_transaction(transaction);
+ transaction = odb_transaction_begin(the_repository->objects);
+ ret = index_blob_packfile_transaction(the_repository->objects->transaction,
+ oid, fd,
+ xsize_t(st->st_size),
+ path, flags);
+ odb_transaction_commit(transaction);
}
close(fd);
@@ -1609,3 +1966,32 @@ out:
munmap(map, mapsize);
return ret;
}
+
+/*
+ * Begin an object-file transaction on the database owning the given
+ * source.  Returns NULL if a transaction is already pending.
+ */
+struct odb_transaction *object_file_transaction_begin(struct odb_source *source)
+{
+ struct object_database *odb = source->odb;
+
+ if (odb->transaction)
+ return NULL;
+
+ CALLOC_ARRAY(odb->transaction, 1);
+ odb->transaction->odb = odb;
+
+ return odb->transaction;
+}
+
+/*
+ * Flush both the loose-object and packfile parts of the transaction,
+ * making all written objects visible, then release the transaction.
+ * A NULL transaction is a no-op.
+ */
+void object_file_transaction_commit(struct odb_transaction *transaction)
+{
+ if (!transaction)
+ return;
+
+ /*
+ * Ensure the transaction ending matches the pending transaction.
+ */
+ ASSERT(transaction == transaction->odb->transaction);
+
+ flush_loose_object_transaction(transaction);
+ flush_packfile_transaction(transaction);
+ transaction->odb->transaction = NULL;
+ free(transaction);
+}
diff --git a/object-file.h b/object-file.h
index 15d97630d3..3fd48dcafb 100644
--- a/object-file.h
+++ b/object-file.h
@@ -218,4 +218,20 @@ int read_loose_object(struct repository *repo,
void **contents,
struct object_info *oi);
+struct odb_transaction;
+
+/*
+ * Tell the object database to optimize for adding
+ * multiple objects. object_file_transaction_commit must be called
+ * to make new objects visible. If a transaction is already
+ * pending, NULL is returned.
+ */
+struct odb_transaction *object_file_transaction_begin(struct odb_source *source);
+
+/*
+ * Tell the object database to make any objects from the
+ * current transaction visible.
+ */
+void object_file_transaction_commit(struct odb_transaction *transaction);
+
#endif /* OBJECT_FILE_H */
diff --git a/object-name.c b/object-name.c
index 7774991d28..f6902e140d 100644
--- a/object-name.c
+++ b/object-name.c
@@ -213,7 +213,7 @@ static void find_short_packed_object(struct disambiguate_state *ds)
unique_in_midx(m, ds);
}
- for (p = get_packed_git(ds->repo); p && !ds->ambiguous;
+ for (p = packfile_store_get_packs(ds->repo->objects->packfiles); p && !ds->ambiguous;
p = p->next)
unique_in_pack(p, ds);
}
@@ -596,7 +596,7 @@ static enum get_oid_result get_short_oid(struct repository *r,
* or migrated from loose to packed.
*/
if (status == MISSING_OBJECT) {
- reprepare_packed_git(r);
+ odb_reprepare(r->objects);
find_short_object_filename(&ds);
find_short_packed_object(&ds);
status = finish_object_disambiguation(&ds, oid);
@@ -805,7 +805,7 @@ static void find_abbrev_len_packed(struct min_abbrev_data *mad)
find_abbrev_len_for_midx(m, mad);
}
- for (p = get_packed_git(mad->repo); p; p = p->next)
+ for (p = packfile_store_get_packs(mad->repo->objects->packfiles); p; p = p->next)
find_abbrev_len_for_pack(p, mad);
}
diff --git a/odb.c b/odb.c
index 75c443fe66..00a6e71568 100644
--- a/odb.c
+++ b/odb.c
@@ -694,7 +694,7 @@ static int do_oid_object_info_extended(struct object_database *odb,
/* Not a loose object; someone else may have just packed it. */
if (!(flags & OBJECT_INFO_QUICK)) {
- reprepare_packed_git(odb->repo);
+ odb_reprepare(odb->repo->objects);
if (find_pack_entry(odb->repo, real, &e))
break;
}
@@ -996,8 +996,7 @@ struct object_database *odb_new(struct repository *repo)
memset(o, 0, sizeof(*o));
o->repo = repo;
- INIT_LIST_HEAD(&o->packed_git_mru);
- hashmap_init(&o->pack_map, pack_map_entry_cmp, NULL, 0);
+ o->packfiles = packfile_store_new(o);
pthread_mutex_init(&o->replace_mutex, NULL);
string_list_init_dup(&o->submodule_source_paths);
return o;
@@ -1035,19 +1034,44 @@ void odb_clear(struct object_database *o)
free((char *) o->cached_objects[i].value.buf);
FREE_AND_NULL(o->cached_objects);
- INIT_LIST_HEAD(&o->packed_git_mru);
close_object_store(o);
+ packfile_store_free(o->packfiles);
+ o->packfiles = NULL;
+
+ string_list_clear(&o->submodule_source_paths, 0);
+}
+
+void odb_reprepare(struct object_database *o)
+{
+ struct odb_source *source;
+
+ obj_read_lock();
/*
- * `close_object_store()` only closes the packfiles, but doesn't free
- * them. We thus have to do this manually.
+ * Reprepare alt odbs, in case the alternates file was modified
+ * during the course of this process. This only _adds_ odbs to
+ * the linked list, so existing odbs will continue to exist for
+ * the lifetime of the process.
*/
- for (struct packed_git *p = o->packed_git, *next; p; p = next) {
- next = p->next;
- free(p);
- }
- o->packed_git = NULL;
+ o->loaded_alternates = 0;
+ odb_prepare_alternates(o);
- hashmap_clear(&o->pack_map);
- string_list_clear(&o->submodule_source_paths, 0);
+ for (source = o->sources; source; source = source->next)
+ odb_clear_loose_cache(source);
+
+ o->approximate_object_count_valid = 0;
+
+ packfile_store_reprepare(o->packfiles);
+
+ obj_read_unlock();
+}
+
+/* Begin a transaction on the primary object source; NULL if one is pending. */
+struct odb_transaction *odb_transaction_begin(struct object_database *odb)
+{
+ return object_file_transaction_begin(odb->sources);
+}
+
+/* Commit the transaction, making all objects written during it visible. */
+void odb_transaction_commit(struct odb_transaction *transaction)
+{
+ object_file_transaction_commit(transaction);
}
diff --git a/odb.h b/odb.h
index bd7374f92f..e6602dd90c 100644
--- a/odb.h
+++ b/odb.h
@@ -3,7 +3,6 @@
#include "hashmap.h"
#include "object.h"
-#include "list.h"
#include "oidset.h"
#include "oidmap.h"
#include "string-list.h"
@@ -91,6 +90,7 @@ struct odb_source {
};
struct packed_git;
+struct packfile_store;
struct cached_object_entry;
struct odb_transaction;
@@ -139,20 +139,8 @@ struct object_database {
struct commit_graph *commit_graph;
unsigned commit_graph_attempted : 1; /* if loading has been attempted */
- /*
- * private data
- *
- * should only be accessed directly by packfile.c
- */
-
- struct packed_git *packed_git;
- /* A most-recently-used ordered version of the packed_git list. */
- struct list_head packed_git_mru;
-
- struct {
- struct packed_git **packs;
- unsigned flags;
- } kept_pack_cache;
+ /* Should only be accessed directly by packfile.c and midx.c. */
+ struct packfile_store *packfiles;
/*
* This is meant to hold a *small* number of objects that you would
@@ -164,12 +152,6 @@ struct object_database {
size_t cached_object_nr, cached_object_alloc;
/*
- * A map of packfiles to packed_git structs for tracking which
- * packs have been loaded already.
- */
- struct hashmap pack_map;
-
- /*
* A fast, rough count of the number of objects in the repository.
* These two fields are not meant for direct access. Use
* repo_approximate_object_count() instead.
@@ -178,12 +160,6 @@ struct object_database {
unsigned approximate_object_count_valid : 1;
/*
- * Whether packed_git has already been populated with this repository's
- * packs.
- */
- unsigned packed_git_initialized : 1;
-
- /*
* Submodule source paths that will be added as additional sources to
* allow lookup of submodule objects via the main object database.
*/
@@ -194,6 +170,25 @@ struct object_database *odb_new(struct repository *repo);
void odb_clear(struct object_database *o);
/*
+ * Clear caches, reload alternates and then reload object sources so that new
+ * objects may become accessible.
+ */
+void odb_reprepare(struct object_database *o);
+
+/*
+ * Starts an ODB transaction. Subsequent objects are written to the transaction
+ * and not committed until odb_transaction_commit() is invoked on the
+ * transaction. If the ODB already has a pending transaction, NULL is returned.
+ */
+struct odb_transaction *odb_transaction_begin(struct object_database *odb);
+
+/*
+ * Commits an ODB transaction, making the written objects visible. If the
+ * specified transaction is NULL, the function is a no-op.
+ */
+void odb_transaction_commit(struct odb_transaction *transaction);
+
+/*
* Find source by its object directory path. Returns a `NULL` pointer in case
* the source could not be found.
*/
@@ -494,37 +489,4 @@ static inline int odb_write_object(struct object_database *odb,
return odb_write_object_ext(odb, buf, len, type, oid, NULL, 0);
}
-/* Compatibility wrappers, to be removed once Git 2.51 has been released. */
-#include "repository.h"
-
-static inline int oid_object_info_extended(struct repository *r,
- const struct object_id *oid,
- struct object_info *oi,
- unsigned flags)
-{
- return odb_read_object_info_extended(r->objects, oid, oi, flags);
-}
-
-static inline int oid_object_info(struct repository *r,
- const struct object_id *oid,
- unsigned long *sizep)
-{
- return odb_read_object_info(r->objects, oid, sizep);
-}
-
-static inline void *repo_read_object_file(struct repository *r,
- const struct object_id *oid,
- enum object_type *type,
- unsigned long *size)
-{
- return odb_read_object(r->objects, oid, type, size);
-}
-
-static inline int has_object(struct repository *r,
- const struct object_id *oid,
- unsigned flags)
-{
- return odb_has_object(r->objects, oid, flags);
-}
-
#endif /* ODB_H */
diff --git a/pack-bitmap.c b/pack-bitmap.c
index 058bdb5d7d..ac71035d77 100644
--- a/pack-bitmap.c
+++ b/pack-bitmap.c
@@ -664,7 +664,7 @@ static int open_pack_bitmap(struct repository *r,
struct packed_git *p;
int ret = -1;
- for (p = get_all_packs(r); p; p = p->next) {
+ for (p = packfile_store_get_all_packs(r->objects->packfiles); p; p = p->next) {
if (open_pack_bitmap_1(bitmap_git, p) == 0) {
ret = 0;
/*
@@ -3362,7 +3362,7 @@ int verify_bitmap_files(struct repository *r)
free(midx_bitmap_name);
}
- for (struct packed_git *p = get_all_packs(r);
+ for (struct packed_git *p = packfile_store_get_all_packs(r->objects->packfiles);
p; p = p->next) {
char *pack_bitmap_name = pack_bitmap_filename(p);
res |= verify_bitmap_file(r->hash_algo, pack_bitmap_name);
diff --git a/pack-objects.c b/pack-objects.c
index a9d9855063..9d6ee72569 100644
--- a/pack-objects.c
+++ b/pack-objects.c
@@ -4,6 +4,7 @@
#include "pack-objects.h"
#include "packfile.h"
#include "parse.h"
+#include "repository.h"
static uint32_t locate_object_entry_hash(struct packing_data *pdata,
const struct object_id *oid,
@@ -86,6 +87,7 @@ struct object_entry *packlist_find(struct packing_data *pdata,
static void prepare_in_pack_by_idx(struct packing_data *pdata)
{
+ struct packfile_store *packs = pdata->repo->objects->packfiles;
struct packed_git **mapping, *p;
int cnt = 0, nr = 1U << OE_IN_PACK_BITS;
@@ -95,7 +97,7 @@ static void prepare_in_pack_by_idx(struct packing_data *pdata)
* (i.e. in_pack_idx also zero) should return NULL.
*/
mapping[cnt++] = NULL;
- for (p = get_all_packs(pdata->repo); p; p = p->next, cnt++) {
+ for (p = packfile_store_get_all_packs(packs); p; p = p->next, cnt++) {
if (cnt == nr) {
free(mapping);
return;
diff --git a/pack-refs.c b/pack-refs.c
new file mode 100644
index 0000000000..1a5e07d8b8
--- /dev/null
+++ b/pack-refs.c
@@ -0,0 +1,56 @@
+#include "builtin.h"
+#include "config.h"
+#include "environment.h"
+#include "pack-refs.h"
+#include "parse-options.h"
+#include "refs.h"
+#include "revision.h"
+
+int pack_refs_core(int argc,
+ const char **argv,
+ const char *prefix,
+ struct repository *repo,
+ const char * const *usage_opts)
+{
+ struct ref_exclusions excludes = REF_EXCLUSIONS_INIT;
+ struct string_list included_refs = STRING_LIST_INIT_NODUP;
+ struct pack_refs_opts pack_refs_opts = {
+ .exclusions = &excludes,
+ .includes = &included_refs,
+ .flags = PACK_REFS_PRUNE,
+ };
+ struct string_list option_excluded_refs = STRING_LIST_INIT_NODUP;
+ struct string_list_item *item;
+ int pack_all = 0;
+ int ret;
+
+ struct option opts[] = {
+ OPT_BOOL(0, "all", &pack_all, N_("pack everything")),
+ OPT_BIT(0, "prune", &pack_refs_opts.flags, N_("prune loose refs (default)"), PACK_REFS_PRUNE),
+ OPT_BIT(0, "auto", &pack_refs_opts.flags, N_("auto-pack refs as needed"), PACK_REFS_AUTO),
+ OPT_STRING_LIST(0, "include", pack_refs_opts.includes, N_("pattern"),
+ N_("references to include")),
+ OPT_STRING_LIST(0, "exclude", &option_excluded_refs, N_("pattern"),
+ N_("references to exclude")),
+ OPT_END(),
+ };
+ repo_config(repo, git_default_config, NULL);
+ if (parse_options(argc, argv, prefix, opts, usage_opts, 0))
+ usage_with_options(usage_opts, opts);
+
+ for_each_string_list_item(item, &option_excluded_refs)
+ add_ref_exclusion(pack_refs_opts.exclusions, item->string);
+
+ if (pack_all)
+ string_list_append(pack_refs_opts.includes, "*");
+
+ if (!pack_refs_opts.includes->nr)
+ string_list_append(pack_refs_opts.includes, "refs/tags/*");
+
+ ret = refs_optimize(get_main_ref_store(repo), &pack_refs_opts);
+
+ clear_ref_exclusions(&excludes);
+ string_list_clear(&included_refs, 0);
+ string_list_clear(&option_excluded_refs, 0);
+ return ret;
+}
diff --git a/pack-refs.h b/pack-refs.h
new file mode 100644
index 0000000000..5de27e7da8
--- /dev/null
+++ b/pack-refs.h
@@ -0,0 +1,23 @@
+#ifndef PACK_REFS_H
+#define PACK_REFS_H
+
+struct repository;
+
+/*
+ * Shared usage string for options common to git-pack-refs(1)
+ * and git-refs-optimize(1). The command-specific part (e.g., "git refs optimize ")
+ * must be prepended by the caller.
+ */
+#define PACK_REFS_OPTS \
+ "[--all] [--no-prune] [--auto] [--include <pattern>] [--exclude <pattern>]"
+
+/*
+ * The core logic for pack-refs and its clones.
+ */
+int pack_refs_core(int argc,
+ const char **argv,
+ const char *prefix,
+ struct repository *repo,
+ const char * const *usage_opts);
+
+#endif /* PACK_REFS_H */
diff --git a/packfile.c b/packfile.c
index acb680966d..5a7caec292 100644
--- a/packfile.c
+++ b/packfile.c
@@ -278,7 +278,7 @@ static int unuse_one_window(struct packed_git *current)
if (current)
scan_windows(current, &lru_p, &lru_w, &lru_l);
- for (p = current->repo->objects->packed_git; p; p = p->next)
+ for (p = current->repo->objects->packfiles->packs; p; p = p->next)
scan_windows(p, &lru_p, &lru_w, &lru_l);
if (lru_p) {
munmap(lru_w->base, lru_w->len);
@@ -362,13 +362,8 @@ void close_pack(struct packed_git *p)
void close_object_store(struct object_database *o)
{
struct odb_source *source;
- struct packed_git *p;
- for (p = o->packed_git; p; p = p->next)
- if (p->do_not_close)
- BUG("want to close pack marked 'do-not-close'");
- else
- close_pack(p);
+ packfile_store_close(o->packfiles);
for (source = o->sources; source; source = source->next) {
if (source->midx)
@@ -468,7 +463,7 @@ static int close_one_pack(struct repository *r)
struct pack_window *mru_w = NULL;
int accept_windows_inuse = 1;
- for (p = r->objects->packed_git; p; p = p->next) {
+ for (p = r->objects->packfiles->packs; p; p = p->next) {
if (p->pack_fd == -1)
continue;
find_lru_pack(p, &lru_p, &mru_w, &accept_windows_inuse);
@@ -784,16 +779,44 @@ struct packed_git *add_packed_git(struct repository *r, const char *path,
return p;
}
-void install_packed_git(struct repository *r, struct packed_git *pack)
+void packfile_store_add_pack(struct packfile_store *store,
+ struct packed_git *pack)
{
if (pack->pack_fd != -1)
pack_open_fds++;
- pack->next = r->objects->packed_git;
- r->objects->packed_git = pack;
+ pack->next = store->packs;
+ store->packs = pack;
hashmap_entry_init(&pack->packmap_ent, strhash(pack->pack_name));
- hashmap_add(&r->objects->pack_map, &pack->packmap_ent);
+ hashmap_add(&store->map, &pack->packmap_ent);
+}
+
+struct packed_git *packfile_store_load_pack(struct packfile_store *store,
+ const char *idx_path, int local)
+{
+ struct strbuf key = STRBUF_INIT;
+ struct packed_git *p;
+
+ /*
+ * We're being called with the path to the index file, but `pack_map`
+ * holds the path to the packfile itself.
+ */
+ strbuf_addstr(&key, idx_path);
+ strbuf_strip_suffix(&key, ".idx");
+ strbuf_addstr(&key, ".pack");
+
+ p = hashmap_get_entry_from_hash(&store->map, strhash(key.buf), key.buf,
+ struct packed_git, packmap_ent);
+ if (!p) {
+ p = add_packed_git(store->odb->repo, idx_path,
+ strlen(idx_path), local);
+ if (p)
+ packfile_store_add_pack(store, p);
+ }
+
+ strbuf_release(&key);
+ return p;
}
void (*report_garbage)(unsigned seen_bits, const char *path);
@@ -895,23 +918,14 @@ static void prepare_pack(const char *full_name, size_t full_name_len,
const char *file_name, void *_data)
{
struct prepare_pack_data *data = (struct prepare_pack_data *)_data;
- struct packed_git *p;
size_t base_len = full_name_len;
if (strip_suffix_mem(full_name, &base_len, ".idx") &&
!(data->m && midx_contains_pack(data->m, file_name))) {
- struct hashmap_entry hent;
- char *pack_name = xstrfmt("%.*s.pack", (int)base_len, full_name);
- unsigned int hash = strhash(pack_name);
- hashmap_entry_init(&hent, hash);
-
- /* Don't reopen a pack we already have. */
- if (!hashmap_get(&data->r->objects->pack_map, &hent, pack_name)) {
- p = add_packed_git(data->r, full_name, full_name_len, data->local);
- if (p)
- install_packed_git(data->r, p);
- }
- free(pack_name);
+ char *trimmed_path = xstrndup(full_name, full_name_len);
+ packfile_store_load_pack(data->r->objects->packfiles,
+ trimmed_path, data->local);
+ free(trimmed_path);
}
if (!report_garbage)
@@ -951,40 +965,6 @@ static void prepare_packed_git_one(struct odb_source *source)
string_list_clear(data.garbage, 0);
}
-static void prepare_packed_git(struct repository *r);
-/*
- * Give a fast, rough count of the number of objects in the repository. This
- * ignores loose objects completely. If you have a lot of them, then either
- * you should repack because your performance will be awful, or they are
- * all unreachable objects about to be pruned, in which case they're not really
- * interesting as a measure of repo size in the first place.
- */
-unsigned long repo_approximate_object_count(struct repository *r)
-{
- if (!r->objects->approximate_object_count_valid) {
- struct odb_source *source;
- unsigned long count = 0;
- struct packed_git *p;
-
- prepare_packed_git(r);
-
- for (source = r->objects->sources; source; source = source->next) {
- struct multi_pack_index *m = get_multi_pack_index(source);
- if (m)
- count += m->num_objects;
- }
-
- for (p = r->objects->packed_git; p; p = p->next) {
- if (open_pack_index(p))
- continue;
- count += p->num_objects;
- }
- r->objects->approximate_object_count = count;
- r->objects->approximate_object_count_valid = 1;
- }
- return r->objects->approximate_object_count;
-}
-
DEFINE_LIST_SORT(static, sort_packs, struct packed_git, next);
static int sort_pack(const struct packed_git *a, const struct packed_git *b)
@@ -1013,80 +993,51 @@ static int sort_pack(const struct packed_git *a, const struct packed_git *b)
return -1;
}
-static void rearrange_packed_git(struct repository *r)
-{
- sort_packs(&r->objects->packed_git, sort_pack);
-}
-
-static void prepare_packed_git_mru(struct repository *r)
+static void packfile_store_prepare_mru(struct packfile_store *store)
{
struct packed_git *p;
- INIT_LIST_HEAD(&r->objects->packed_git_mru);
+ INIT_LIST_HEAD(&store->mru);
- for (p = r->objects->packed_git; p; p = p->next)
- list_add_tail(&p->mru, &r->objects->packed_git_mru);
+ for (p = store->packs; p; p = p->next)
+ list_add_tail(&p->mru, &store->mru);
}
-static void prepare_packed_git(struct repository *r)
+void packfile_store_prepare(struct packfile_store *store)
{
struct odb_source *source;
- if (r->objects->packed_git_initialized)
+ if (store->initialized)
return;
- odb_prepare_alternates(r->objects);
- for (source = r->objects->sources; source; source = source->next) {
+ odb_prepare_alternates(store->odb);
+ for (source = store->odb->sources; source; source = source->next) {
prepare_multi_pack_index_one(source);
prepare_packed_git_one(source);
}
- rearrange_packed_git(r);
-
- prepare_packed_git_mru(r);
- r->objects->packed_git_initialized = 1;
-}
-
-void reprepare_packed_git(struct repository *r)
-{
- struct odb_source *source;
+ sort_packs(&store->packs, sort_pack);
- obj_read_lock();
-
- /*
- * Reprepare alt odbs, in case the alternates file was modified
- * during the course of this process. This only _adds_ odbs to
- * the linked list, so existing odbs will continue to exist for
- * the lifetime of the process.
- */
- r->objects->loaded_alternates = 0;
- odb_prepare_alternates(r->objects);
-
- for (source = r->objects->sources; source; source = source->next)
- odb_clear_loose_cache(source);
-
- r->objects->approximate_object_count_valid = 0;
- r->objects->packed_git_initialized = 0;
- prepare_packed_git(r);
- obj_read_unlock();
+ packfile_store_prepare_mru(store);
+ store->initialized = true;
}
-struct packed_git *get_packed_git(struct repository *r)
+void packfile_store_reprepare(struct packfile_store *store)
{
- prepare_packed_git(r);
- return r->objects->packed_git;
+ store->initialized = false;
+ packfile_store_prepare(store);
}
-struct multi_pack_index *get_multi_pack_index(struct odb_source *source)
+struct packed_git *packfile_store_get_packs(struct packfile_store *store)
{
- prepare_packed_git(source->odb->repo);
- return source->midx;
+ packfile_store_prepare(store);
+ return store->packs;
}
-struct packed_git *get_all_packs(struct repository *r)
+struct packed_git *packfile_store_get_all_packs(struct packfile_store *store)
{
- prepare_packed_git(r);
+ packfile_store_prepare(store);
- for (struct odb_source *source = r->objects->sources; source; source = source->next) {
+ for (struct odb_source *source = store->odb->sources; source; source = source->next) {
struct multi_pack_index *m = source->midx;
if (!m)
continue;
@@ -1094,13 +1045,46 @@ struct packed_git *get_all_packs(struct repository *r)
prepare_midx_pack(m, i);
}
- return r->objects->packed_git;
+ return store->packs;
}
-struct list_head *get_packed_git_mru(struct repository *r)
+struct list_head *packfile_store_get_packs_mru(struct packfile_store *store)
{
- prepare_packed_git(r);
- return &r->objects->packed_git_mru;
+ packfile_store_prepare(store);
+ return &store->mru;
+}
+
+/*
+ * Give a fast, rough count of the number of objects in the repository. This
+ * ignores loose objects completely. If you have a lot of them, then either
+ * you should repack because your performance will be awful, or they are
+ * all unreachable objects about to be pruned, in which case they're not really
+ * interesting as a measure of repo size in the first place.
+ */
+unsigned long repo_approximate_object_count(struct repository *r)
+{
+ if (!r->objects->approximate_object_count_valid) {
+ struct odb_source *source;
+ unsigned long count = 0;
+ struct packed_git *p;
+
+ packfile_store_prepare(r->objects->packfiles);
+
+ for (source = r->objects->sources; source; source = source->next) {
+ struct multi_pack_index *m = get_multi_pack_index(source);
+ if (m)
+ count += m->num_objects;
+ }
+
+ for (p = r->objects->packfiles->packs; p; p = p->next) {
+ if (open_pack_index(p))
+ continue;
+ count += p->num_objects;
+ }
+ r->objects->approximate_object_count = count;
+ r->objects->approximate_object_count_valid = 1;
+ }
+ return r->objects->approximate_object_count;
}
unsigned long unpack_object_header_buffer(const unsigned char *buf,
@@ -1155,7 +1139,7 @@ unsigned long get_size_from_delta(struct packed_git *p,
*
* Other worrying sections could be the call to close_pack_fd(),
* which can close packs even with in-use windows, and to
- * reprepare_packed_git(). Regarding the former, mmap doc says:
+ * odb_reprepare(). Regarding the former, mmap doc says:
* "closing the file descriptor does not unmap the region". And
* for the latter, it won't re-open already available packs.
*/
@@ -1219,7 +1203,7 @@ const struct packed_git *has_packed_and_bad(struct repository *r,
{
struct packed_git *p;
- for (p = r->objects->packed_git; p; p = p->next)
+ for (p = r->objects->packfiles->packs; p; p = p->next)
if (oidset_contains(&p->bad_objects, oid))
return p;
return NULL;
@@ -2074,19 +2058,19 @@ int find_pack_entry(struct repository *r, const struct object_id *oid, struct pa
{
struct list_head *pos;
- prepare_packed_git(r);
+ packfile_store_prepare(r->objects->packfiles);
for (struct odb_source *source = r->objects->sources; source; source = source->next)
if (source->midx && fill_midx_entry(source->midx, oid, e))
return 1;
- if (!r->objects->packed_git)
+ if (!r->objects->packfiles->packs)
return 0;
- list_for_each(pos, &r->objects->packed_git_mru) {
+ list_for_each(pos, &r->objects->packfiles->mru) {
struct packed_git *p = list_entry(pos, struct packed_git, mru);
if (!p->multi_pack_index && fill_pack_entry(oid, e, p)) {
- list_move(&p->mru, &r->objects->packed_git_mru);
+ list_move(&p->mru, &r->objects->packfiles->mru);
return 1;
}
}
@@ -2096,19 +2080,19 @@ int find_pack_entry(struct repository *r, const struct object_id *oid, struct pa
static void maybe_invalidate_kept_pack_cache(struct repository *r,
unsigned flags)
{
- if (!r->objects->kept_pack_cache.packs)
+ if (!r->objects->packfiles->kept_cache.packs)
return;
- if (r->objects->kept_pack_cache.flags == flags)
+ if (r->objects->packfiles->kept_cache.flags == flags)
return;
- FREE_AND_NULL(r->objects->kept_pack_cache.packs);
- r->objects->kept_pack_cache.flags = 0;
+ FREE_AND_NULL(r->objects->packfiles->kept_cache.packs);
+ r->objects->packfiles->kept_cache.flags = 0;
}
struct packed_git **kept_pack_cache(struct repository *r, unsigned flags)
{
maybe_invalidate_kept_pack_cache(r, flags);
- if (!r->objects->kept_pack_cache.packs) {
+ if (!r->objects->packfiles->kept_cache.packs) {
struct packed_git **packs = NULL;
size_t nr = 0, alloc = 0;
struct packed_git *p;
@@ -2121,7 +2105,7 @@ struct packed_git **kept_pack_cache(struct repository *r, unsigned flags)
* covers, one kept and one not kept, but the midx returns only
* the non-kept version.
*/
- for (p = get_all_packs(r); p; p = p->next) {
+ for (p = packfile_store_get_all_packs(r->objects->packfiles); p; p = p->next) {
if ((p->pack_keep && (flags & ON_DISK_KEEP_PACKS)) ||
(p->pack_keep_in_core && (flags & IN_CORE_KEEP_PACKS))) {
ALLOC_GROW(packs, nr + 1, alloc);
@@ -2131,11 +2115,11 @@ struct packed_git **kept_pack_cache(struct repository *r, unsigned flags)
ALLOC_GROW(packs, nr + 1, alloc);
packs[nr] = NULL;
- r->objects->kept_pack_cache.packs = packs;
- r->objects->kept_pack_cache.flags = flags;
+ r->objects->packfiles->kept_cache.packs = packs;
+ r->objects->packfiles->kept_cache.flags = flags;
}
- return r->objects->kept_pack_cache.packs;
+ return r->objects->packfiles->kept_cache.packs;
}
int find_kept_pack_entry(struct repository *r,
@@ -2218,7 +2202,7 @@ int for_each_packed_object(struct repository *repo, each_packed_object_fn cb,
int r = 0;
int pack_errors = 0;
- for (p = get_all_packs(repo); p; p = p->next) {
+ for (p = packfile_store_get_all_packs(repo->objects->packfiles); p; p = p->next) {
if ((flags & FOR_EACH_OBJECT_LOCAL_ONLY) && !p->pack_local)
continue;
if ((flags & FOR_EACH_OBJECT_PROMISOR_ONLY) &&
@@ -2332,3 +2316,46 @@ int parse_pack_header_option(const char *in, unsigned char *out, unsigned int *l
*len = hdr - out;
return 0;
}
+
+static int pack_map_entry_cmp(const void *cmp_data UNUSED,
+ const struct hashmap_entry *entry,
+ const struct hashmap_entry *entry2,
+ const void *keydata)
+{
+ const char *key = keydata;
+ const struct packed_git *pg1, *pg2;
+
+ pg1 = container_of(entry, const struct packed_git, packmap_ent);
+ pg2 = container_of(entry2, const struct packed_git, packmap_ent);
+
+ return strcmp(pg1->pack_name, key ? key : pg2->pack_name);
+}
+
+struct packfile_store *packfile_store_new(struct object_database *odb)
+{
+ struct packfile_store *store;
+ CALLOC_ARRAY(store, 1);
+ store->odb = odb;
+ INIT_LIST_HEAD(&store->mru);
+ hashmap_init(&store->map, pack_map_entry_cmp, NULL, 0);
+ return store;
+}
+
+void packfile_store_free(struct packfile_store *store)
+{
+ for (struct packed_git *p = store->packs, *next; p; p = next) {
+ next = p->next;
+ free(p);
+ }
+ hashmap_clear(&store->map);
+ free(store);
+}
+
+void packfile_store_close(struct packfile_store *store)
+{
+ for (struct packed_git *p = store->packs; p; p = p->next) {
+ if (p->do_not_close)
+ BUG("want to close pack marked 'do-not-close'");
+ close_pack(p);
+ }
+}
diff --git a/packfile.h b/packfile.h
index f16753f2a9..e7a5792b6c 100644
--- a/packfile.h
+++ b/packfile.h
@@ -52,19 +52,114 @@ struct packed_git {
char pack_name[FLEX_ARRAY]; /* more */
};
-static inline int pack_map_entry_cmp(const void *cmp_data UNUSED,
- const struct hashmap_entry *entry,
- const struct hashmap_entry *entry2,
- const void *keydata)
-{
- const char *key = keydata;
- const struct packed_git *pg1, *pg2;
+/*
+ * A store that manages packfiles for a given object database.
+ */
+struct packfile_store {
+ struct object_database *odb;
- pg1 = container_of(entry, const struct packed_git, packmap_ent);
- pg2 = container_of(entry2, const struct packed_git, packmap_ent);
+ /*
+ * The list of packfiles in the order in which they are being added to
+ * the store.
+ */
+ struct packed_git *packs;
- return strcmp(pg1->pack_name, key ? key : pg2->pack_name);
-}
+ /*
+ * Cache of packfiles which are marked as "kept", either because there
+ * is an on-disk ".keep" file or because they are marked as "kept" in
+ * memory.
+ *
+ * Should not be accessed directly, but via `kept_pack_cache()`. The
+ * list of packs gets invalidated when the stored flags and the flags
+ * passed to `kept_pack_cache()` differ.
+ */
+ struct {
+ struct packed_git **packs;
+ unsigned flags;
+ } kept_cache;
+
+ /* A most-recently-used ordered version of the packs list. */
+ struct list_head mru;
+
+ /*
+ * A map of packfile names to packed_git structs for tracking which
+ * packs have been loaded already.
+ */
+ struct hashmap map;
+
+ /*
+ * Whether the store has already been populated with this repository's
+ * packfiles.
+ */
+ bool initialized;
+};
+
+/*
+ * Allocate and initialize a new empty packfile store for the given object
+ * database.
+ */
+struct packfile_store *packfile_store_new(struct object_database *odb);
+
+/*
+ * Free the packfile store and all its associated state. Packfiles are only
+ * freed, not closed; close them first via `packfile_store_close()`.
+ */
+void packfile_store_free(struct packfile_store *store);
+
+/*
+ * Close all packfiles associated with this store. The packfiles won't be
+ * free'd, so they can be re-opened at a later point in time.
+ */
+void packfile_store_close(struct packfile_store *store);
+
+/*
+ * Prepare the packfile store by loading packfiles and multi-pack indices for
+ * all alternates. This becomes a no-op if the store is already prepared.
+ *
+ * It shouldn't typically be necessary to call this function directly, as
+ * functions that access the store know to prepare it.
+ */
+void packfile_store_prepare(struct packfile_store *store);
+
+/*
+ * Clear the packfile caches and try to look up any new packfiles that have
+ * appeared since last preparing the packfile store.
+ *
+ * This function must be called under the `odb_read_lock()`.
+ */
+void packfile_store_reprepare(struct packfile_store *store);
+
+/*
+ * Add the pack to the store so that contained objects become accessible via
+ * the store. This moves ownership into the store.
+ */
+void packfile_store_add_pack(struct packfile_store *store,
+ struct packed_git *pack);
+
+/*
+ * Get packs managed by the given store. Does not load the MIDX or any packs
+ * referenced by it.
+ */
+struct packed_git *packfile_store_get_packs(struct packfile_store *store);
+
+/*
+ * Get all packs managed by the given store, including packfiles that are
+ * referenced by multi-pack indices.
+ */
+struct packed_git *packfile_store_get_all_packs(struct packfile_store *store);
+
+/*
+ * Get all packs in most-recently-used order.
+ */
+struct list_head *packfile_store_get_packs_mru(struct packfile_store *store);
+
+/*
+ * Open the packfile and add it to the store if it isn't yet known. Returns
+ * either the newly opened packfile or the preexisting packfile. Returns a
+ * `NULL` pointer in case the packfile could not be opened.
+ */
+struct packed_git *packfile_store_load_pack(struct packfile_store *store,
+ const char *idx_path, int local);
struct pack_window {
struct pack_window *next;
@@ -142,14 +237,6 @@ int for_each_packed_object(struct repository *repo, each_packed_object_fn cb,
#define PACKDIR_FILE_GARBAGE 4
extern void (*report_garbage)(unsigned seen_bits, const char *path);
-void reprepare_packed_git(struct repository *r);
-void install_packed_git(struct repository *r, struct packed_git *pack);
-
-struct packed_git *get_packed_git(struct repository *r);
-struct list_head *get_packed_git_mru(struct repository *r);
-struct multi_pack_index *get_multi_pack_index(struct odb_source *source);
-struct packed_git *get_all_packs(struct repository *r);
-
/*
* Give a rough count of objects in the repository. This sacrifices accuracy
* for speed.
diff --git a/read-cache.c b/read-cache.c
index 229b8ef11c..032480d0c7 100644
--- a/read-cache.c
+++ b/read-cache.c
@@ -8,7 +8,6 @@
#define DISABLE_SIGN_COMPARE_WARNINGS
#include "git-compat-util.h"
-#include "bulk-checkin.h"
#include "config.h"
#include "date.h"
#include "diff.h"
@@ -1807,7 +1806,7 @@ static struct cache_entry *create_from_disk(struct mem_pool *ce_mem_pool,
if (expand_name_field) {
const unsigned char *cp = (const unsigned char *)name;
- size_t strip_len, previous_len;
+ uint64_t strip_len, previous_len;
/* If we're at the beginning of a block, ignore the previous name */
strip_len = decode_varint(&cp);
@@ -2655,8 +2654,10 @@ static int ce_write_entry(struct hashfile *f, struct cache_entry *ce,
hashwrite(f, ce->name, len);
hashwrite(f, padding, align_padding_size(size, len));
} else {
- int common, to_remove, prefix_size;
+ int common, to_remove;
+ uint8_t prefix_size;
unsigned char to_remove_vi[16];
+
for (common = 0;
(common < previous_name->len &&
ce->name[common] &&
@@ -3973,9 +3974,9 @@ int add_files_to_cache(struct repository *repo, const char *prefix,
* This function is invoked from commands other than 'add', which
* may not have their own transaction active.
*/
- transaction = begin_odb_transaction(repo->objects);
+ transaction = odb_transaction_begin(repo->objects);
run_diff_files(&rev, DIFF_RACY_IS_MODIFIED);
- end_odb_transaction(transaction);
+ odb_transaction_commit(transaction);
release_revisions(&rev);
return !!data.add_errors;
diff --git a/refs.c b/refs.c
index c164374c26..750e5db077 100644
--- a/refs.c
+++ b/refs.c
@@ -2304,6 +2304,11 @@ int refs_pack_refs(struct ref_store *refs, struct pack_refs_opts *opts)
return refs->be->pack_refs(refs, opts);
}
+int refs_optimize(struct ref_store *refs, struct pack_refs_opts *opts)
+{
+ return refs->be->optimize(refs, opts);
+}
+
int peel_iterated_oid(struct repository *r, const struct object_id *base, struct object_id *peeled)
{
if (current_ref_iter &&
diff --git a/refs.h b/refs.h
index bc5d8427a5..4e6bd63aa8 100644
--- a/refs.h
+++ b/refs.h
@@ -483,6 +483,12 @@ struct pack_refs_opts {
int refs_pack_refs(struct ref_store *refs, struct pack_refs_opts *opts);
/*
+ * Optimize the ref store. The exact behavior is up to the backend.
+ * For the files backend, this is equivalent to packing refs.
+ */
+int refs_optimize(struct ref_store *refs, struct pack_refs_opts *opts);
+
+/*
* Setup reflog before using. Fill in err and return -1 on failure.
*/
int refs_create_reflog(struct ref_store *refs, const char *refname,
diff --git a/refs/files-backend.c b/refs/files-backend.c
index bc3347d18c..bb2bec3807 100644
--- a/refs/files-backend.c
+++ b/refs/files-backend.c
@@ -1528,6 +1528,15 @@ static int files_pack_refs(struct ref_store *ref_store,
return 0;
}
+static int files_optimize(struct ref_store *ref_store, struct pack_refs_opts *opts)
+{
+ /*
+ * For the "files" backend, "optimizing" is the same as "packing".
+ * So, we just call the existing worker function for packing.
+ */
+ return files_pack_refs(ref_store, opts);
+}
+
/*
* People using contrib's git-new-workdir have .git/logs/refs ->
* /some/other/path/.git/logs/refs, and that may live on another device.
@@ -3989,6 +3998,7 @@ struct ref_storage_be refs_be_files = {
.transaction_abort = files_transaction_abort,
.pack_refs = files_pack_refs,
+ .optimize = files_optimize,
.rename_ref = files_rename_ref,
.copy_ref = files_copy_ref,
diff --git a/refs/ref-cache.c b/refs/ref-cache.c
index c180e0aad7..e5e5df16d8 100644
--- a/refs/ref-cache.c
+++ b/refs/ref-cache.c
@@ -539,7 +539,7 @@ static int cache_ref_iterator_seek(struct ref_iterator *ref_iterator,
*/
break;
}
- } while (slash);
+ } while (slash && dir->nr);
}
return 0;
diff --git a/refs/refs-internal.h b/refs/refs-internal.h
index 54c2079c12..4ef3bd75c6 100644
--- a/refs/refs-internal.h
+++ b/refs/refs-internal.h
@@ -447,6 +447,8 @@ typedef int ref_transaction_commit_fn(struct ref_store *refs,
typedef int pack_refs_fn(struct ref_store *ref_store,
struct pack_refs_opts *opts);
+typedef int optimize_fn(struct ref_store *ref_store,
+ struct pack_refs_opts *opts);
typedef int rename_ref_fn(struct ref_store *ref_store,
const char *oldref, const char *newref,
const char *logmsg);
@@ -572,6 +574,7 @@ struct ref_storage_be {
ref_transaction_abort_fn *transaction_abort;
pack_refs_fn *pack_refs;
+ optimize_fn *optimize;
rename_ref_fn *rename_ref;
copy_ref_fn *copy_ref;
diff --git a/refs/reftable-backend.c b/refs/reftable-backend.c
index 9e889da2ff..9884b876c1 100644
--- a/refs/reftable-backend.c
+++ b/refs/reftable-backend.c
@@ -1741,6 +1741,12 @@ out:
return ret;
}
+static int reftable_be_optimize(struct ref_store *ref_store,
+ struct pack_refs_opts *opts)
+{
+ return reftable_be_pack_refs(ref_store, opts);
+}
+
struct write_create_symref_arg {
struct reftable_ref_store *refs;
struct reftable_stack *stack;
@@ -2727,6 +2733,7 @@ struct ref_storage_be refs_be_reftable = {
.transaction_abort = reftable_be_transaction_abort,
.pack_refs = reftable_be_pack_refs,
+ .optimize = reftable_be_optimize,
.rename_ref = reftable_be_rename_ref,
.copy_ref = reftable_be_copy_ref,
diff --git a/remote-curl.c b/remote-curl.c
index 84f4694780..69f919454a 100644
--- a/remote-curl.c
+++ b/remote-curl.c
@@ -894,14 +894,6 @@ static int probe_rpc(struct rpc_state *rpc, struct slot_results *results)
return err;
}
-static curl_off_t xcurl_off_t(size_t len)
-{
- uintmax_t size = len;
- if (size > maximum_signed_value_of_type(curl_off_t))
- die(_("cannot handle pushes this big"));
- return (curl_off_t)size;
-}
-
/*
* If flush_received is true, do not attempt to read any more; just use what's
* in rpc->buf.
@@ -999,7 +991,7 @@ retry:
* and we just need to send it.
*/
curl_easy_setopt(slot->curl, CURLOPT_POSTFIELDS, gzip_body);
- curl_easy_setopt(slot->curl, CURLOPT_POSTFIELDSIZE_LARGE, xcurl_off_t(gzip_size));
+ curl_easy_setopt(slot->curl, CURLOPT_POSTFIELDSIZE_LARGE, cast_size_t_to_curl_off_t(gzip_size));
} else if (use_gzip && 1024 < rpc->len) {
/* The client backend isn't giving us compressed data so
@@ -1030,7 +1022,7 @@ retry:
headers = curl_slist_append(headers, "Content-Encoding: gzip");
curl_easy_setopt(slot->curl, CURLOPT_POSTFIELDS, gzip_body);
- curl_easy_setopt(slot->curl, CURLOPT_POSTFIELDSIZE_LARGE, xcurl_off_t(gzip_size));
+ curl_easy_setopt(slot->curl, CURLOPT_POSTFIELDSIZE_LARGE, cast_size_t_to_curl_off_t(gzip_size));
if (options.verbosity > 1) {
fprintf(stderr, "POST %s (gzip %lu to %lu bytes)\n",
@@ -1043,7 +1035,7 @@ retry:
* more normal Content-Length approach.
*/
curl_easy_setopt(slot->curl, CURLOPT_POSTFIELDS, rpc->buf);
- curl_easy_setopt(slot->curl, CURLOPT_POSTFIELDSIZE_LARGE, xcurl_off_t(rpc->len));
+ curl_easy_setopt(slot->curl, CURLOPT_POSTFIELDSIZE_LARGE, cast_size_t_to_curl_off_t(rpc->len));
if (options.verbosity > 1) {
fprintf(stderr, "POST %s (%lu bytes)\n",
rpc->service_name, (unsigned long)rpc->len);
diff --git a/server-info.c b/server-info.c
index 9bb30d9ab7..1d33de821e 100644
--- a/server-info.c
+++ b/server-info.c
@@ -287,12 +287,13 @@ static int compare_info(const void *a_, const void *b_)
static void init_pack_info(struct repository *r, const char *infofile, int force)
{
+ struct packfile_store *packs = r->objects->packfiles;
struct packed_git *p;
int stale;
int i;
size_t alloc = 0;
- for (p = get_all_packs(r); p; p = p->next) {
+ for (p = packfile_store_get_all_packs(packs); p; p = p->next) {
/* we ignore things on alternate path since they are
* not available to the pullers in general.
*/
diff --git a/shared.mak b/shared.mak
index 5c7bc94785..0e7492076e 100644
--- a/shared.mak
+++ b/shared.mak
@@ -56,6 +56,7 @@ ifndef V
QUIET_MKDIR_P_PARENT = @echo ' ' MKDIR -p $(@D);
## Used in "Makefile"
+ QUIET_CARGO = @echo ' ' CARGO $@;
QUIET_CC = @echo ' ' CC $@;
QUIET_AR = @echo ' ' AR $@;
QUIET_LINK = @echo ' ' LINK $@;
diff --git a/src/cargo-meson.sh b/src/cargo-meson.sh
new file mode 100755
index 0000000000..99400986d9
--- /dev/null
+++ b/src/cargo-meson.sh
@@ -0,0 +1,32 @@
+#!/bin/sh
+
+if test "$#" -lt 2
+then
+ exit 1
+fi
+
+SOURCE_DIR="$1"
+BUILD_DIR="$2"
+BUILD_TYPE=debug
+
+shift 2
+
+for arg
+do
+ case "$arg" in
+ --release)
+ BUILD_TYPE=release;;
+ esac
+done
+
+cargo build --lib --quiet --manifest-path="$SOURCE_DIR/Cargo.toml" --target-dir="$BUILD_DIR" "$@"
+RET=$?
+if test $RET -ne 0
+then
+ exit $RET
+fi
+
+if ! cmp "$BUILD_DIR/$BUILD_TYPE/libgitcore.a" "$BUILD_DIR/libgitcore.a" >/dev/null 2>&1
+then
+ cp "$BUILD_DIR/$BUILD_TYPE/libgitcore.a" "$BUILD_DIR/libgitcore.a"
+fi
diff --git a/src/lib.rs b/src/lib.rs
new file mode 100644
index 0000000000..9da70d8b57
--- /dev/null
+++ b/src/lib.rs
@@ -0,0 +1 @@
+pub mod varint;
diff --git a/src/meson.build b/src/meson.build
new file mode 100644
index 0000000000..25b9ad5a14
--- /dev/null
+++ b/src/meson.build
@@ -0,0 +1,41 @@
+libgit_rs_sources = [
+ 'lib.rs',
+ 'varint.rs',
+]
+
+# Unfortunately we must use a wrapper command to move the output file into the
+# current build directory. This can be fixed once `cargo build --artifact-dir`
+# stabilizes. See https://github.com/rust-lang/cargo/issues/6790 for that
+# effort.
+cargo_command = [
+ shell,
+ meson.current_source_dir() / 'cargo-meson.sh',
+ meson.project_source_root(),
+ meson.current_build_dir(),
+]
+if get_option('buildtype') == 'release'
+ cargo_command += '--release'
+endif
+
+libgit_rs = custom_target('git_rs',
+ input: libgit_rs_sources + [
+ meson.project_source_root() / 'Cargo.toml',
+ ],
+ output: 'libgitcore.a',
+ command: cargo_command,
+)
+libgit_dependencies += declare_dependency(link_with: libgit_rs)
+
+if get_option('tests')
+ test('rust', cargo,
+ args: [
+ 'test',
+ '--manifest-path',
+ meson.project_source_root() / 'Cargo.toml',
+ '--target-dir',
+ meson.current_build_dir() / 'target',
+ ],
+ timeout: 0,
+ protocol: 'rust',
+ )
+endif
diff --git a/src/varint.rs b/src/varint.rs
new file mode 100644
index 0000000000..6e610bdd8e
--- /dev/null
+++ b/src/varint.rs
@@ -0,0 +1,92 @@
+#[no_mangle]
+pub unsafe extern "C" fn decode_varint(bufp: *mut *const u8) -> u64 {
+ let mut buf = *bufp;
+ let mut c = *buf;
+ let mut val = u64::from(c & 127);
+
+ buf = buf.add(1);
+
+ while (c & 128) != 0 {
+ val = val.wrapping_add(1);
+ if val == 0 || val.leading_zeros() < 7 {
+ return 0; // overflow
+ }
+
+ c = *buf;
+ buf = buf.add(1);
+
+ val = (val << 7) + u64::from(c & 127);
+ }
+
+ *bufp = buf;
+ val
+}
+
+#[no_mangle]
+pub unsafe extern "C" fn encode_varint(value: u64, buf: *mut u8) -> u8 {
+ let mut varint: [u8; 16] = [0; 16];
+ let mut pos = varint.len() - 1;
+
+ varint[pos] = (value & 127) as u8;
+
+ let mut value = value >> 7;
+ while value != 0 {
+ pos -= 1;
+ value -= 1;
+ varint[pos] = 128 | (value & 127) as u8;
+ value >>= 7;
+ }
+
+ if !buf.is_null() {
+ std::ptr::copy_nonoverlapping(varint.as_ptr().add(pos), buf, varint.len() - pos);
+ }
+
+ (varint.len() - pos) as u8
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn test_decode_varint() {
+ unsafe {
+ assert_eq!(decode_varint(&mut [0x00].as_slice().as_ptr()), 0);
+ assert_eq!(decode_varint(&mut [0x01].as_slice().as_ptr()), 1);
+ assert_eq!(decode_varint(&mut [0x7f].as_slice().as_ptr()), 127);
+ assert_eq!(decode_varint(&mut [0x80, 0x00].as_slice().as_ptr()), 128);
+ assert_eq!(decode_varint(&mut [0x80, 0x01].as_slice().as_ptr()), 129);
+ assert_eq!(decode_varint(&mut [0x80, 0x7f].as_slice().as_ptr()), 255);
+
+ // Overflows are expected to return 0.
+ assert_eq!(decode_varint(&mut [0x88; 16].as_slice().as_ptr()), 0);
+ }
+ }
+
+ #[test]
+ fn test_encode_varint() {
+ unsafe {
+ let mut varint: [u8; 16] = [0; 16];
+
+ assert_eq!(encode_varint(0, std::ptr::null_mut()), 1);
+
+ assert_eq!(encode_varint(0, varint.as_mut_slice().as_mut_ptr()), 1);
+ assert_eq!(varint, [0; 16]);
+
+ assert_eq!(encode_varint(10, varint.as_mut_slice().as_mut_ptr()), 1);
+ assert_eq!(varint, [10, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]);
+
+ assert_eq!(encode_varint(127, varint.as_mut_slice().as_mut_ptr()), 1);
+ assert_eq!(varint, [127, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]);
+
+ assert_eq!(encode_varint(128, varint.as_mut_slice().as_mut_ptr()), 2);
+ assert_eq!(varint, [128, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]);
+
+ assert_eq!(encode_varint(129, varint.as_mut_slice().as_mut_ptr()), 2);
+ assert_eq!(varint, [128, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]);
+
+ assert_eq!(encode_varint(255, varint.as_mut_slice().as_mut_ptr()), 2);
+ assert_eq!(varint, [128, 127, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]);
+ }
+ }
+}
diff --git a/t/helper/test-find-pack.c b/t/helper/test-find-pack.c
index 611a13a326..e001dc3066 100644
--- a/t/helper/test-find-pack.c
+++ b/t/helper/test-find-pack.c
@@ -39,7 +39,7 @@ int cmd__find_pack(int argc, const char **argv)
if (repo_get_oid(the_repository, argv[0], &oid))
die("cannot parse %s as an object name", argv[0]);
- for (p = get_all_packs(the_repository); p; p = p->next)
+ for (p = packfile_store_get_all_packs(the_repository->objects->packfiles); p; p = p->next)
if (find_pack_entry_one(&oid, p)) {
printf("%s\n", p->pack_name);
actual_count++;
diff --git a/t/helper/test-pack-deltas.c b/t/helper/test-pack-deltas.c
index 4caa024b1e..4981401eaa 100644
--- a/t/helper/test-pack-deltas.c
+++ b/t/helper/test-pack-deltas.c
@@ -51,16 +51,14 @@ static void write_ref_delta(struct hashfile *f,
unsigned long size, base_size, delta_size, compressed_size, hdrlen;
enum object_type type;
void *base_buf, *delta_buf;
- void *buf = repo_read_object_file(the_repository,
- oid, &type,
- &size);
+ void *buf = odb_read_object(the_repository->objects,
+ oid, &type, &size);
if (!buf)
die("unable to read %s", oid_to_hex(oid));
- base_buf = repo_read_object_file(the_repository,
- base, &type,
- &base_size);
+ base_buf = odb_read_object(the_repository->objects,
+ base, &type, &base_size);
if (!base_buf)
die("unable to read %s", oid_to_hex(base));
diff --git a/t/helper/test-pack-mtimes.c b/t/helper/test-pack-mtimes.c
index d51aaa3dc4..7c428c1601 100644
--- a/t/helper/test-pack-mtimes.c
+++ b/t/helper/test-pack-mtimes.c
@@ -37,7 +37,7 @@ int cmd__pack_mtimes(int argc, const char **argv)
if (argc != 2)
usage(pack_mtimes_usage);
- for (p = get_all_packs(the_repository); p; p = p->next) {
+ for (p = packfile_store_get_all_packs(the_repository->objects->packfiles); p; p = p->next) {
strbuf_addstr(&buf, basename(p->pack_name));
strbuf_strip_suffix(&buf, ".pack");
strbuf_addstr(&buf, ".mtimes");
diff --git a/t/meson.build b/t/meson.build
index 7974795fe4..11376b9e25 100644
--- a/t/meson.build
+++ b/t/meson.build
@@ -213,6 +213,7 @@ integration_tests = [
't1460-refs-migrate.sh',
't1461-refs-list.sh',
't1462-refs-exists.sh',
+ 't1463-refs-optimize.sh',
't1500-rev-parse.sh',
't1501-work-tree.sh',
't1502-rev-parse-parseopt.sh',
@@ -1034,6 +1035,7 @@ integration_tests = [
't9302-fast-import-unpack-limit.sh',
't9303-fast-import-compression.sh',
't9304-fast-import-marks.sh',
+ 't9305-fast-import-signatures.sh',
't9350-fast-export.sh',
't9351-fast-export-anonymize.sh',
't9400-git-cvsserver-server.sh',
diff --git a/t/pack-refs-tests.sh b/t/pack-refs-tests.sh
new file mode 100644
index 0000000000..3dbcc01718
--- /dev/null
+++ b/t/pack-refs-tests.sh
@@ -0,0 +1,431 @@
+pack_refs=${pack_refs:-pack-refs}
+
+test_expect_success 'enable reflogs' '
+ git config core.logallrefupdates true
+'
+
+test_expect_success 'prepare a trivial repository' '
+ echo Hello > A &&
+ git update-index --add A &&
+ git commit -m "Initial commit." &&
+ HEAD=$(git rev-parse --verify HEAD)
+'
+
+test_expect_success '${pack_refs} --prune --all' '
+ test_path_is_missing .git/packed-refs &&
+ git ${pack_refs} --no-prune --all &&
+ test_path_is_file .git/packed-refs &&
+ N=$(find .git/refs -type f | wc -l) &&
+ test "$N" != 0 &&
+
+ git ${pack_refs} --prune --all &&
+ test_path_is_file .git/packed-refs &&
+ N=$(find .git/refs -type f) &&
+ test -z "$N"
+'
+
+SHA1=
+
+test_expect_success 'see if git show-ref works as expected' '
+ git branch a &&
+ SHA1=$(cat .git/refs/heads/a) &&
+ echo "$SHA1 refs/heads/a" >expect &&
+ git show-ref a >result &&
+ test_cmp expect result
+'
+
+test_expect_success 'see if a branch still exists when packed' '
+ git branch b &&
+ git ${pack_refs} --all &&
+ rm -f .git/refs/heads/b &&
+ echo "$SHA1 refs/heads/b" >expect &&
+ git show-ref b >result &&
+ test_cmp expect result
+'
+
+test_expect_success 'git branch c/d should barf if branch c exists' '
+ git branch c &&
+ git ${pack_refs} --all &&
+ rm -f .git/refs/heads/c &&
+ test_must_fail git branch c/d
+'
+
+test_expect_success 'see if a branch still exists after git ${pack_refs} --prune' '
+ git branch e &&
+ git ${pack_refs} --all --prune &&
+ echo "$SHA1 refs/heads/e" >expect &&
+ git show-ref e >result &&
+ test_cmp expect result
+'
+
+test_expect_success 'see if git ${pack_refs} --prune removes ref files' '
+ git branch f &&
+ git ${pack_refs} --all --prune &&
+ ! test -f .git/refs/heads/f
+'
+
+test_expect_success 'see if git ${pack_refs} --prune removes empty dirs' '
+ git branch r/s/t &&
+ git ${pack_refs} --all --prune &&
+ ! test -e .git/refs/heads/r
+'
+
+test_expect_success 'git branch g should work when git branch g/h has been deleted' '
+ git branch g/h &&
+ git ${pack_refs} --all --prune &&
+ git branch -d g/h &&
+ git branch g &&
+ git ${pack_refs} --all &&
+ git branch -d g
+'
+
+test_expect_success 'git branch i/j/k should barf if branch i exists' '
+ git branch i &&
+ git ${pack_refs} --all --prune &&
+ test_must_fail git branch i/j/k
+'
+
+test_expect_success 'test git branch k after branch k/l/m and k/lm have been deleted' '
+ git branch k/l &&
+ git branch k/lm &&
+ git branch -d k/l &&
+ git branch k/l/m &&
+ git branch -d k/l/m &&
+ git branch -d k/lm &&
+ git branch k
+'
+
+test_expect_success 'test git branch n after some branch deletion and pruning' '
+ git branch n/o &&
+ git branch n/op &&
+ git branch -d n/o &&
+ git branch n/o/p &&
+ git branch -d n/op &&
+ git ${pack_refs} --all --prune &&
+ git branch -d n/o/p &&
+ git branch n
+'
+
+test_expect_success 'test excluded refs are not packed' '
+ git branch dont_pack1 &&
+ git branch dont_pack2 &&
+ git branch pack_this &&
+ git ${pack_refs} --all --exclude "refs/heads/dont_pack*" &&
+ test -f .git/refs/heads/dont_pack1 &&
+ test -f .git/refs/heads/dont_pack2 &&
+ ! test -f .git/refs/heads/pack_this'
+
+test_expect_success 'test --no-exclude refs clears excluded refs' '
+ git branch dont_pack3 &&
+ git branch dont_pack4 &&
+ git ${pack_refs} --all --exclude "refs/heads/dont_pack*" --no-exclude &&
+ ! test -f .git/refs/heads/dont_pack3 &&
+ ! test -f .git/refs/heads/dont_pack4'
+
+test_expect_success 'test only included refs are packed' '
+ git branch pack_this1 &&
+ git branch pack_this2 &&
+ git tag dont_pack5 &&
+ git ${pack_refs} --include "refs/heads/pack_this*" &&
+ test -f .git/refs/tags/dont_pack5 &&
+ ! test -f .git/refs/heads/pack_this1 &&
+ ! test -f .git/refs/heads/pack_this2'
+
+test_expect_success 'test --no-include refs clears included refs' '
+ git branch pack1 &&
+ git branch pack2 &&
+ git ${pack_refs} --include "refs/heads/pack*" --no-include &&
+ test -f .git/refs/heads/pack1 &&
+ test -f .git/refs/heads/pack2'
+
+test_expect_success 'test --exclude takes precedence over --include' '
+ git branch dont_pack5 &&
+ git ${pack_refs} --include "refs/heads/pack*" --exclude "refs/heads/pack*" &&
+ test -f .git/refs/heads/dont_pack5'
+
+test_expect_success 'see if up-to-date packed refs are preserved' '
+ git branch q &&
+ git ${pack_refs} --all --prune &&
+ git update-ref refs/heads/q refs/heads/q &&
+ ! test -f .git/refs/heads/q
+'
+
+test_expect_success 'pack, prune and repack' '
+ git tag foo &&
+ git ${pack_refs} --all --prune &&
+ git show-ref >all-of-them &&
+ git ${pack_refs} &&
+ git show-ref >again &&
+ test_cmp all-of-them again
+'
+
+test_expect_success 'explicit ${pack_refs} with dangling packed reference' '
+ git commit --allow-empty -m "soon to be garbage-collected" &&
+ git ${pack_refs} --all &&
+ git reset --hard HEAD^ &&
+ git reflog expire --expire=all --all &&
+ git prune --expire=all &&
+ git ${pack_refs} --all 2>result &&
+ test_must_be_empty result
+'
+
+test_expect_success 'delete ref with dangling packed version' '
+ git checkout -b lamb &&
+ git commit --allow-empty -m "future garbage" &&
+ git ${pack_refs} --all &&
+ git reset --hard HEAD^ &&
+ git checkout main &&
+ git reflog expire --expire=all --all &&
+ git prune --expire=all &&
+ git branch -d lamb 2>result &&
+ test_must_be_empty result
+'
+
+test_expect_success 'delete ref while another dangling packed ref' '
+ git branch lamb &&
+ git commit --allow-empty -m "future garbage" &&
+ git ${pack_refs} --all &&
+ git reset --hard HEAD^ &&
+ git reflog expire --expire=all --all &&
+ git prune --expire=all &&
+ git branch -d lamb 2>result &&
+ test_must_be_empty result
+'
+
+test_expect_success 'pack ref directly below refs/' '
+ git update-ref refs/top HEAD &&
+ git ${pack_refs} --all --prune &&
+ grep refs/top .git/packed-refs &&
+ test_path_is_missing .git/refs/top
+'
+
+test_expect_success 'do not pack ref in refs/bisect' '
+ git update-ref refs/bisect/local HEAD &&
+ git ${pack_refs} --all --prune &&
+ ! grep refs/bisect/local .git/packed-refs >/dev/null &&
+ test_path_is_file .git/refs/bisect/local
+'
+
+test_expect_success 'disable reflogs' '
+ git config core.logallrefupdates false &&
+ rm -rf .git/logs
+'
+
+test_expect_success 'create packed foo/bar/baz branch' '
+ git branch foo/bar/baz &&
+ git ${pack_refs} --all --prune &&
+ test_path_is_missing .git/refs/heads/foo/bar/baz &&
+ test_must_fail git reflog exists refs/heads/foo/bar/baz
+'
+
+test_expect_success 'notice d/f conflict with existing directory' '
+ test_must_fail git branch foo &&
+ test_must_fail git branch foo/bar
+'
+
+test_expect_success 'existing directory reports concrete ref' '
+ test_must_fail git branch foo 2>stderr &&
+ test_grep refs/heads/foo/bar/baz stderr
+'
+
+test_expect_success 'notice d/f conflict with existing ref' '
+ test_must_fail git branch foo/bar/baz/extra &&
+ test_must_fail git branch foo/bar/baz/lots/of/extra/components
+'
+
+test_expect_success 'reject packed-refs with unterminated line' '
+ cp .git/packed-refs .git/packed-refs.bak &&
+ test_when_finished "mv .git/packed-refs.bak .git/packed-refs" &&
+ printf "%s" "$HEAD refs/zzzzz" >>.git/packed-refs &&
+ echo "fatal: unterminated line in .git/packed-refs: $HEAD refs/zzzzz" >expected_err &&
+ test_must_fail git for-each-ref >out 2>err &&
+ test_cmp expected_err err
+'
+
+test_expect_success 'reject packed-refs containing junk' '
+ cp .git/packed-refs .git/packed-refs.bak &&
+ test_when_finished "mv .git/packed-refs.bak .git/packed-refs" &&
+ printf "%s\n" "bogus content" >>.git/packed-refs &&
+ echo "fatal: unexpected line in .git/packed-refs: bogus content" >expected_err &&
+ test_must_fail git for-each-ref >out 2>err &&
+ test_cmp expected_err err
+'
+
+test_expect_success 'reject packed-refs with a short SHA-1' '
+ cp .git/packed-refs .git/packed-refs.bak &&
+ test_when_finished "mv .git/packed-refs.bak .git/packed-refs" &&
+ printf "%.7s %s\n" $HEAD refs/zzzzz >>.git/packed-refs &&
+ printf "fatal: unexpected line in .git/packed-refs: %.7s %s\n" $HEAD refs/zzzzz >expected_err &&
+ test_must_fail git for-each-ref >out 2>err &&
+ test_cmp expected_err err
+'
+
+test_expect_success 'timeout if packed-refs.lock exists' '
+ LOCK=.git/packed-refs.lock &&
+ >"$LOCK" &&
+ test_when_finished "rm -f $LOCK" &&
+ test_must_fail git ${pack_refs} --all --prune
+'
+
+test_expect_success 'retry acquiring packed-refs.lock' '
+ LOCK=.git/packed-refs.lock &&
+ >"$LOCK" &&
+ test_when_finished "wait && rm -f $LOCK" &&
+ {
+ ( sleep 1 && rm -f $LOCK ) &
+ } &&
+ git -c core.packedrefstimeout=3000 ${pack_refs} --all --prune
+'
+
+test_expect_success SYMLINKS 'pack symlinked packed-refs' '
+ # First make sure that symlinking works when reading:
+ git update-ref refs/heads/lossy refs/heads/main &&
+ git for-each-ref >all-refs-before &&
+ mv .git/packed-refs .git/my-deviant-packed-refs &&
+ ln -s my-deviant-packed-refs .git/packed-refs &&
+ git for-each-ref >all-refs-linked &&
+ test_cmp all-refs-before all-refs-linked &&
+ git ${pack_refs} --all --prune &&
+ git for-each-ref >all-refs-packed &&
+ test_cmp all-refs-before all-refs-packed &&
+ test -h .git/packed-refs &&
+ test "$(test_readlink .git/packed-refs)" = "my-deviant-packed-refs"
+'
+
+# The 'packed-refs' file is stored directly in .git/. This means it is global
+# to the repository, and can only contain refs that are shared across all
+# worktrees.
+test_expect_success 'refs/worktree must not be packed' '
+ test_commit initial &&
+ test_commit wt1 &&
+ test_commit wt2 &&
+ git worktree add wt1 wt1 &&
+ git worktree add wt2 wt2 &&
+ git checkout initial &&
+ git update-ref refs/worktree/foo HEAD &&
+ git -C wt1 update-ref refs/worktree/foo HEAD &&
+ git -C wt2 update-ref refs/worktree/foo HEAD &&
+ git ${pack_refs} --all &&
+ test_path_is_missing .git/refs/tags/wt1 &&
+ test_path_is_file .git/refs/worktree/foo &&
+ test_path_is_file .git/worktrees/wt1/refs/worktree/foo &&
+ test_path_is_file .git/worktrees/wt2/refs/worktree/foo
+'
+
+# we do not want to count on running ${pack_refs} to
+# actually pack it, as it is perfectly reasonable to
+# skip processing a broken ref
+test_expect_success 'create packed-refs file with broken ref' '
+ test_tick && git commit --allow-empty -m one &&
+ recoverable=$(git rev-parse HEAD) &&
+ test_tick && git commit --allow-empty -m two &&
+ missing=$(git rev-parse HEAD) &&
+ rm -f .git/refs/heads/main &&
+ cat >.git/packed-refs <<-EOF &&
+ $missing refs/heads/main
+ $recoverable refs/heads/other
+ EOF
+ echo $missing >expect &&
+ git rev-parse refs/heads/main >actual &&
+ test_cmp expect actual
+'
+
+test_expect_success '${pack_refs} does not silently delete broken packed ref' '
+ git ${pack_refs} --all --prune &&
+ git rev-parse refs/heads/main >actual &&
+ test_cmp expect actual
+'
+
+test_expect_success '${pack_refs} does not drop broken refs during deletion' '
+ git update-ref -d refs/heads/other &&
+ git rev-parse refs/heads/main >actual &&
+ test_cmp expect actual
+'
+
+for command in "git ${pack_refs} --all --auto" "git maintenance run --task=${pack_refs} --auto"
+do
+ test_expect_success "$command does not repack below 16 refs without packed-refs" '
+ test_when_finished "rm -rf repo" &&
+ git init repo &&
+ (
+ cd repo &&
+ git config set maintenance.auto false &&
+ git commit --allow-empty --message "initial" &&
+
+ # Create 14 additional references, which brings us to
+ # 15 together with the default branch.
+ printf "create refs/heads/loose-%d HEAD\n" $(test_seq 14) >stdin &&
+ git update-ref --stdin <stdin &&
+ test_path_is_missing .git/packed-refs &&
+ git ${pack_refs} --auto --all &&
+ test_path_is_missing .git/packed-refs &&
+
+ # Create the 16th reference, which should cause us to repack.
+ git update-ref refs/heads/loose-15 HEAD &&
+ git ${pack_refs} --auto --all &&
+ test_path_is_file .git/packed-refs
+ )
+ '
+
+ test_expect_success "$command does not repack below 16 refs with small packed-refs" '
+ test_when_finished "rm -rf repo" &&
+ git init repo &&
+ (
+ cd repo &&
+ git config set maintenance.auto false &&
+ git commit --allow-empty --message "initial" &&
+
+ git ${pack_refs} --all &&
+ test_line_count = 2 .git/packed-refs &&
+
+ # Create 15 loose references.
+ printf "create refs/heads/loose-%d HEAD\n" $(test_seq 15) >stdin &&
+ git update-ref --stdin <stdin &&
+ git ${pack_refs} --auto --all &&
+ test_line_count = 2 .git/packed-refs &&
+
+ # Create the 16th loose reference, which should cause us to repack.
+ git update-ref refs/heads/loose-17 HEAD &&
+ git ${pack_refs} --auto --all &&
+ test_line_count = 18 .git/packed-refs
+ )
+ '
+
+ test_expect_success "$command scales with size of packed-refs" '
+ test_when_finished "rm -rf repo" &&
+ git init repo &&
+ (
+ cd repo &&
+ git config set maintenance.auto false &&
+ git commit --allow-empty --message "initial" &&
+
+ # Create 99 packed refs. This should cause the heuristic
+ # to require more than the minimum amount of loose refs.
+ test_seq 99 |
+ while read i
+ do
+ printf "create refs/heads/packed-%d HEAD\n" $i || return 1
+ done >stdin &&
+ git update-ref --stdin <stdin &&
+ git ${pack_refs} --all &&
+ test_line_count = 101 .git/packed-refs &&
+
+ # Create 24 loose refs, which should not yet cause us to repack.
+ printf "create refs/heads/loose-%d HEAD\n" $(test_seq 24) >stdin &&
+ git update-ref --stdin <stdin &&
+ git ${pack_refs} --auto --all &&
+ test_line_count = 101 .git/packed-refs &&
+
+ # Create another handful of refs to cross the border.
+ # Note that we explicitly do not check for strict
+ # boundaries here, as this also depends on the size of
+ # the object hash.
+ printf "create refs/heads/addn-%d HEAD\n" $(test_seq 10) >stdin &&
+ git update-ref --stdin <stdin &&
+ git ${pack_refs} --auto --all &&
+ test_line_count = 135 .git/packed-refs
+ )
+ '
+done
+
+test_done
diff --git a/t/t0014-alias.sh b/t/t0014-alias.sh
index 854d59ec58..07a53e7366 100755
--- a/t/t0014-alias.sh
+++ b/t/t0014-alias.sh
@@ -27,6 +27,20 @@ test_expect_success 'looping aliases - internal execution' '
test_grep "^fatal: alias loop detected: expansion of" output
'
+test_expect_success 'looping aliases - deprecated builtins' '
+ test_config alias.whatchanged pack-redundant &&
+ test_config alias.pack-redundant whatchanged &&
+ cat >expect <<-EOF &&
+ ${SQ}whatchanged${SQ} is aliased to ${SQ}pack-redundant${SQ}
+ ${SQ}pack-redundant${SQ} is aliased to ${SQ}whatchanged${SQ}
+ fatal: alias loop detected: expansion of ${SQ}whatchanged${SQ} does not terminate:
+ whatchanged <==
+ pack-redundant ==>
+ EOF
+ test_must_fail git whatchanged -h 2>actual &&
+ test_cmp expect actual
+'
+
# This test is disabled until external loops are fixed, because would block
# the test suite for a full minute.
#
@@ -55,4 +69,47 @@ test_expect_success 'tracing a shell alias with arguments shows trace of prepare
test_cmp expect actual
'
+can_alias_deprecated_builtin () {
+ cmd="$1" &&
+ # some git(1) commands will fail for `-h` (the case for
+ # git-status as of 2025-09-07)
+ test_might_fail git status -h >expect &&
+ test_file_not_empty expect &&
+ test_might_fail git -c alias."$cmd"=status "$cmd" -h >actual &&
+ test_cmp expect actual
+}
+
+test_expect_success 'can alias-shadow deprecated builtins' '
+ for cmd in $(git --list-cmds=deprecated)
+ do
+ can_alias_deprecated_builtin "$cmd" || return 1
+ done
+'
+
+test_expect_success 'can alias-shadow via two deprecated builtins' '
+ # some git(1) commands will fail... (see above)
+ test_might_fail git status -h >expect &&
+ test_file_not_empty expect &&
+ test_might_fail git -c alias.whatchanged=pack-redundant \
+ -c alias.pack-redundant=status whatchanged -h >actual &&
+ test_cmp expect actual
+'
+
+cannot_alias_regular_builtin () {
+ cmd="$1" &&
+ # some git(1) commands will fail... (see above)
+ test_might_fail git "$cmd" -h >expect &&
+ test_file_not_empty expect &&
+ test_might_fail git -c alias."$cmd"=status "$cmd" -h >actual &&
+ test_cmp expect actual
+}
+
+test_expect_success 'cannot alias-shadow a sample of regular builtins' '
+ for cmd in grep check-ref-format interpret-trailers \
+ checkout-index fast-import diagnose rev-list prune
+ do
+ cannot_alias_regular_builtin "$cmd" || return 1
+ done
+'
+
test_done
diff --git a/t/t0300-credentials.sh b/t/t0300-credentials.sh
index cb3a85c7ff..07aa834d33 100755
--- a/t/t0300-credentials.sh
+++ b/t/t0300-credentials.sh
@@ -991,18 +991,24 @@ test_expect_success 'url parser not confused by encoded markers' '
test_expect_success 'credential config with partial URLs' '
echo "echo password=yep" | write_script git-credential-yep &&
- test_write_lines url=https://user@example.com/repo.git >stdin &&
+ test_write_lines url=https://user@example.com/org/repo.git >stdin &&
for partial in \
example.com \
+ example.com/org/repo.git \
user@example.com \
+ user@example.com/org/repo.git \
https:// \
https://example.com \
https://example.com/ \
+ https://example.com/org \
+ https://example.com/org/ \
+ https://example.com/org/repo.git \
https://user@example.com \
https://user@example.com/ \
- https://example.com/repo.git \
- https://user@example.com/repo.git \
- /repo.git
+ https://user@example.com/org \
+ https://user@example.com/org/ \
+ https://user@example.com/org/repo.git \
+ /org/repo.git
do
git -c credential.$partial.helper=yep \
credential fill <stdin >stdout &&
@@ -1012,7 +1018,12 @@ test_expect_success 'credential config with partial URLs' '
for partial in \
dont.use.this \
+ example.com/o \
+ user@example.com/o \
http:// \
+ https://example.com/o \
+ https://user@example.com/o \
+ /o \
/repo
do
git -c credential.$partial.helper=yep \
diff --git a/t/t0601-reffiles-pack-refs.sh b/t/t0601-reffiles-pack-refs.sh
index aa7f6ecd81..12cf5d1dcb 100755
--- a/t/t0601-reffiles-pack-refs.sh
+++ b/t/t0601-reffiles-pack-refs.sh
@@ -17,432 +17,4 @@ export GIT_TEST_DEFAULT_REF_FORMAT
. ./test-lib.sh
-test_expect_success 'enable reflogs' '
- git config core.logallrefupdates true
-'
-
-test_expect_success 'prepare a trivial repository' '
- echo Hello > A &&
- git update-index --add A &&
- git commit -m "Initial commit." &&
- HEAD=$(git rev-parse --verify HEAD)
-'
-
-test_expect_success 'pack-refs --prune --all' '
- test_path_is_missing .git/packed-refs &&
- git pack-refs --no-prune --all &&
- test_path_is_file .git/packed-refs &&
- N=$(find .git/refs -type f | wc -l) &&
- test "$N" != 0 &&
-
- git pack-refs --prune --all &&
- test_path_is_file .git/packed-refs &&
- N=$(find .git/refs -type f) &&
- test -z "$N"
-'
-
-SHA1=
-
-test_expect_success 'see if git show-ref works as expected' '
- git branch a &&
- SHA1=$(cat .git/refs/heads/a) &&
- echo "$SHA1 refs/heads/a" >expect &&
- git show-ref a >result &&
- test_cmp expect result
-'
-
-test_expect_success 'see if a branch still exists when packed' '
- git branch b &&
- git pack-refs --all &&
- rm -f .git/refs/heads/b &&
- echo "$SHA1 refs/heads/b" >expect &&
- git show-ref b >result &&
- test_cmp expect result
-'
-
-test_expect_success 'git branch c/d should barf if branch c exists' '
- git branch c &&
- git pack-refs --all &&
- rm -f .git/refs/heads/c &&
- test_must_fail git branch c/d
-'
-
-test_expect_success 'see if a branch still exists after git pack-refs --prune' '
- git branch e &&
- git pack-refs --all --prune &&
- echo "$SHA1 refs/heads/e" >expect &&
- git show-ref e >result &&
- test_cmp expect result
-'
-
-test_expect_success 'see if git pack-refs --prune remove ref files' '
- git branch f &&
- git pack-refs --all --prune &&
- ! test -f .git/refs/heads/f
-'
-
-test_expect_success 'see if git pack-refs --prune removes empty dirs' '
- git branch r/s/t &&
- git pack-refs --all --prune &&
- ! test -e .git/refs/heads/r
-'
-
-test_expect_success 'git branch g should work when git branch g/h has been deleted' '
- git branch g/h &&
- git pack-refs --all --prune &&
- git branch -d g/h &&
- git branch g &&
- git pack-refs --all &&
- git branch -d g
-'
-
-test_expect_success 'git branch i/j/k should barf if branch i exists' '
- git branch i &&
- git pack-refs --all --prune &&
- test_must_fail git branch i/j/k
-'
-
-test_expect_success 'test git branch k after branch k/l/m and k/lm have been deleted' '
- git branch k/l &&
- git branch k/lm &&
- git branch -d k/l &&
- git branch k/l/m &&
- git branch -d k/l/m &&
- git branch -d k/lm &&
- git branch k
-'
-
-test_expect_success 'test git branch n after some branch deletion and pruning' '
- git branch n/o &&
- git branch n/op &&
- git branch -d n/o &&
- git branch n/o/p &&
- git branch -d n/op &&
- git pack-refs --all --prune &&
- git branch -d n/o/p &&
- git branch n
-'
-
-test_expect_success 'test excluded refs are not packed' '
- git branch dont_pack1 &&
- git branch dont_pack2 &&
- git branch pack_this &&
- git pack-refs --all --exclude "refs/heads/dont_pack*" &&
- test -f .git/refs/heads/dont_pack1 &&
- test -f .git/refs/heads/dont_pack2 &&
- ! test -f .git/refs/heads/pack_this'
-
-test_expect_success 'test --no-exclude refs clears excluded refs' '
- git branch dont_pack3 &&
- git branch dont_pack4 &&
- git pack-refs --all --exclude "refs/heads/dont_pack*" --no-exclude &&
- ! test -f .git/refs/heads/dont_pack3 &&
- ! test -f .git/refs/heads/dont_pack4'
-
-test_expect_success 'test only included refs are packed' '
- git branch pack_this1 &&
- git branch pack_this2 &&
- git tag dont_pack5 &&
- git pack-refs --include "refs/heads/pack_this*" &&
- test -f .git/refs/tags/dont_pack5 &&
- ! test -f .git/refs/heads/pack_this1 &&
- ! test -f .git/refs/heads/pack_this2'
-
-test_expect_success 'test --no-include refs clears included refs' '
- git branch pack1 &&
- git branch pack2 &&
- git pack-refs --include "refs/heads/pack*" --no-include &&
- test -f .git/refs/heads/pack1 &&
- test -f .git/refs/heads/pack2'
-
-test_expect_success 'test --exclude takes precedence over --include' '
- git branch dont_pack5 &&
- git pack-refs --include "refs/heads/pack*" --exclude "refs/heads/pack*" &&
- test -f .git/refs/heads/dont_pack5'
-
-test_expect_success 'see if up-to-date packed refs are preserved' '
- git branch q &&
- git pack-refs --all --prune &&
- git update-ref refs/heads/q refs/heads/q &&
- ! test -f .git/refs/heads/q
-'
-
-test_expect_success 'pack, prune and repack' '
- git tag foo &&
- git pack-refs --all --prune &&
- git show-ref >all-of-them &&
- git pack-refs &&
- git show-ref >again &&
- test_cmp all-of-them again
-'
-
-test_expect_success 'explicit pack-refs with dangling packed reference' '
- git commit --allow-empty -m "soon to be garbage-collected" &&
- git pack-refs --all &&
- git reset --hard HEAD^ &&
- git reflog expire --expire=all --all &&
- git prune --expire=all &&
- git pack-refs --all 2>result &&
- test_must_be_empty result
-'
-
-test_expect_success 'delete ref with dangling packed version' '
- git checkout -b lamb &&
- git commit --allow-empty -m "future garbage" &&
- git pack-refs --all &&
- git reset --hard HEAD^ &&
- git checkout main &&
- git reflog expire --expire=all --all &&
- git prune --expire=all &&
- git branch -d lamb 2>result &&
- test_must_be_empty result
-'
-
-test_expect_success 'delete ref while another dangling packed ref' '
- git branch lamb &&
- git commit --allow-empty -m "future garbage" &&
- git pack-refs --all &&
- git reset --hard HEAD^ &&
- git reflog expire --expire=all --all &&
- git prune --expire=all &&
- git branch -d lamb 2>result &&
- test_must_be_empty result
-'
-
-test_expect_success 'pack ref directly below refs/' '
- git update-ref refs/top HEAD &&
- git pack-refs --all --prune &&
- grep refs/top .git/packed-refs &&
- test_path_is_missing .git/refs/top
-'
-
-test_expect_success 'do not pack ref in refs/bisect' '
- git update-ref refs/bisect/local HEAD &&
- git pack-refs --all --prune &&
- ! grep refs/bisect/local .git/packed-refs >/dev/null &&
- test_path_is_file .git/refs/bisect/local
-'
-
-test_expect_success 'disable reflogs' '
- git config core.logallrefupdates false &&
- rm -rf .git/logs
-'
-
-test_expect_success 'create packed foo/bar/baz branch' '
- git branch foo/bar/baz &&
- git pack-refs --all --prune &&
- test_path_is_missing .git/refs/heads/foo/bar/baz &&
- test_must_fail git reflog exists refs/heads/foo/bar/baz
-'
-
-test_expect_success 'notice d/f conflict with existing directory' '
- test_must_fail git branch foo &&
- test_must_fail git branch foo/bar
-'
-
-test_expect_success 'existing directory reports concrete ref' '
- test_must_fail git branch foo 2>stderr &&
- test_grep refs/heads/foo/bar/baz stderr
-'
-
-test_expect_success 'notice d/f conflict with existing ref' '
- test_must_fail git branch foo/bar/baz/extra &&
- test_must_fail git branch foo/bar/baz/lots/of/extra/components
-'
-
-test_expect_success 'reject packed-refs with unterminated line' '
- cp .git/packed-refs .git/packed-refs.bak &&
- test_when_finished "mv .git/packed-refs.bak .git/packed-refs" &&
- printf "%s" "$HEAD refs/zzzzz" >>.git/packed-refs &&
- echo "fatal: unterminated line in .git/packed-refs: $HEAD refs/zzzzz" >expected_err &&
- test_must_fail git for-each-ref >out 2>err &&
- test_cmp expected_err err
-'
-
-test_expect_success 'reject packed-refs containing junk' '
- cp .git/packed-refs .git/packed-refs.bak &&
- test_when_finished "mv .git/packed-refs.bak .git/packed-refs" &&
- printf "%s\n" "bogus content" >>.git/packed-refs &&
- echo "fatal: unexpected line in .git/packed-refs: bogus content" >expected_err &&
- test_must_fail git for-each-ref >out 2>err &&
- test_cmp expected_err err
-'
-
-test_expect_success 'reject packed-refs with a short SHA-1' '
- cp .git/packed-refs .git/packed-refs.bak &&
- test_when_finished "mv .git/packed-refs.bak .git/packed-refs" &&
- printf "%.7s %s\n" $HEAD refs/zzzzz >>.git/packed-refs &&
- printf "fatal: unexpected line in .git/packed-refs: %.7s %s\n" $HEAD refs/zzzzz >expected_err &&
- test_must_fail git for-each-ref >out 2>err &&
- test_cmp expected_err err
-'
-
-test_expect_success 'timeout if packed-refs.lock exists' '
- LOCK=.git/packed-refs.lock &&
- >"$LOCK" &&
- test_when_finished "rm -f $LOCK" &&
- test_must_fail git pack-refs --all --prune
-'
-
-test_expect_success 'retry acquiring packed-refs.lock' '
- LOCK=.git/packed-refs.lock &&
- >"$LOCK" &&
- test_when_finished "wait && rm -f $LOCK" &&
- {
- ( sleep 1 && rm -f $LOCK ) &
- } &&
- git -c core.packedrefstimeout=3000 pack-refs --all --prune
-'
-
-test_expect_success SYMLINKS 'pack symlinked packed-refs' '
- # First make sure that symlinking works when reading:
- git update-ref refs/heads/lossy refs/heads/main &&
- git for-each-ref >all-refs-before &&
- mv .git/packed-refs .git/my-deviant-packed-refs &&
- ln -s my-deviant-packed-refs .git/packed-refs &&
- git for-each-ref >all-refs-linked &&
- test_cmp all-refs-before all-refs-linked &&
- git pack-refs --all --prune &&
- git for-each-ref >all-refs-packed &&
- test_cmp all-refs-before all-refs-packed &&
- test -h .git/packed-refs &&
- test "$(test_readlink .git/packed-refs)" = "my-deviant-packed-refs"
-'
-
-# The 'packed-refs' file is stored directly in .git/. This means it is global
-# to the repository, and can only contain refs that are shared across all
-# worktrees.
-test_expect_success 'refs/worktree must not be packed' '
- test_commit initial &&
- test_commit wt1 &&
- test_commit wt2 &&
- git worktree add wt1 wt1 &&
- git worktree add wt2 wt2 &&
- git checkout initial &&
- git update-ref refs/worktree/foo HEAD &&
- git -C wt1 update-ref refs/worktree/foo HEAD &&
- git -C wt2 update-ref refs/worktree/foo HEAD &&
- git pack-refs --all &&
- test_path_is_missing .git/refs/tags/wt1 &&
- test_path_is_file .git/refs/worktree/foo &&
- test_path_is_file .git/worktrees/wt1/refs/worktree/foo &&
- test_path_is_file .git/worktrees/wt2/refs/worktree/foo
-'
-
-# we do not want to count on running pack-refs to
-# actually pack it, as it is perfectly reasonable to
-# skip processing a broken ref
-test_expect_success 'create packed-refs file with broken ref' '
- test_tick && git commit --allow-empty -m one &&
- recoverable=$(git rev-parse HEAD) &&
- test_tick && git commit --allow-empty -m two &&
- missing=$(git rev-parse HEAD) &&
- rm -f .git/refs/heads/main &&
- cat >.git/packed-refs <<-EOF &&
- $missing refs/heads/main
- $recoverable refs/heads/other
- EOF
- echo $missing >expect &&
- git rev-parse refs/heads/main >actual &&
- test_cmp expect actual
-'
-
-test_expect_success 'pack-refs does not silently delete broken packed ref' '
- git pack-refs --all --prune &&
- git rev-parse refs/heads/main >actual &&
- test_cmp expect actual
-'
-
-test_expect_success 'pack-refs does not drop broken refs during deletion' '
- git update-ref -d refs/heads/other &&
- git rev-parse refs/heads/main >actual &&
- test_cmp expect actual
-'
-
-for command in "git pack-refs --all --auto" "git maintenance run --task=pack-refs --auto"
-do
- test_expect_success "$command does not repack below 16 refs without packed-refs" '
- test_when_finished "rm -rf repo" &&
- git init repo &&
- (
- cd repo &&
- git config set maintenance.auto false &&
- git commit --allow-empty --message "initial" &&
-
- # Create 14 additional references, which brings us to
- # 15 together with the default branch.
- printf "create refs/heads/loose-%d HEAD\n" $(test_seq 14) >stdin &&
- git update-ref --stdin <stdin &&
- test_path_is_missing .git/packed-refs &&
- git pack-refs --auto --all &&
- test_path_is_missing .git/packed-refs &&
-
- # Create the 16th reference, which should cause us to repack.
- git update-ref refs/heads/loose-15 HEAD &&
- git pack-refs --auto --all &&
- test_path_is_file .git/packed-refs
- )
- '
-
- test_expect_success "$command does not repack below 16 refs with small packed-refs" '
- test_when_finished "rm -rf repo" &&
- git init repo &&
- (
- cd repo &&
- git config set maintenance.auto false &&
- git commit --allow-empty --message "initial" &&
-
- git pack-refs --all &&
- test_line_count = 2 .git/packed-refs &&
-
- # Create 15 loose references.
- printf "create refs/heads/loose-%d HEAD\n" $(test_seq 15) >stdin &&
- git update-ref --stdin <stdin &&
- git pack-refs --auto --all &&
- test_line_count = 2 .git/packed-refs &&
-
- # Create the 16th loose reference, which should cause us to repack.
- git update-ref refs/heads/loose-17 HEAD &&
- git pack-refs --auto --all &&
- test_line_count = 18 .git/packed-refs
- )
- '
-
- test_expect_success "$command scales with size of packed-refs" '
- test_when_finished "rm -rf repo" &&
- git init repo &&
- (
- cd repo &&
- git config set maintenance.auto false &&
- git commit --allow-empty --message "initial" &&
-
- # Create 99 packed refs. This should cause the heuristic
- # to require more than the minimum amount of loose refs.
- test_seq 99 |
- while read i
- do
- printf "create refs/heads/packed-%d HEAD\n" $i || return 1
- done >stdin &&
- git update-ref --stdin <stdin &&
- git pack-refs --all &&
- test_line_count = 101 .git/packed-refs &&
-
- # Create 24 loose refs, which should not yet cause us to repack.
- printf "create refs/heads/loose-%d HEAD\n" $(test_seq 24) >stdin &&
- git update-ref --stdin <stdin &&
- git pack-refs --auto --all &&
- test_line_count = 101 .git/packed-refs &&
-
- # Create another handful of refs to cross the border.
- # Note that we explicitly do not check for strict
- # boundaries here, as this also depends on the size of
- # the object hash.
- printf "create refs/heads/addn-%d HEAD\n" $(test_seq 10) >stdin &&
- git update-ref --stdin <stdin &&
- git pack-refs --auto --all &&
- test_line_count = 135 .git/packed-refs
- )
- '
-done
-
-test_done
+. "$TEST_DIRECTORY"/pack-refs-tests.sh
diff --git a/t/t1300-config.sh b/t/t1300-config.sh
index f856821839..358d636379 100755
--- a/t/t1300-config.sh
+++ b/t/t1300-config.sh
@@ -9,6 +9,7 @@ GIT_TEST_DEFAULT_INITIAL_BRANCH_NAME=main
export GIT_TEST_DEFAULT_INITIAL_BRANCH_NAME
. ./test-lib.sh
+. "$TEST_DIRECTORY"/lib-terminal.sh
for mode in legacy subcommands
do
@@ -134,38 +135,39 @@ test_expect_success 'clear default config' '
rm -f .git/config
'
-cat > expect << EOF
+test_expect_success 'initial' '
+ cat >expect <<\EOF &&
[section]
penguin = little blue
EOF
-test_expect_success 'initial' '
git config ${mode_set} section.penguin "little blue" &&
test_cmp expect .git/config
'
-cat > expect << EOF
+test_expect_success 'mixed case' '
+ cat >expect <<\EOF &&
[section]
penguin = little blue
Movie = BadPhysics
EOF
-test_expect_success 'mixed case' '
git config ${mode_set} Section.Movie BadPhysics &&
test_cmp expect .git/config
'
-cat > expect << EOF
+test_expect_success 'similar section' '
+ cat >expect <<\EOF &&
[section]
penguin = little blue
Movie = BadPhysics
[Sections]
WhatEver = Second
EOF
-test_expect_success 'similar section' '
git config ${mode_set} Sections.WhatEver Second &&
test_cmp expect .git/config
'
-cat > expect << EOF
+test_expect_success 'uppercase section' '
+ cat >expect <<\EOF &&
[section]
penguin = little blue
Movie = BadPhysics
@@ -173,7 +175,6 @@ cat > expect << EOF
[Sections]
WhatEver = Second
EOF
-test_expect_success 'uppercase section' '
git config ${mode_set} SECTION.UPPERCASE true &&
test_cmp expect .git/config
'
@@ -186,7 +187,8 @@ test_expect_success 'replace with non-match (actually matching)' '
git config section.penguin "very blue" !kingpin
'
-cat > expect << EOF
+test_expect_success 'append comments' '
+ cat >expect <<\EOF &&
[section]
Movie = BadPhysics
UPPERCASE = true
@@ -198,8 +200,6 @@ cat > expect << EOF
[Sections]
WhatEver = Second
EOF
-
-test_expect_success 'append comments' '
git config --replace-all --comment="Pygoscelis papua" section.penguin gentoo &&
git config ${mode_set} --comment="find fish" section.disposition peckish &&
git config ${mode_set} --comment="#abc" section.foo bar &&
@@ -214,7 +214,9 @@ test_expect_success 'Prohibited LF in comment' '
test_must_fail git config ${mode_set} --comment="a${LF}b" section.k v
'
-test_expect_success 'non-match result' 'test_cmp expect .git/config'
+test_expect_success 'non-match result' '
+ test_cmp expect .git/config
+'
test_expect_success 'find mixed-case key by canonical name' '
test_cmp_config Second sections.whatever
@@ -265,14 +267,15 @@ test_expect_success 'unset with cont. lines' '
git config ${mode_unset} beta.baz
'
-cat > expect <<\EOF
-[alpha]
-bar = foo
-[beta]
-foo = bar
-EOF
-
-test_expect_success 'unset with cont. lines is correct' 'test_cmp expect .git/config'
+test_expect_success 'unset with cont. lines is correct' '
+ cat >expect <<-\EOF &&
+ [alpha]
+ bar = foo
+ [beta]
+ foo = bar
+ EOF
+ test_cmp expect .git/config
+'
cat > .git/config << EOF
[beta] ; silly comment # another comment
@@ -292,16 +295,15 @@ test_expect_success 'multiple unset' '
git config ${mode_unset_all} beta.haha
'
-cat > expect << EOF
+test_expect_success 'multiple unset is correct' '
+ cat >expect <<EOF &&
[beta] ; silly comment # another comment
-noIndent= sillyValue ; 'nother silly comment
+noIndent= sillyValue ; ${SQ}nother silly comment
# empty line
; comment
[nextSection] noNewline = ouch
EOF
-
-test_expect_success 'multiple unset is correct' '
test_cmp expect .git/config
'
@@ -318,37 +320,37 @@ test_expect_success '--replace-all' '
git config ${mode_replace_all} beta.haha gamma
'
-cat > expect << EOF
+test_expect_success 'all replaced' '
+ cat >expect <<EOF &&
[beta] ; silly comment # another comment
-noIndent= sillyValue ; 'nother silly comment
+noIndent= sillyValue ; ${SQ}nother silly comment
# empty line
; comment
haha = gamma
[nextSection] noNewline = ouch
EOF
-
-test_expect_success 'all replaced' '
test_cmp expect .git/config
'
-cat > expect << EOF
+test_expect_success 'really mean test' '
+ cat >expect <<EOF &&
[beta] ; silly comment # another comment
-noIndent= sillyValue ; 'nother silly comment
+noIndent= sillyValue ; ${SQ}nother silly comment
# empty line
; comment
haha = alpha
[nextSection] noNewline = ouch
EOF
-test_expect_success 'really mean test' '
git config ${mode_set} beta.haha alpha &&
test_cmp expect .git/config
'
-cat > expect << EOF
+test_expect_success 'really really mean test' '
+ cat >expect <<EOF &&
[beta] ; silly comment # another comment
-noIndent= sillyValue ; 'nother silly comment
+noIndent= sillyValue ; ${SQ}nother silly comment
# empty line
; comment
@@ -356,7 +358,6 @@ noIndent= sillyValue ; 'nother silly comment
[nextSection]
nonewline = wow
EOF
-test_expect_success 'really really mean test' '
git config ${mode_set} nextsection.nonewline wow &&
test_cmp expect .git/config
'
@@ -365,23 +366,24 @@ test_expect_success 'get value' '
test_cmp_config alpha beta.haha
'
-cat > expect << EOF
+test_expect_success 'unset' '
+ cat >expect <<EOF &&
[beta] ; silly comment # another comment
-noIndent= sillyValue ; 'nother silly comment
+noIndent= sillyValue ; ${SQ}nother silly comment
# empty line
; comment
[nextSection]
nonewline = wow
EOF
-test_expect_success 'unset' '
git config ${mode_unset} beta.haha &&
test_cmp expect .git/config
'
-cat > expect << EOF
+test_expect_success 'multivar' '
+ cat >expect <<EOF &&
[beta] ; silly comment # another comment
-noIndent= sillyValue ; 'nother silly comment
+noIndent= sillyValue ; ${SQ}nother silly comment
# empty line
; comment
@@ -389,7 +391,6 @@ noIndent= sillyValue ; 'nother silly comment
nonewline = wow
NoNewLine = wow2 for me
EOF
-test_expect_success 'multivar' '
git config nextsection.NoNewLine "wow2 for me" "for me$" &&
test_cmp expect .git/config
'
@@ -415,9 +416,10 @@ test_expect_success 'multi-valued get-all returns all' '
test_cmp expect actual
'
-cat > expect << EOF
+test_expect_success 'multivar replace' '
+ cat >expect <<EOF &&
[beta] ; silly comment # another comment
-noIndent= sillyValue ; 'nother silly comment
+noIndent= sillyValue ; ${SQ}nother silly comment
# empty line
; comment
@@ -425,7 +427,6 @@ noIndent= sillyValue ; 'nother silly comment
nonewline = wow3
NoNewLine = wow2 for me
EOF
-test_expect_success 'multivar replace' '
git config nextsection.nonewline "wow3" "wow$" &&
test_cmp expect .git/config
'
@@ -438,17 +439,16 @@ test_expect_success 'invalid unset' '
test_must_fail git config ${mode_unset} somesection.nonewline
'
-cat > expect << EOF
+test_expect_success 'multivar unset' '
+ cat >expect <<EOF &&
[beta] ; silly comment # another comment
-noIndent= sillyValue ; 'nother silly comment
+noIndent= sillyValue ; ${SQ}nother silly comment
# empty line
; comment
[nextSection]
NoNewLine = wow2 for me
EOF
-
-test_expect_success 'multivar unset' '
case "$mode" in
legacy)
git config --unset nextsection.nonewline "wow3$";;
@@ -458,17 +458,22 @@ test_expect_success 'multivar unset' '
test_cmp expect .git/config
'
-test_expect_success 'invalid key' 'test_must_fail git config inval.2key blabla'
+test_expect_success 'invalid key' '
+ test_must_fail git config inval.2key blabla
+'
-test_expect_success 'correct key' 'git config 123456.a123 987'
+test_expect_success 'correct key' '
+ git config 123456.a123 987
+'
test_expect_success 'hierarchical section' '
git config Version.1.2.3eX.Alpha beta
'
-cat > expect << EOF
+test_expect_success 'hierarchical section value' '
+ cat >expect <<EOF &&
[beta] ; silly comment # another comment
-noIndent= sillyValue ; 'nother silly comment
+noIndent= sillyValue ; ${SQ}nother silly comment
# empty line
; comment
@@ -479,65 +484,59 @@ noIndent= sillyValue ; 'nother silly comment
[Version "1.2.3eX"]
Alpha = beta
EOF
-
-test_expect_success 'hierarchical section value' '
test_cmp expect .git/config
'
-cat > expect << EOF
-beta.noindent=sillyValue
-nextsection.nonewline=wow2 for me
-123456.a123=987
-version.1.2.3eX.alpha=beta
-EOF
-
test_expect_success 'working --list' '
+ cat >expect <<-\EOF &&
+ beta.noindent=sillyValue
+ nextsection.nonewline=wow2 for me
+ 123456.a123=987
+ version.1.2.3eX.alpha=beta
+ EOF
git config ${mode_prefix}list > output &&
test_cmp expect output
'
+
test_expect_success '--list without repo produces empty output' '
git --git-dir=nonexistent config ${mode_prefix}list >output &&
test_must_be_empty output
'
-cat > expect << EOF
-beta.noindent
-nextsection.nonewline
-123456.a123
-version.1.2.3eX.alpha
-EOF
-
test_expect_success '--name-only --list' '
+ cat >expect <<-\EOF &&
+ beta.noindent
+ nextsection.nonewline
+ 123456.a123
+ version.1.2.3eX.alpha
+ EOF
git config ${mode_prefix}list --name-only >output &&
test_cmp expect output
'
-cat > expect << EOF
-beta.noindent sillyValue
-nextsection.nonewline wow2 for me
-EOF
-
test_expect_success '--get-regexp' '
+ cat >expect <<-\EOF &&
+ beta.noindent sillyValue
+ nextsection.nonewline wow2 for me
+ EOF
git config ${mode_get_regexp} in >output &&
test_cmp expect output
'
-cat > expect << EOF
-beta.noindent
-nextsection.nonewline
-EOF
-
test_expect_success '--name-only --get-regexp' '
+ cat >expect <<-\EOF &&
+ beta.noindent
+ nextsection.nonewline
+ EOF
git config ${mode_get_regexp} --name-only in >output &&
test_cmp expect output
'
-cat > expect << EOF
-wow2 for me
-wow4 for you
-EOF
-
test_expect_success '--add' '
+ cat >expect <<-\EOF &&
+ wow2 for me
+ wow4 for you
+ EOF
git config --add nextsection.nonewline "wow4 for you" &&
git config ${mode_get_all} nextsection.nonewline > output &&
test_cmp expect output
@@ -558,37 +557,32 @@ test_expect_success 'get variable with empty value' '
git config --get emptyvalue.variable ^$
'
-echo novalue.variable > expect
-
test_expect_success 'get-regexp variable with no value' '
+ echo novalue.variable >expect &&
git config ${mode_get_regexp} novalue > output &&
test_cmp expect output
'
-echo 'novalue.variable true' > expect
-
test_expect_success 'get-regexp --bool variable with no value' '
+ echo "novalue.variable true" >expect &&
git config ${mode_get_regexp} --bool novalue > output &&
test_cmp expect output
'
-echo 'emptyvalue.variable ' > expect
-
test_expect_success 'get-regexp variable with empty value' '
+ echo "emptyvalue.variable " >expect &&
git config ${mode_get_regexp} emptyvalue > output &&
test_cmp expect output
'
-echo true > expect
-
test_expect_success 'get bool variable with no value' '
+ echo true >expect &&
git config --bool novalue.variable > output &&
test_cmp expect output
'
-echo false > expect
-
test_expect_success 'get bool variable with empty value' '
+ echo false >expect &&
git config --bool emptyvalue.variable > output &&
test_cmp expect output
'
@@ -604,19 +598,19 @@ cat > .git/config << EOF
c = d
EOF
-cat > expect << EOF
+test_expect_success 'new section is partial match of another' '
+ cat >expect <<\EOF &&
[a.b]
c = d
[a]
x = y
EOF
-
-test_expect_success 'new section is partial match of another' '
git config a.x y &&
test_cmp expect .git/config
'
-cat > expect << EOF
+test_expect_success 'new variable inserts into proper section' '
+ cat >expect <<\EOF &&
[a.b]
c = d
[a]
@@ -625,8 +619,6 @@ cat > expect << EOF
[b]
x = y
EOF
-
-test_expect_success 'new variable inserts into proper section' '
git config b.x y &&
git config a.b c &&
test_cmp expect .git/config
@@ -642,11 +634,10 @@ cat > other-config << EOF
bahn = strasse
EOF
-cat > expect << EOF
-ein.bahn=strasse
-EOF
-
test_expect_success 'alternative GIT_CONFIG' '
+ cat >expect <<-\EOF &&
+ ein.bahn=strasse
+ EOF
GIT_CONFIG=other-config git config ${mode_prefix}list >output &&
test_cmp expect output
'
@@ -675,14 +666,13 @@ test_expect_success 'refer config from subdirectory' '
test_cmp_config -C x strasse --file=../other-config --get ein.bahn
'
-cat > expect << EOF
+test_expect_success '--set in alternative file' '
+ cat >expect <<\EOF &&
[ein]
bahn = strasse
[anwohner]
park = ausweis
EOF
-
-test_expect_success '--set in alternative file' '
git config --file=other-config anwohner.park ausweis &&
test_cmp expect other-config
'
@@ -730,7 +720,8 @@ test_expect_success 'rename another section' '
git config ${mode_prefix}rename-section branch."1 234 blabl/a" branch.drei
'
-cat > expect << EOF
+test_expect_success 'rename succeeded' '
+ cat >expect <<\EOF &&
# Hallo
#Bello
[branch "zwei"]
@@ -740,8 +731,6 @@ cat > expect << EOF
[branch "drei"]
weird
EOF
-
-test_expect_success 'rename succeeded' '
test_cmp expect .git/config
'
@@ -753,7 +742,8 @@ test_expect_success 'rename a section with a var on the same line' '
git config ${mode_prefix}rename-section branch.vier branch.zwei
'
-cat > expect << EOF
+test_expect_success 'rename succeeded' '
+ cat >expect <<\EOF &&
# Hallo
#Bello
[branch "zwei"]
@@ -765,8 +755,6 @@ weird
[branch "zwei"]
z = 1
EOF
-
-test_expect_success 'rename succeeded' '
test_cmp expect .git/config
'
@@ -816,32 +804,29 @@ test_expect_success 'remove section' '
git config ${mode_prefix}remove-section branch.zwei
'
-cat > expect << EOF
+test_expect_success 'section was removed properly' '
+ cat >expect <<\EOF &&
# Hallo
#Bello
[branch "drei"]
weird
EOF
-
-test_expect_success 'section was removed properly' '
test_cmp expect .git/config
'
-cat > expect << EOF
+test_expect_success 'section ending' '
+ cat >expect <<\EOF &&
[gitcvs]
enabled = true
dbname = %Ggitcvs2.%a.%m.sqlite
[gitcvs "ext"]
dbname = %Ggitcvs1.%a.%m.sqlite
EOF
-
-test_expect_success 'section ending' '
rm -f .git/config &&
git config ${mode_set} gitcvs.enabled true &&
git config ${mode_set} gitcvs.ext.dbname %Ggitcvs1.%a.%m.sqlite &&
git config ${mode_set} gitcvs.dbname %Ggitcvs2.%a.%m.sqlite &&
test_cmp expect .git/config
-
'
test_expect_success numbers '
@@ -885,19 +870,17 @@ test_expect_success 'invalid stdin config' '
test_grep "bad config line 1 in standard input" output
'
-cat > expect << EOF
-true
-false
-true
-false
-true
-false
-true
-false
-EOF
-
test_expect_success bool '
-
+ cat >expect <<-\EOF &&
+ true
+ false
+ true
+ false
+ true
+ false
+ true
+ false
+ EOF
git config ${mode_set} bool.true1 01 &&
git config ${mode_set} bool.true2 -1 &&
git config ${mode_set} bool.true3 YeS &&
@@ -912,18 +895,20 @@ test_expect_success bool '
git config --bool --get bool.true$i >>result &&
git config --bool --get bool.false$i >>result || return 1
done &&
- test_cmp expect result'
+ test_cmp expect result
+'
test_expect_success 'invalid bool (--get)' '
-
git config ${mode_set} bool.nobool foobar &&
- test_must_fail git config --bool --get bool.nobool'
+ test_must_fail git config --bool --get bool.nobool
+'
test_expect_success 'invalid bool (set)' '
+ test_must_fail git config --bool bool.nobool foobar
+'
- test_must_fail git config --bool bool.nobool foobar'
-
-cat > expect <<\EOF
+test_expect_success 'set --bool' '
+ cat >expect <<\EOF &&
[bool]
true1 = true
true2 = true
@@ -934,9 +919,6 @@ cat > expect <<\EOF
false3 = false
false4 = false
EOF
-
-test_expect_success 'set --bool' '
-
rm -f .git/config &&
git config --bool bool.true1 01 &&
git config --bool bool.true2 -1 &&
@@ -948,15 +930,13 @@ test_expect_success 'set --bool' '
git config --bool bool.false4 FALSE &&
test_cmp expect .git/config'
-cat > expect <<\EOF
+test_expect_success 'set --int' '
+ cat >expect <<\EOF &&
[int]
val1 = 1
val2 = -1
val3 = 5242880
EOF
-
-test_expect_success 'set --int' '
-
rm -f .git/config &&
git config --int int.val1 01 &&
git config --int int.val2 -1 &&
@@ -994,7 +974,8 @@ test_expect_success 'get --bool-or-int' '
test_cmp expect actual
'
-cat >expect <<\EOF
+test_expect_success 'set --bool-or-int' '
+ cat >expect <<\EOF &&
[bool]
true1 = true
false1 = false
@@ -1005,8 +986,6 @@ cat >expect <<\EOF
int2 = 1
int3 = -1
EOF
-
-test_expect_success 'set --bool-or-int' '
rm -f .git/config &&
git config --bool-or-int bool.true1 true &&
git config --bool-or-int bool.false1 false &&
@@ -1018,44 +997,42 @@ test_expect_success 'set --bool-or-int' '
test_cmp expect .git/config
'
-cat >expect <<\EOF
+test_expect_success !MINGW 'set --path' '
+ cat >expect <<\EOF &&
[path]
home = ~/
normal = /dev/null
trailingtilde = foo~
EOF
-
-test_expect_success !MINGW 'set --path' '
rm -f .git/config &&
git config --path path.home "~/" &&
git config --path path.normal "/dev/null" &&
git config --path path.trailingtilde "foo~" &&
- test_cmp expect .git/config'
+ test_cmp expect .git/config
+'
if test_have_prereq !MINGW && test "${HOME+set}"
then
test_set_prereq HOMEVAR
fi
-cat >expect <<EOF
-$HOME/
-/dev/null
-foo~
-EOF
-
test_expect_success HOMEVAR 'get --path' '
+ cat >expect <<-EOF &&
+ $HOME/
+ /dev/null
+ foo~
+ EOF
git config --get --path path.home > result &&
git config --get --path path.normal >> result &&
git config --get --path path.trailingtilde >> result &&
test_cmp expect result
'
-cat >expect <<\EOF
-/dev/null
-foo~
-EOF
-
test_expect_success !MINGW 'get --path copes with unset $HOME' '
+ cat >expect <<-\EOF &&
+ /dev/null
+ foo~
+ EOF
(
sane_unset HOME &&
test_must_fail git config --get --path path.home \
@@ -1107,17 +1084,35 @@ test_expect_success 'get --type=color' '
rm .git/config &&
git config ${mode_set} foo.color "red" &&
git config --get --type=color foo.color >actual.raw &&
+ git config get --type=color foo.color >actual-subcommand.raw &&
+ test_cmp actual.raw actual-subcommand.raw &&
+ test_decode_color <actual.raw >actual &&
+ echo "<RED>" >expect &&
+ test_cmp expect actual
+'
+
+test_expect_success 'get --type=color with default value only' '
+ git config --get-color "" "red" >actual.raw &&
+ test_decode_color <actual.raw >actual &&
+ echo "<RED>" >expect &&
+ test_cmp expect actual &&
+ git config get --type=color --default="red" "" >actual-subcommand.raw &&
+ test_cmp actual.raw actual-subcommand.raw
+'
+
+test_expect_success TTY 'get --type=color does not use a pager' '
+ test_config core.pager "echo foobar" &&
+ test_terminal git config get --type=color --default="red" "" >actual.raw &&
test_decode_color <actual.raw >actual &&
echo "<RED>" >expect &&
test_cmp expect actual
'
-cat >expect << EOF
+test_expect_success 'set --type=color' '
+ cat >expect <<\EOF &&
[foo]
color = red
EOF
-
-test_expect_success 'set --type=color' '
rm .git/config &&
git config --type=color foo.color "red" &&
test_cmp expect .git/config
@@ -1133,14 +1128,14 @@ test_expect_success 'set --type=color barfs on non-color' '
test_grep "cannot parse color" error
'
-cat > expect << EOF
+test_expect_success 'quoting' '
+ cat >expect <<\EOF &&
[quote]
leading = " test"
ending = "test "
semicolon = "test;test"
hash = "test#test"
EOF
-test_expect_success 'quoting' '
rm -f .git/config &&
git config ${mode_set} quote.leading " test" &&
git config ${mode_set} quote.ending "test " &&
@@ -1151,10 +1146,13 @@ test_expect_success 'quoting' '
test_expect_success 'key with newline' '
test_must_fail git config ${mode_get} "key.with
-newline" 123'
+newline" 123
+'
-test_expect_success 'value with newline' 'git config ${mode_set} key.sub value.with\\\
-newline'
+test_expect_success 'value with newline' '
+ git config ${mode_set} key.sub value.with\\\
+newline
+'
cat > .git/config <<\EOF
[section]
@@ -1166,13 +1164,12 @@ inued
inued"
EOF
-cat > expect <<\EOF
-section.continued=continued
-section.noncont=not continued
-section.quotecont=cont;inued
-EOF
-
test_expect_success 'value continued on next line' '
+ cat >expect <<-\EOF &&
+ section.continued=continued
+ section.noncont=not continued
+ section.quotecont=cont;inued
+ EOF
git config ${mode_prefix}list > result &&
test_cmp expect result
'
@@ -1365,7 +1362,6 @@ test_expect_success 'multiple git -c appends config' '
'
test_expect_success 'last one wins: two level vars' '
-
# sec.var and sec.VAR are the same variable, as the first
# and the last level of a configuration variable name is
# case insensitive.
@@ -1384,7 +1380,6 @@ test_expect_success 'last one wins: two level vars' '
'
test_expect_success 'last one wins: three level vars' '
-
# v.a.r and v.A.r are not the same variable, as the middle
# level of a three-level configuration variable name is
# case sensitive.
diff --git a/t/t1421-reflog-write.sh b/t/t1421-reflog-write.sh
index 46df64c176..603ec3f6ed 100755
--- a/t/t1421-reflog-write.sh
+++ b/t/t1421-reflog-write.sh
@@ -108,6 +108,42 @@ test_expect_success 'simple writes' '
)
'
+test_expect_success 'uses user.name and user.email config' '
+ test_when_finished "rm -rf repo" &&
+ git init repo &&
+ (
+ cd repo &&
+ test_commit initial &&
+ COMMIT_OID=$(git rev-parse HEAD) &&
+
+ sane_unset GIT_COMMITTER_NAME &&
+ sane_unset GIT_COMMITTER_EMAIL &&
+ git config --local user.name "Author" &&
+ git config --local user.email "a@uth.or" &&
+ git reflog write refs/heads/something $ZERO_OID $COMMIT_OID first &&
+ test_reflog_matches . refs/heads/something <<-EOF
+ $ZERO_OID $COMMIT_OID Author <a@uth.or> $GIT_COMMITTER_DATE first
+ EOF
+ )
+'
+
+test_expect_success 'environment variables take precedence over config' '
+ test_when_finished "rm -rf repo" &&
+ git init repo &&
+ (
+ cd repo &&
+ test_commit initial &&
+ COMMIT_OID=$(git rev-parse HEAD) &&
+
+ git config --local user.name "Author" &&
+ git config --local user.email "a@uth.or" &&
+ git reflog write refs/heads/something $ZERO_OID $COMMIT_OID first &&
+ test_reflog_matches . refs/heads/something <<-EOF
+ $ZERO_OID $COMMIT_OID $SIGNATURE first
+ EOF
+ )
+'
+
test_expect_success 'can write to root ref' '
test_when_finished "rm -rf repo" &&
git init repo &&
diff --git a/t/t1463-refs-optimize.sh b/t/t1463-refs-optimize.sh
new file mode 100755
index 0000000000..c11c905d79
--- /dev/null
+++ b/t/t1463-refs-optimize.sh
@@ -0,0 +1,17 @@
+#!/bin/sh
+
+test_description='git refs optimize should not change the branch semantic
+
+This test runs git refs optimize and git show-ref and checks that the branch
+semantic is still the same.
+'
+
+GIT_TEST_DEFAULT_INITIAL_BRANCH_NAME=main
+export GIT_TEST_DEFAULT_INITIAL_BRANCH_NAME
+GIT_TEST_DEFAULT_REF_FORMAT=files
+export GIT_TEST_DEFAULT_REF_FORMAT
+
+. ./test-lib.sh
+
+pack_refs='refs optimize'
+. "$TEST_DIRECTORY"/pack-refs-tests.sh
diff --git a/t/t6302-for-each-ref-filter.sh b/t/t6302-for-each-ref-filter.sh
index 9b80ea1e3b..7f060d97bf 100755
--- a/t/t6302-for-each-ref-filter.sh
+++ b/t/t6302-for-each-ref-filter.sh
@@ -754,4 +754,69 @@ test_expect_success 'start after used with custom sort order' '
test_cmp expect actual
'
+test_expect_success 'start after with packed refs' '
+ test_when_finished "rm -rf repo" &&
+ git init repo &&
+ (
+ cd repo &&
+ test_commit default &&
+
+ git update-ref --stdin <<-\EOF &&
+ create refs/heads/branch @
+ create refs/heads/side @
+ create refs/odd/spot @
+ create refs/tags/one @
+ create refs/tags/two @
+ commit
+ EOF
+
+ cat >expect <<-\EOF &&
+ refs/tags/default
+ refs/tags/one
+ refs/tags/two
+ EOF
+
+ git pack-refs --all &&
+ git for-each-ref --format="%(refname)" --start-after=refs/odd/spot >actual &&
+ test_cmp expect actual
+ )
+'
+
+test_expect_success 'start after with packed refs and some loose refs' '
+ test_when_finished "rm -rf repo" &&
+ git init repo &&
+ (
+ cd repo &&
+ test_commit default &&
+
+ git update-ref --stdin <<-\EOF &&
+ create refs/heads/branch @
+ create refs/heads/side @
+ create refs/odd/spot @
+ create refs/tags/one @
+ create refs/tags/two @
+ commit
+ EOF
+
+ git pack-refs --all &&
+
+ git update-ref --stdin <<-\EOF &&
+ create refs/heads/foo @
+ create refs/odd/tee @
+ commit
+ EOF
+
+ cat >expect <<-\EOF &&
+ refs/odd/tee
+ refs/tags/default
+ refs/tags/one
+ refs/tags/two
+ EOF
+
+
+ git for-each-ref --format="%(refname)" --start-after=refs/odd/spot >actual &&
+ test_cmp expect actual
+ )
+'
+
test_done
diff --git a/t/t9305-fast-import-signatures.sh b/t/t9305-fast-import-signatures.sh
new file mode 100755
index 0000000000..c2b4271658
--- /dev/null
+++ b/t/t9305-fast-import-signatures.sh
@@ -0,0 +1,106 @@
+#!/bin/sh
+
+test_description='git fast-import --signed-commits=<mode>'
+
+GIT_TEST_DEFAULT_INITIAL_BRANCH_NAME=main
+
+. ./test-lib.sh
+. "$TEST_DIRECTORY/lib-gpg.sh"
+
+test_expect_success 'set up unsigned initial commit and import repo' '
+ test_commit first &&
+ git init new
+'
+
+test_expect_success GPG 'set up OpenPGP signed commit' '
+ git checkout -b openpgp-signing main &&
+ echo "Content for OpenPGP signing." >file-sign &&
+ git add file-sign &&
+ git commit -S -m "OpenPGP signed commit" &&
+ OPENPGP_SIGNING=$(git rev-parse --verify openpgp-signing)
+'
+
+test_expect_success GPG 'import OpenPGP signature with --signed-commits=verbatim' '
+ git fast-export --signed-commits=verbatim openpgp-signing >output &&
+ git -C new fast-import --quiet --signed-commits=verbatim <output >log 2>&1 &&
+ IMPORTED=$(git -C new rev-parse --verify refs/heads/openpgp-signing) &&
+ test $OPENPGP_SIGNING = $IMPORTED &&
+ test_must_be_empty log
+'
+
+test_expect_success GPGSM 'set up X.509 signed commit' '
+ git checkout -b x509-signing main &&
+ test_config gpg.format x509 &&
+ test_config user.signingkey $GIT_COMMITTER_EMAIL &&
+ echo "Content for X.509 signing." >file-sign &&
+ git add file-sign &&
+ git commit -S -m "X.509 signed commit" &&
+ X509_SIGNING=$(git rev-parse HEAD)
+'
+
+test_expect_success GPGSM 'import X.509 signature fails with --signed-commits=abort' '
+ git fast-export --signed-commits=verbatim x509-signing >output &&
+ test_must_fail git -C new fast-import --quiet --signed-commits=abort <output
+'
+
+test_expect_success GPGSM 'import X.509 signature with --signed-commits=warn-verbatim' '
+ git -C new fast-import --quiet --signed-commits=warn-verbatim <output >log 2>&1 &&
+ IMPORTED=$(git -C new rev-parse --verify refs/heads/x509-signing) &&
+ test $X509_SIGNING = $IMPORTED &&
+ test_grep "importing a commit signature" log
+'
+
+test_expect_success GPGSSH 'set up SSH signed commit' '
+ git checkout -b ssh-signing main &&
+ test_config gpg.format ssh &&
+ test_config user.signingkey "${GPGSSH_KEY_PRIMARY}" &&
+ echo "Content for SSH signing." >file-sign &&
+ git add file-sign &&
+ git commit -S -m "SSH signed commit" &&
+ SSH_SIGNING=$(git rev-parse HEAD)
+'
+
+test_expect_success GPGSSH 'strip SSH signature with --signed-commits=strip' '
+ git fast-export --signed-commits=verbatim ssh-signing >output &&
+ git -C new fast-import --quiet --signed-commits=strip <output >log 2>&1 &&
+ IMPORTED=$(git -C new rev-parse --verify refs/heads/ssh-signing) &&
+ test $SSH_SIGNING != $IMPORTED &&
+ git -C new cat-file commit "$IMPORTED" >actual &&
+ test_grep ! -E "^gpgsig" actual &&
+ test_must_be_empty log
+'
+
+test_expect_success GPG 'setup a commit with dual OpenPGP signatures on its SHA-1 and SHA-256 formats' '
+ # Create a signed SHA-256 commit
+ git init --object-format=sha256 explicit-sha256 &&
+ git -C explicit-sha256 config extensions.compatObjectFormat sha1 &&
+ git -C explicit-sha256 checkout -b dual-signed &&
+ test_commit -C explicit-sha256 A &&
+ echo B >explicit-sha256/B &&
+ git -C explicit-sha256 add B &&
+ test_tick &&
+ git -C explicit-sha256 commit -S -m "signed" B &&
+ SHA256_B=$(git -C explicit-sha256 rev-parse dual-signed) &&
+
+ # Create the corresponding SHA-1 commit
+ SHA1_B=$(git -C explicit-sha256 rev-parse --output-object-format=sha1 dual-signed) &&
+
+ # Check that the resulting SHA-1 commit has both signatures
+ git -C explicit-sha256 cat-file -p $SHA1_B >out &&
+ test_grep -E "^gpgsig " out &&
+ test_grep -E "^gpgsig-sha256 " out
+'
+
+test_expect_success GPG 'strip both OpenPGP signatures with --signed-commits=warn-strip' '
+ git -C explicit-sha256 fast-export --signed-commits=verbatim dual-signed >output &&
+ test_grep -E "^gpgsig sha1 openpgp" output &&
+ test_grep -E "^gpgsig sha256 openpgp" output &&
+ git -C new fast-import --quiet --signed-commits=warn-strip <output >log 2>&1 &&
+ git -C new cat-file commit refs/heads/dual-signed >actual &&
+ test_grep ! -E "^gpgsig " actual &&
+ test_grep ! -E "^gpgsig-sha256 " actual &&
+ test_grep "stripping a commit signature" log >out &&
+ test_line_count = 2 out
+'
+
+test_done
diff --git a/transport-helper.c b/transport-helper.c
index 0789e5bca5..4d95d84f9e 100644
--- a/transport-helper.c
+++ b/transport-helper.c
@@ -450,7 +450,7 @@ static int fetch_with_fetch(struct transport *transport,
}
strbuf_release(&buf);
- reprepare_packed_git(the_repository);
+ odb_reprepare(the_repository->objects);
return 0;
}
diff --git a/usage.c b/usage.c
index 4c245ba0cb..527edb1e79 100644
--- a/usage.c
+++ b/usage.c
@@ -7,6 +7,7 @@
#include "git-compat-util.h"
#include "gettext.h"
#include "trace2.h"
+#include "strbuf.h"
static void vfreportf(FILE *f, const char *prefix, const char *err, va_list params)
{
@@ -376,14 +377,32 @@ void bug_fl(const char *file, int line, const char *fmt, ...)
va_end(ap);
}
-NORETURN void you_still_use_that(const char *command_name)
+
+NORETURN void you_still_use_that(const char *command_name, const char *hint)
{
+ struct strbuf percent_encoded = STRBUF_INIT;
+ strbuf_add_percentencode(&percent_encoded,
+ command_name,
+ STRBUF_ENCODE_SLASH);
+
+ fprintf(stderr,
+ _("'%s' is nominated for removal.\n"), command_name);
+
+ if (hint)
+ fputs(hint, stderr);
+
fprintf(stderr,
- _("'%s' is nominated for removal.\n"
- "If you still use this command, please add an extra\n"
- "option, '--i-still-use-this', on the command line\n"
- "and let us know you still use it by sending an e-mail\n"
- "to <git@vger.kernel.org>. Thanks.\n"),
- command_name);
+ _("If you still use this command, here's what you can do:\n"
+ "\n"
+ "- read https://git-scm.com/docs/BreakingChanges.html\n"
+ "- check if anyone has discussed this on the mailing\n"
+ " list and if they came up with something that can\n"
+ " help you: https://lore.kernel.org/git/?q=%s\n"
+ "- send an email to <git@vger.kernel.org> to let us\n"
+ " know that you still use this command and were unable\n"
+ " to determine a suitable replacement\n"
+ "\n"),
+ percent_encoded.buf);
+ strbuf_release(&percent_encoded);
die(_("refusing to run without --i-still-use-this"));
}
diff --git a/varint.c b/varint.c
index 409c4977a1..03cd54416b 100644
--- a/varint.c
+++ b/varint.c
@@ -1,11 +1,11 @@
#include "git-compat-util.h"
#include "varint.h"
-uintmax_t decode_varint(const unsigned char **bufp)
+uint64_t decode_varint(const unsigned char **bufp)
{
const unsigned char *buf = *bufp;
unsigned char c = *buf++;
- uintmax_t val = c & 127;
+ uint64_t val = c & 127;
while (c & 128) {
val += 1;
if (!val || MSB(val, 7))
@@ -17,7 +17,7 @@ uintmax_t decode_varint(const unsigned char **bufp)
return val;
}
-int encode_varint(uintmax_t value, unsigned char *buf)
+uint8_t encode_varint(uint64_t value, unsigned char *buf)
{
unsigned char varint[16];
unsigned pos = sizeof(varint) - 1;
diff --git a/varint.h b/varint.h
index f78bb0ca52..eb401935bd 100644
--- a/varint.h
+++ b/varint.h
@@ -1,7 +1,7 @@
#ifndef VARINT_H
#define VARINT_H
-int encode_varint(uintmax_t, unsigned char *);
-uintmax_t decode_varint(const unsigned char **);
+uint8_t encode_varint(uint64_t, unsigned char *);
+uint64_t decode_varint(const unsigned char **);
#endif /* VARINT_H */