From 4d16656d9376c1e39b89252434920a2003012c80 Mon Sep 17 00:00:00 2001 From: Patrick Date: Fri, 28 Nov 2025 12:08:16 +0100 Subject: [PATCH] Initial commit --- GIT-VERSION-FILE | 1 + GIT-VERSION-FILE.in | 1 + GIT-VERSION-GEN | 106 + Makefile | 4017 ++++++++++ config.mak.uname | 794 ++ gitweb/GITWEB-BUILD-OPTIONS.in | 24 + gitweb/INSTALL | 328 + gitweb/Makefile | 146 + gitweb/README | 70 + gitweb/generate-gitweb-cgi.sh | 47 + gitweb/generate-gitweb-js.sh | 12 + gitweb/gitweb.perl | 8490 ++++++++++++++++++++++ gitweb/meson.build | 89 + gitweb/static/git-favicon.png | Bin 0 -> 115 bytes gitweb/static/git-logo.png | Bin 0 -> 207 bytes gitweb/static/gitweb.css | 686 ++ gitweb/static/js/README | 20 + gitweb/static/js/adjust-timezone.js | 330 + gitweb/static/js/blame_incremental.js | 692 ++ gitweb/static/js/javascript-detection.js | 43 + gitweb/static/js/lib/common-lib.js | 224 + gitweb/static/js/lib/cookies.js | 114 + gitweb/static/js/lib/datetime.js | 176 + shared.mak | 132 + 24 files changed, 16542 insertions(+) create mode 100644 GIT-VERSION-FILE create mode 100644 GIT-VERSION-FILE.in create mode 100755 GIT-VERSION-GEN create mode 100644 Makefile create mode 100644 config.mak.uname create mode 100644 gitweb/GITWEB-BUILD-OPTIONS.in create mode 100644 gitweb/INSTALL create mode 100644 gitweb/Makefile create mode 100644 gitweb/README create mode 100755 gitweb/generate-gitweb-cgi.sh create mode 100755 gitweb/generate-gitweb-js.sh create mode 100755 gitweb/gitweb.perl create mode 100644 gitweb/meson.build create mode 100644 gitweb/static/git-favicon.png create mode 100644 gitweb/static/git-logo.png create mode 100644 gitweb/static/gitweb.css create mode 100644 gitweb/static/js/README create mode 100644 gitweb/static/js/adjust-timezone.js create mode 100644 gitweb/static/js/blame_incremental.js create mode 100644 gitweb/static/js/javascript-detection.js create mode 100644 gitweb/static/js/lib/common-lib.js create mode 100644 gitweb/static/js/lib/cookies.js create mode 100644 gitweb/static/js/lib/datetime.js create mode 100644 shared.mak diff --git a/GIT-VERSION-FILE b/GIT-VERSION-FILE new file mode 100644 index 0000000..37c6caa --- /dev/null +++ b/GIT-VERSION-FILE @@ -0,0 +1 @@ +GIT_VERSION=2.52.GIT diff --git a/GIT-VERSION-FILE.in b/GIT-VERSION-FILE.in new file mode 100644 index 0000000..3789a48 --- /dev/null +++ b/GIT-VERSION-FILE.in @@ -0,0 +1 @@ +GIT_VERSION=@GIT_VERSION@ diff --git a/GIT-VERSION-GEN b/GIT-VERSION-GEN new file mode 100755 index 0000000..1f7af03 --- /dev/null +++ b/GIT-VERSION-GEN @@ -0,0 +1,106 @@ +#!/bin/sh + +DEF_VER=v2.52.GIT + +LF=' +' + +if test "$#" -lt 2 || test "$#" -gt 3 +then + echo >&2 "USAGE: $0 (--format=|) []" + exit 1 +fi + +SOURCE_DIR="$1" + +case "$2" in +--format=*) + INPUT="${2#--format=}" + ;; +*) + if ! test -f "$2" + then + echo >&2 "Input is not a file: $2" + exit 1 + fi + INPUT=$(cat "$2") + ;; +esac + +OUTPUT="$3" + +# Protect us from reading Git version information outside of the Git directory +# in case it is not a repository itself, but embedded in an unrelated +# repository. +GIT_CEILING_DIRECTORIES="$SOURCE_DIR/.." +export GIT_CEILING_DIRECTORIES + +if test -z "$GIT_VERSION" +then + # First see if there is a version file (included in release tarballs), + # then try git-describe, then default. 
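+	# For example (illustrative values only), a describe output such as
+	# "v2.52.0-123-gdeadbeef" ends up as "2.52.0.123.gdeadbeef" once the
+	# dashes are rewritten as dots and the leading "v" is stripped below.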
+ if test -f "$SOURCE_DIR"/version + then + VN=$(cat "$SOURCE_DIR"/version) || VN="$DEF_VER" + elif { + test -d "$SOURCE_DIR/.git" || + test -d "${GIT_DIR:-.git}" || + test -f "$SOURCE_DIR"/.git; + } && + VN=$(git -C "$SOURCE_DIR" describe --dirty --match="v[0-9]*" 2>/dev/null) && + case "$VN" in + *$LF*) (exit 1) ;; + esac + then + VN=$(echo "$VN" | sed -e 's/-/./g'); + else + VN="$DEF_VER" + fi + + GIT_VERSION=$(expr "$VN" : v*'\(.*\)') +fi + +if test -z "$GIT_BUILT_FROM_COMMIT" +then + GIT_BUILT_FROM_COMMIT=$(git -C "$SOURCE_DIR" rev-parse -q --verify HEAD 2>/dev/null) +fi + +if test -z "$GIT_DATE" +then + GIT_DATE=$(git -C "$SOURCE_DIR" show --quiet --format='%as' 2>/dev/null) +fi + +if test -z "$GIT_USER_AGENT" +then + GIT_USER_AGENT="git/$GIT_VERSION" +fi + +# While released Git versions only have three numbers, development builds also +# have a fourth number that corresponds to the number of patches since the last +# release. +read GIT_MAJOR_VERSION GIT_MINOR_VERSION GIT_MICRO_VERSION GIT_PATCH_LEVEL trailing <"$OUTPUT".$$+ + if ! test -f "$OUTPUT" || ! cmp "$OUTPUT".$$+ "$OUTPUT" >/dev/null + then + mv "$OUTPUT".$$+ "$OUTPUT" + else + rm "$OUTPUT".$$+ + fi +fi diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..2a67546 --- /dev/null +++ b/Makefile @@ -0,0 +1,4017 @@ +# The default target of this Makefile is... +all:: + +# Import tree-wide shared Makefile behavior and libraries +include shared.mak + +# == Makefile defines == +# +# These defines change the behavior of the Makefile itself, but have +# no impact on what it builds: +# +# Define V=1 to have a more verbose compile. +# +# == Portability and optional library defines == +# +# These defines indicate what Git can expect from the OS, what +# libraries are available etc. Much of this is auto-detected in +# config.mak.uname, or in configure.ac when using the optional "make +# configure && ./configure" (see INSTALL). +# +# Define SHELL_PATH to a POSIX shell if your /bin/sh is broken. +# +# Define SANE_TOOL_PATH to a colon-separated list of paths to prepend +# to PATH if your tools in /usr/bin are broken. +# +# Define SOCKLEN_T to a suitable type (such as 'size_t') if your +# system headers do not define a socklen_t type. +# +# Define INLINE to a suitable substitute (such as '__inline' or '') if git +# fails to compile with errors about undefined inline functions or similar. +# +# Define SNPRINTF_RETURNS_BOGUS if you are on a system which snprintf() +# or vsnprintf() return -1 instead of number of characters which would +# have been written to the final string if enough space had been available. +# +# Define FREAD_READS_DIRECTORIES if you are on a system which succeeds +# when attempting to read from an fopen'ed directory (or even to fopen +# it at all). +# +# Define OPEN_RETURNS_EINTR if your open() system call may return EINTR +# when a signal is received (as opposed to restarting). +# +# Define NO_OPENSSL environment variable if you do not have OpenSSL. +# +# Define HAVE_ALLOCA_H if you have working alloca(3) defined in that header. +# +# Define HAVE_PATHS_H if you have paths.h and want to use the default PATH +# it specifies. +# +# Define NO_D_TYPE_IN_DIRENT if your platform defines DT_UNKNOWN but lacks +# d_type in struct dirent (Cygwin 1.5, fixed in Cygwin 1.7). +# +# Define HAVE_STRINGS_H if you have strings.h and need it for strcasecmp. +# +# Define NO_STRCASESTR if you don't have strcasestr. +# +# Define NO_MEMMEM if you don't have memmem. +# +# Define NO_GETPAGESIZE if you don't have getpagesize. 
+# +# Define NO_STRLCPY if you don't have strlcpy. +# +# Define NO_STRTOUMAX if you don't have both strtoimax and strtoumax in the +# C library. If your compiler also does not support long long or does not have +# strtoull, define NO_STRTOULL. +# +# Define NO_SETENV if you don't have setenv in the C library. +# +# Define NO_UNSETENV if you don't have unsetenv in the C library. +# +# Define NO_MKDTEMP if you don't have mkdtemp in the C library. +# +# Define MKDIR_WO_TRAILING_SLASH if your mkdir() can't deal with trailing slash. +# +# Define NO_GECOS_IN_PWENT if you don't have pw_gecos in struct passwd +# in the C library. +# +# Define NO_LIBGEN_H if you don't have libgen.h. +# +# Define NEEDS_LIBGEN if your libgen needs -lgen when linking +# +# Define NO_SYS_SELECT_H if you don't have sys/select.h. +# +# Define NO_SYMLINK_HEAD if you never want .git/HEAD to be a symbolic link. +# Enable it on Windows. By default, symrefs are still used. +# +# Define NO_SVN_TESTS if you want to skip time-consuming SVN interoperability +# tests. These tests take up a significant amount of the total test time +# but are not needed unless you plan to talk to SVN repos. +# +# Define NO_FINK if you are building on Darwin/Mac OS X, have Fink +# installed in /sw, but don't want GIT to link against any libraries +# installed there. If defined you may specify your own (or Fink's) +# include directories and library directories by defining CFLAGS +# and LDFLAGS appropriately. +# +# Define NO_DARWIN_PORTS if you are building on Darwin/Mac OS X, +# have DarwinPorts installed in /opt/local, but don't want GIT to +# link against any libraries installed there. If defined you may +# specify your own (or DarwinPort's) include directories and +# library directories by defining CFLAGS and LDFLAGS appropriately. +# +# Define NO_APPLE_COMMON_CRYPTO if you are building on Darwin/Mac OS X +# and do not want to use Apple's CommonCrypto library. This allows you +# to provide your own OpenSSL library, for example from MacPorts. +# +# Define NEEDS_CRYPTO_WITH_SSL if you need -lcrypto when using -lssl (Darwin). +# +# Define NEEDS_SSL_WITH_CRYPTO if you need -lssl when using -lcrypto (Darwin). +# +# Define NEEDS_LIBICONV if linking with libc is not enough (Darwin). +# +# Define NEEDS_LIBINTL_BEFORE_LIBICONV if you need libintl before libiconv. +# +# Define NO_INTPTR_T if you don't have intptr_t or uintptr_t. +# +# Define NEEDS_SOCKET if linking with libc is not enough (SunOS, +# Patrick Mauritz). +# +# Define NEEDS_RESOLV if linking with -lnsl and/or -lsocket is not enough. +# Notably on Solaris hstrerror resides in libresolv and on Solaris 7 +# inet_ntop and inet_pton additionally reside there. +# +# Define NO_MMAP if you want to avoid mmap. +# +# Define MMAP_PREVENTS_DELETE if a file that is currently mmapped cannot be +# deleted or cannot be replaced using rename(). +# +# Define NO_POLL_H if you don't have poll.h. +# +# Define NO_SYS_POLL_H if you don't have sys/poll.h. +# +# Define NO_POLL if you do not have or don't want to use poll(). +# This also implies NO_POLL_H and NO_SYS_POLL_H. +# +# Define NEEDS_SYS_PARAM_H if you need to include sys/param.h to compile, +# *PLEASE* REPORT to git@vger.kernel.org if your platform needs this; +# we want to know more about the issue. +# +# Define NO_PTHREADS if you do not have or do not want to use Pthreads. +# +# Define NO_PREAD if you have a problem with pread() system call (e.g. +# cygwin1.dll before v1.5.22). 
+# +# Define NO_SETITIMER if you don't have setitimer() +# +# Define NO_STRUCT_ITIMERVAL if you don't have struct itimerval +# This also implies NO_SETITIMER +# +# Define NO_FAST_WORKING_DIRECTORY if accessing objects in pack files is +# generally faster on your platform than accessing the working directory. +# +# Define NO_TRUSTABLE_FILEMODE if your filesystem may claim to support +# the executable mode bit, but doesn't really do so. +# +# Define CSPRNG_METHOD to "arc4random" if your system has arc4random and +# arc4random_buf, "libbsd" if your system has those functions from libbsd, +# "getrandom" if your system has getrandom, "getentropy" if your system has +# getentropy, "rtlgenrandom" for RtlGenRandom (Windows only), or "openssl" if +# you'd want to use the OpenSSL CSPRNG. You may set multiple options with +# spaces, in which case a suitable option will be chosen. If unset or set to +# anything else, defaults to using "/dev/urandom". +# +# Define NEEDS_MODE_TRANSLATION if your OS strays from the typical file type +# bits in mode values (e.g. z/OS defines I_SFMT to 0xFF000000 as opposed to the +# usual 0xF000). +# +# Define NO_IPV6 if you lack IPv6 support and getaddrinfo(). +# +# Define NO_UNIX_SOCKETS if your system does not offer unix sockets. +# +# Define NO_SOCKADDR_STORAGE if your platform does not have struct +# sockaddr_storage. +# +# Define NO_ICONV if your libc does not properly support iconv. +# +# Define OLD_ICONV if your library has an old iconv(), where the second +# (input buffer pointer) parameter is declared with type (const char **). +# +# Define ICONV_OMITS_BOM if your iconv implementation does not write a +# byte-order mark (BOM) when writing UTF-16 or UTF-32 and always writes in +# big-endian format. +# +# Define NO_DEFLATE_BOUND if your zlib does not have deflateBound. Define +# ZLIB_NG if you want to use zlib-ng instead of zlib. +# +# Define NO_NORETURN if using buggy versions of gcc 4.6+ and profile feedback, +# as the compiler can crash (https://gcc.gnu.org/bugzilla/show_bug.cgi?id=49299) +# +# Define USE_NSEC below if you want git to care about sub-second file mtimes +# and ctimes. Note that you need recent glibc (at least 2.2.4) for this. On +# Linux, kernel 2.6.11 or newer is required for reliable sub-second file times +# on file systems with exactly 1 ns or 1 s resolution. If you intend to use Git +# on other file systems (e.g. CEPH, CIFS, NTFS, UDF), don't enable USE_NSEC. See +# Documentation/technical/racy-git.adoc for details. +# +# Define USE_ST_TIMESPEC if your "struct stat" uses "st_ctimespec" instead of +# "st_ctim" +# +# Define NO_NSEC if your "struct stat" does not have "st_ctim.tv_nsec" +# available. This automatically turns USE_NSEC off. +# +# Define USE_STDEV below if you want git to care about the underlying device +# change being considered an inode change from the update-index perspective. +# +# Define NO_ST_BLOCKS_IN_STRUCT_STAT if your platform does not have st_blocks +# field that counts the on-disk footprint in 512-byte blocks. +# +# Define USE_ASCIIDOCTOR to use Asciidoctor instead of AsciiDoc to build the +# documentation. +# +# Define ASCIIDOCTOR_EXTENSIONS_LAB to point to the location of the Asciidoctor +# Extensions Lab if you have it available. +# +# Define PERL_PATH to the path of your Perl binary (usually /usr/bin/perl). +# +# Define NO_PERL if you do not want Perl scripts or libraries at all. 
+# +# Define NO_PERL_CPAN_FALLBACKS if you do not want to install bundled +# copies of CPAN modules that serve as a fallback in case the modules +# are not available on the system. This option is intended for +# distributions that want to use their packaged versions of Perl +# modules, instead of the fallbacks shipped with Git. +# +# Define NO_GITWEB if you do not want to build or install +# 'gitweb'. Note that defining NO_PERL currently has the same effect +# on not installing gitweb, but not on whether it's built in the +# gitweb/ directory. +# +# Define PYTHON_PATH to the path of your Python binary (often /usr/bin/python +# but /usr/bin/python2.7 or /usr/bin/python3 on some platforms). +# +# Define NO_PYTHON if you do not want Python scripts or libraries at all. +# +# Define NO_TCLTK if you do not want Tcl/Tk GUI. +# +# The TCL_PATH variable governs the location of the Tcl interpreter +# used to optimize git-gui for your system. Only used if NO_TCLTK +# is not set. Defaults to the bare 'tclsh'. +# +# The TCLTK_PATH variable governs the location of the Tcl/Tk interpreter. +# If not set it defaults to the bare 'wish'. If it is set to the empty +# string then NO_TCLTK will be forced (this is used by configure script). +# +# Define INTERNAL_QSORT to use Git's implementation of qsort(), which +# is a simplified version of the merge sort used in glibc. This is +# recommended if Git triggers O(n^2) behavior in your platform's qsort(). +# +# Define HAVE_ISO_QSORT_S if your platform provides a qsort_s() that's +# compatible with the one described in C11 Annex K. +# +# Define UNRELIABLE_FSTAT if your system's fstat does not return the same +# information on a not yet closed file that lstat would return for the same +# file after it was closed. +# +# Define OBJECT_CREATION_USES_RENAMES if your operating systems has problems +# when hardlinking a file to another name and unlinking the original file right +# away (some NTFS drivers seem to zero the contents in that scenario). +# +# Define INSTALL_SYMLINKS if you prefer to have everything that can be +# symlinked between bin/ and libexec/ to use relative symlinks between +# the two. This option overrides NO_CROSS_DIRECTORY_HARDLINKS and +# NO_INSTALL_HARDLINKS which will also use symlinking by indirection +# within the same directory in some cases, INSTALL_SYMLINKS will +# always symlink to the final target directly. +# +# Define NO_CROSS_DIRECTORY_HARDLINKS if you plan to distribute the installed +# programs as a tar, where bin/ and libexec/ might be on different file systems. +# +# Define NO_INSTALL_HARDLINKS if you prefer to use either symbolic links or +# copies to install built-in git commands e.g. git-cat-file. +# +# Define SKIP_DASHED_BUILT_INS if you do not need the dashed versions of the +# built-ins to be linked/copied at all. +# +# Define USE_NED_ALLOCATOR if you want to replace the platforms default +# memory allocators with the nedmalloc allocator written by Niall Douglas. +# +# Define OVERRIDE_STRDUP to override the libc version of strdup(3). +# This is necessary when using a custom allocator in order to avoid +# crashes due to allocation and free working on different 'heaps'. +# It's defined automatically if USE_NED_ALLOCATOR is set. +# +# Define NO_REGEX if your C library lacks regex support with REG_STARTEND +# feature. +# +# Define USE_ENHANCED_BASIC_REGULAR_EXPRESSIONS if your C library provides +# the flag REG_ENHANCED and you'd like to use it to enable enhanced basic +# regular expressions. 
+# +# Define HAVE_DEV_TTY if your system can open /dev/tty to interact with the +# user. +# +# Define JSMIN to point to JavaScript minifier that functions as +# a filter to have gitweb.js minified. +# +# Define CSSMIN to point to a CSS minifier in order to generate a minified +# version of gitweb.css +# +# Define DEFAULT_PAGER to a sensible pager command (defaults to "less") if +# you want to use something different. The value will be interpreted by the +# shell at runtime when it is used. +# +# Define DEFAULT_EDITOR to a sensible editor command (defaults to "vi") if you +# want to use something different. The value will be interpreted by the shell +# if necessary when it is used. Examples: +# +# DEFAULT_EDITOR='~/bin/vi', +# DEFAULT_EDITOR='$GIT_FALLBACK_EDITOR', +# DEFAULT_EDITOR='"C:\Program Files\Vim\gvim.exe" --nofork' +# +# Define COMPUTE_HEADER_DEPENDENCIES to "yes" if you want dependencies on +# header files to be automatically computed, to avoid rebuilding objects when +# an unrelated header file changes. Define it to "no" to use the hard-coded +# dependency rules. The default is "auto", which means to use computed header +# dependencies if your compiler is detected to support it. +# +# Define NATIVE_CRLF if your platform uses CRLF for line endings. +# +# Define GIT_USER_AGENT if you want to change how git identifies itself during +# network interactions. The default is "git/$(GIT_VERSION)". +# +# Define DEFAULT_HELP_FORMAT to "man", "info" or "html" +# (defaults to "man") if you want to have a different default when +# "git help" is called without a parameter specifying the format. +# +# Define GIT_TEST_INDEX_VERSION to 2, 3 or 4 to run the test suite +# with a different indexfile format version. If it isn't set the index +# file format used is index-v[23]. +# +# Define GIT_TEST_UTF8_LOCALE to preferred utf-8 locale for testing. +# If it isn't set, fallback to $LC_ALL, $LANG or use the first utf-8 +# locale returned by "locale -a". +# +# Define HAVE_CLOCK_GETTIME if your platform has clock_gettime. +# +# Define HAVE_CLOCK_MONOTONIC if your platform has CLOCK_MONOTONIC. +# +# Define HAVE_SYNC_FILE_RANGE if your platform has sync_file_range. +# +# Define HAVE_BSD_SYSCTL if your platform has a BSD-compatible sysctl function. +# +# Define HAVE_GETDELIM if your system has the getdelim() function. +# +# Define FILENO_IS_A_MACRO if fileno() is a macro, not a real function. +# +# Define NEED_ACCESS_ROOT_HANDLER if access() under root may success for X_OK +# even if execution permission isn't granted for any user. +# +# Define PAGER_ENV to a SP separated VAR=VAL pairs to define +# default environment variables to be passed when a pager is spawned, e.g. +# +# PAGER_ENV = LESS=FRX LV=-c +# +# to say "export LESS=FRX (and LV=-c) if the environment variable +# LESS (and LV) is not set, respectively". +# +# Define TEST_SHELL_PATH if you want to use a shell besides SHELL_PATH for +# running the test scripts (e.g., bash has better support for "set -x" +# tracing). +# +# When cross-compiling, define HOST_CPU as the canonical name of the CPU on +# which the built Git will run (for instance "x86_64"). +# +# Define RUNTIME_PREFIX to configure Git to resolve its ancillary tooling and +# support files relative to the location of the runtime binary, rather than +# hard-coding them into the binary. Git installations built with RUNTIME_PREFIX +# can be moved to arbitrary filesystem locations. 
RUNTIME_PREFIX also causes +# Perl scripts to use a modified entry point header allowing them to resolve +# support files at runtime. +# +# When using RUNTIME_PREFIX, define HAVE_BSD_KERN_PROC_SYSCTL if your platform +# supports the KERN_PROC BSD sysctl function. +# +# When using RUNTIME_PREFIX, define PROCFS_EXECUTABLE_PATH if your platform +# mounts a "procfs" filesystem capable of resolving the path of the current +# executable. If defined, this must be the canonical path for the "procfs" +# current executable path. +# +# When using RUNTIME_PREFIX, define HAVE_NS_GET_EXECUTABLE_PATH if your platform +# supports calling _NSGetExecutablePath to retrieve the path of the running +# executable. +# +# When using RUNTIME_PREFIX, define HAVE_ZOS_GET_EXECUTABLE_PATH if your platform +# supports calling __getprogramdir and getprogname to retrieve the path of the +# running executable. +# +# When using RUNTIME_PREFIX, define HAVE_WPGMPTR if your platform offers +# the global variable _wpgmptr containing the absolute path of the current +# executable (this is the case on Windows). +# +# INSTALL_STRIP can be set to "-s" to strip binaries during installation, +# if your $(INSTALL) command supports the option. +# +# Define GENERATE_COMPILATION_DATABASE to "yes" to generate JSON compilation +# database entries during compilation if your compiler supports it, using the +# `-MJ` flag. The JSON entries will be placed in the `compile_commands/` +# directory, and the JSON compilation database 'compile_commands.json' will be +# created at the root of the repository. +# +# If your platform supports a built-in fsmonitor backend, set +# FSMONITOR_DAEMON_BACKEND to the "" of the corresponding +# `compat/fsmonitor/fsm-listen-.c` and +# `compat/fsmonitor/fsm-health-.c` files +# that implement the `fsm_listen__*()` and `fsm_health__*()` routines. +# +# If your platform has OS-specific ways to tell if a repo is incompatible with +# fsmonitor (whether the hook or IPC daemon version), set FSMONITOR_OS_SETTINGS +# to the "" of the corresponding `compat/fsmonitor/fsm-settings-.c` +# that implements the `fsm_os_settings__*()` routines. +# +# Define LINK_FUZZ_PROGRAMS if you want `make all` to also build the fuzz test +# programs in oss-fuzz/. +# +# Define INCLUDE_LIBGIT_RS if you want `make all` and `make test` to build and +# test the Rust crates in contrib/libgit-sys and contrib/libgit-rs. +# +# === Optional library: libintl === +# +# Define NO_GETTEXT if you don't want Git output to be translated. +# A translated Git requires GNU libintl or another gettext implementation, +# plus libintl-perl at runtime. +# +# Define USE_GETTEXT_SCHEME and set it to 'fallthrough', if you don't trust +# the installed gettext translation of the shell scripts output. +# +# Define HAVE_LIBCHARSET_H if you haven't set NO_GETTEXT and you can't +# trust the langinfo.h's nl_langinfo(CODESET) function to return the +# current character set. GNU and Solaris have a nl_langinfo(CODESET), +# FreeBSD can use either, but MinGW and some others need to use +# libcharset.h's locale_charset() instead. +# +# Define CHARSET_LIB to the library you need to link with in order to +# use locale_charset() function. On some platforms this needs to set to +# -lcharset, on others to -liconv . +# +# Define LIBC_CONTAINS_LIBINTL if your gettext implementation doesn't +# need -lintl when linking. 
+# +# Define NO_MSGFMT_EXTENDED_OPTIONS if your implementation of msgfmt +# doesn't support GNU extensions like --check and --statistics +# +# === Optional library: libexpat === +# +# Define NO_EXPAT if you do not have expat installed. git-http-push is +# not built, and you cannot push using http:// and https:// transports (dumb). +# +# Define EXPATDIR=/foo/bar if your expat header and library files are in +# /foo/bar/include and /foo/bar/lib directories. +# +# Define EXPAT_NEEDS_XMLPARSE_H if you have an old version of expat (e.g., +# 1.1 or 1.2) that provides xmlparse.h instead of expat.h. + +# === Optional library: libcurl === +# +# Define NO_CURL if you do not have libcurl installed. git-http-fetch and +# git-http-push are not built, and you cannot use http:// and https:// +# transports (neither smart nor dumb). +# +# Define CURLDIR=/foo/bar if your curl header and library files are in +# /foo/bar/include and /foo/bar/lib directories. +# +# Define CURL_CONFIG to curl's configuration program that prints information +# about the library (e.g., its version number). The default is 'curl-config'. +# +# Define CURL_LDFLAGS to specify flags that you need to link when using libcurl, +# if you do not want to rely on the libraries provided by CURL_CONFIG. The +# default value is a result of `curl-config --libs`. An example value for +# CURL_LDFLAGS is as follows: +# +# CURL_LDFLAGS=-lcurl +# +# === Optional library: libpcre2 === +# +# Define USE_LIBPCRE if you have and want to use libpcre. Various +# commands such as log and grep offer runtime options to use +# Perl-compatible regular expressions instead of standard or extended +# POSIX regular expressions. +# +# Only libpcre version 2 is supported. USE_LIBPCRE2 is a synonym for +# USE_LIBPCRE, support for the old USE_LIBPCRE1 has been removed. +# +# Define LIBPCREDIR=/foo/bar if your PCRE header and library files are +# in /foo/bar/include and /foo/bar/lib directories. +# +# == Optional Rust support == +# +# Define WITH_RUST if you want to include features and subsystems written in +# Rust into Git. For now, Rust is still an optional feature of the build +# process. With Git 3.0 though, Rust will always be enabled. +# +# Building Rust code requires Cargo. +# +# == SHA-1 and SHA-256 defines == +# +# === SHA-1 backend === +# +# ==== Security ==== +# +# Due to the SHAttered (https://shattered.io) attack vector on SHA-1 +# it's strongly recommended to use the sha1collisiondetection +# counter-cryptanalysis library for SHA-1 hashing. +# +# If you know that you can trust the repository contents, or where +# potential SHA-1 attacks are otherwise mitigated the other backends +# listed in "SHA-1 implementations" are faster than +# sha1collisiondetection. +# +# ==== Default SHA-1 backend ==== +# +# If no *_SHA1 backend is picked, the first supported one listed in +# "SHA-1 implementations" will be picked. +# +# ==== Options common to all SHA-1 implementations ==== +# +# Define SHA1_MAX_BLOCK_SIZE to limit the amount of data that will be hashed +# in one call to the platform's SHA1_Update(). e.g. APPLE_COMMON_CRYPTO +# wants 'SHA1_MAX_BLOCK_SIZE=1024L*1024L*1024L' defined. +# +# ==== SHA-1 implementations ==== +# +# Define OPENSSL_SHA1 to link to the SHA-1 routines from the OpenSSL +# library. +# +# Define BLK_SHA1 to make use of optimized C SHA-1 routines bundled +# with git (in the block-sha1/ directory). +# +# Define APPLE_COMMON_CRYPTO_SHA1 to use Apple's CommonCrypto for +# SHA-1. 
+# +# Define the same Makefile knobs as above, but suffixed with _UNSAFE to +# use the corresponding implementations for unsafe SHA-1 hashing for +# non-cryptographic purposes. +# +# If don't enable any of the *_SHA1 settings in this section, Git will +# default to its built-in sha1collisiondetection library, which is a +# collision-detecting sha1 This is slower, but may detect attempted +# collision attacks. +# +# ==== Options for the sha1collisiondetection library ==== +# +# Define DC_SHA1_EXTERNAL if you want to build / link +# git with the external SHA1 collision-detect library. +# Without this option, i.e. the default behavior is to build git with its +# own built-in code (or submodule). +# +# Define DC_SHA1_SUBMODULE to use the +# sha1collisiondetection shipped as a submodule instead of the +# non-submodule copy in sha1dc/. This is an experimental option used +# by the git project to migrate to using sha1collisiondetection as a +# submodule. +# +# === SHA-256 backend === +# +# ==== Security ==== +# +# Unlike SHA-1 the SHA-256 algorithm does not suffer from any known +# vulnerabilities, so any implementation will do. +# +# ==== SHA-256 implementations ==== +# +# Define OPENSSL_SHA256 to use the SHA-256 routines in OpenSSL. +# +# Define NETTLE_SHA256 to use the SHA-256 routines in libnettle. +# +# Define GCRYPT_SHA256 to use the SHA-256 routines in libgcrypt. +# +# If don't enable any of the *_SHA256 settings in this section, Git +# will default to its built-in sha256 implementation. +# +# == DEVELOPER defines == +# +# Define DEVELOPER to enable more compiler warnings. Compiler version +# and family are auto detected, but could be overridden by defining +# COMPILER_FEATURES (see config.mak.dev). You can still set +# CFLAGS="..." in combination with DEVELOPER enables, whether that's +# for tweaking something unrelated (e.g. optimization level), or for +# selectively overriding something DEVELOPER or one of the DEVOPTS +# (see just below) brings in. +# +# When DEVELOPER is set, DEVOPTS can be used to control compiler +# options. This variable contains keywords separated by +# whitespace. The following keywords are recognized: +# +# no-error: +# +# suppresses the -Werror that implicitly comes with +# DEVELOPER=1. Useful for getting the full set of errors +# without immediately dying, or for logging them. +# +# extra-all: +# +# The DEVELOPER mode enables -Wextra with a few exceptions. By +# setting this flag the exceptions are removed, and all of +# -Wextra is used. +# +# no-pedantic: +# +# Disable -pedantic compilation. + +# Set our default configuration. +# +# Among the variables below, these: +# gitexecdir +# template_dir +# sysconfdir +# can be specified as a relative path some/where/else; +# this is interpreted as relative to $(prefix) and "git" built with +# RUNTIME_PREFIX flag will figure out (at runtime) where they are +# based on the path to the executable. +# Additionally, the following will be treated as relative by "git" if they +# begin with "$(prefix)/": +# mandir +# infodir +# htmldir +# localedir +# perllibdir +# This can help installing the suite in a relocatable way. 
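+#
+# As an illustration only (the values below are hypothetical), such knobs are
+# typically set on the make command line or in a local config.mak, which is
+# included further down and overrides these built-in defaults, e.g.:
+#
+#	prefix = /opt/git
+#	NO_TCLTK = YesPlease
+#	USE_LIBPCRE = YesPlease
+#	DEVELOPER = 1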
+ +prefix = $(HOME) +bindir = $(prefix)/bin +mandir = $(prefix)/share/man +infodir = $(prefix)/share/info +bash_completion_dir = $(prefix)/share/bash-completion/completions +gitexecdir = libexec/git-core +mergetoolsdir = $(gitexecdir)/mergetools +sharedir = $(prefix)/share +gitwebdir = $(sharedir)/gitweb +gitwebstaticdir = $(gitwebdir)/static +perllibdir = $(sharedir)/perl5 +localedir = $(sharedir)/locale +template_dir = share/git-core/templates +htmldir = $(prefix)/share/doc/git-doc +ETC_GITCONFIG = $(sysconfdir)/gitconfig +ETC_GITATTRIBUTES = $(sysconfdir)/gitattributes +lib = lib +# DESTDIR = +pathsep = : + +bindir_relative = $(patsubst $(prefix)/%,%,$(bindir)) +mandir_relative = $(patsubst $(prefix)/%,%,$(mandir)) +infodir_relative = $(patsubst $(prefix)/%,%,$(infodir)) +gitexecdir_relative = $(patsubst $(prefix)/%,%,$(gitexecdir)) +localedir_relative = $(patsubst $(prefix)/%,%,$(localedir)) +htmldir_relative = $(patsubst $(prefix)/%,%,$(htmldir)) +perllibdir_relative = $(patsubst $(prefix)/%,%,$(perllibdir)) + +export prefix bindir sharedir sysconfdir perllibdir localedir + +# Set our default programs +CC = cc +AR = ar +RM = rm -f +DIFF = diff +TAR = tar +FIND = find +INSTALL = install +TCL_PATH = tclsh +TCLTK_PATH = wish +XGETTEXT = xgettext +MSGCAT = msgcat +MSGFMT = msgfmt +MSGMERGE = msgmerge +CURL_CONFIG = curl-config +GCOV = gcov +STRIP = strip +SPATCH = spatch +LD = ld +OBJCOPY = objcopy + +export TCL_PATH TCLTK_PATH + +# Set our default LIBS variables +PTHREAD_LIBS = -lpthread + +# Guard against environment variables +BUILTIN_OBJS = +BUILT_INS = +COMPAT_CFLAGS = +COMPAT_OBJS = +XDIFF_OBJS = +GENERATED_H = +EXTRA_CPPFLAGS = +FUZZ_OBJS = +FUZZ_PROGRAMS = +GIT_OBJS = +LIB_OBJS = +LIBGIT_PUB_OBJS = +SCALAR_OBJS = +OBJECTS = +OTHER_PROGRAMS = +PROGRAM_OBJS = +PROGRAMS = +RUST_SOURCES = +EXCLUDED_PROGRAMS = +SCRIPT_PERL = +SCRIPT_PYTHON = +SCRIPT_SH = +SCRIPT_LIB = +TEST_BUILTINS_OBJS = +TEST_OBJS = +TEST_PROGRAMS_NEED_X = +THIRD_PARTY_SOURCES = +UNIT_TEST_PROGRAMS = +UNIT_TEST_DIR = t/unit-tests +UNIT_TEST_BIN = $(UNIT_TEST_DIR)/bin + +# Having this variable in your environment would break pipelines because +# you cause "cd" to echo its destination to stdout. It can also take +# scripts to unexpected places. If you like CDPATH, define it for your +# interactive shell sessions without exporting it. +unexport CDPATH + +SCRIPT_SH += git-difftool--helper.sh +SCRIPT_SH += git-filter-branch.sh +SCRIPT_SH += git-merge-octopus.sh +SCRIPT_SH += git-merge-one-file.sh +SCRIPT_SH += git-merge-resolve.sh +SCRIPT_SH += git-mergetool.sh +SCRIPT_SH += git-quiltimport.sh +SCRIPT_SH += git-request-pull.sh +SCRIPT_SH += git-submodule.sh +SCRIPT_SH += git-web--browse.sh + +SCRIPT_LIB += git-mergetool--lib +SCRIPT_LIB += git-sh-i18n +SCRIPT_LIB += git-sh-setup + +SCRIPT_PERL += git-archimport.perl +SCRIPT_PERL += git-cvsexportcommit.perl +SCRIPT_PERL += git-cvsimport.perl +SCRIPT_PERL += git-cvsserver.perl +SCRIPT_PERL += git-send-email.perl +SCRIPT_PERL += git-svn.perl + +SCRIPT_PYTHON += git-p4.py + +# Generated files for scripts +SCRIPT_SH_GEN = $(patsubst %.sh,%,$(SCRIPT_SH)) +SCRIPT_PERL_GEN = $(patsubst %.perl,%,$(SCRIPT_PERL)) +SCRIPT_PYTHON_GEN = $(patsubst %.py,%,$(SCRIPT_PYTHON)) + +# Individual rules to allow e.g. +# "make -C ../.. 
SCRIPT_PERL=contrib/foo/bar.perl build-perl-script" +# from subdirectories like contrib/*/ +.PHONY: build-perl-script build-sh-script build-python-script +build-perl-script: $(SCRIPT_PERL_GEN) +build-sh-script: $(SCRIPT_SH_GEN) +build-python-script: $(SCRIPT_PYTHON_GEN) + +.PHONY: install-perl-script install-sh-script install-python-script +install-sh-script: $(SCRIPT_SH_GEN) + $(INSTALL) $^ '$(DESTDIR_SQ)$(gitexec_instdir_SQ)' +install-perl-script: $(SCRIPT_PERL_GEN) + $(INSTALL) $^ '$(DESTDIR_SQ)$(gitexec_instdir_SQ)' +install-python-script: $(SCRIPT_PYTHON_GEN) + $(INSTALL) $^ '$(DESTDIR_SQ)$(gitexec_instdir_SQ)' + +.PHONY: clean-perl-script clean-sh-script clean-python-script +clean-sh-script: + $(RM) $(SCRIPT_SH_GEN) +clean-perl-script: + $(RM) $(SCRIPT_PERL_GEN) +clean-python-script: + $(RM) $(SCRIPT_PYTHON_GEN) + +SCRIPTS = $(SCRIPT_SH_GEN) \ + $(SCRIPT_PERL_GEN) \ + $(SCRIPT_PYTHON_GEN) \ + git-instaweb + +ETAGS_TARGET = TAGS + +# Empty... +EXTRA_PROGRAMS = + +# ... and all the rest that could be moved out of bindir to gitexecdir +PROGRAMS += $(EXTRA_PROGRAMS) + +PROGRAM_OBJS += daemon.o +PROGRAM_OBJS += http-backend.o +PROGRAM_OBJS += imap-send.o +PROGRAM_OBJS += sh-i18n--envsubst.o +PROGRAM_OBJS += shell.o +.PHONY: program-objs +program-objs: $(PROGRAM_OBJS) + +# Binary suffix, set to .exe for Windows builds +X = + +PROGRAMS += $(patsubst %.o,git-%$X,$(PROGRAM_OBJS)) + +TEST_BUILTINS_OBJS += test-advise.o +TEST_BUILTINS_OBJS += test-bitmap.o +TEST_BUILTINS_OBJS += test-bloom.o +TEST_BUILTINS_OBJS += test-bundle-uri.o +TEST_BUILTINS_OBJS += test-cache-tree.o +TEST_BUILTINS_OBJS += test-chmtime.o +TEST_BUILTINS_OBJS += test-config.o +TEST_BUILTINS_OBJS += test-crontab.o +TEST_BUILTINS_OBJS += test-csprng.o +TEST_BUILTINS_OBJS += test-date.o +TEST_BUILTINS_OBJS += test-delete-gpgsig.o +TEST_BUILTINS_OBJS += test-delta.o +TEST_BUILTINS_OBJS += test-dir-iterator.o +TEST_BUILTINS_OBJS += test-drop-caches.o +TEST_BUILTINS_OBJS += test-dump-cache-tree.o +TEST_BUILTINS_OBJS += test-dump-fsmonitor.o +TEST_BUILTINS_OBJS += test-dump-split-index.o +TEST_BUILTINS_OBJS += test-dump-untracked-cache.o +TEST_BUILTINS_OBJS += test-env-helper.o +TEST_BUILTINS_OBJS += test-example-tap.o +TEST_BUILTINS_OBJS += test-find-pack.o +TEST_BUILTINS_OBJS += test-fsmonitor-client.o +TEST_BUILTINS_OBJS += test-genrandom.o +TEST_BUILTINS_OBJS += test-genzeros.o +TEST_BUILTINS_OBJS += test-getcwd.o +TEST_BUILTINS_OBJS += test-hash-speed.o +TEST_BUILTINS_OBJS += test-hash.o +TEST_BUILTINS_OBJS += test-hashmap.o +TEST_BUILTINS_OBJS += test-hexdump.o +TEST_BUILTINS_OBJS += test-json-writer.o +TEST_BUILTINS_OBJS += test-lazy-init-name-hash.o +TEST_BUILTINS_OBJS += test-match-trees.o +TEST_BUILTINS_OBJS += test-mergesort.o +TEST_BUILTINS_OBJS += test-mktemp.o +TEST_BUILTINS_OBJS += test-name-hash.o +TEST_BUILTINS_OBJS += test-online-cpus.o +TEST_BUILTINS_OBJS += test-pack-deltas.o +TEST_BUILTINS_OBJS += test-pack-mtimes.o +TEST_BUILTINS_OBJS += test-parse-options.o +TEST_BUILTINS_OBJS += test-parse-pathspec-file.o +TEST_BUILTINS_OBJS += test-partial-clone.o +TEST_BUILTINS_OBJS += test-path-utils.o +TEST_BUILTINS_OBJS += test-path-walk.o +TEST_BUILTINS_OBJS += test-pcre2-config.o +TEST_BUILTINS_OBJS += test-pkt-line.o +TEST_BUILTINS_OBJS += test-proc-receive.o +TEST_BUILTINS_OBJS += test-progress.o +TEST_BUILTINS_OBJS += test-reach.o +TEST_BUILTINS_OBJS += test-read-cache.o +TEST_BUILTINS_OBJS += test-read-graph.o +TEST_BUILTINS_OBJS += test-read-midx.o +TEST_BUILTINS_OBJS += test-ref-store.o +TEST_BUILTINS_OBJS += 
test-reftable.o +TEST_BUILTINS_OBJS += test-regex.o +TEST_BUILTINS_OBJS += test-rot13-filter.o +TEST_BUILTINS_OBJS += test-repository.o +TEST_BUILTINS_OBJS += test-revision-walking.o +TEST_BUILTINS_OBJS += test-run-command.o +TEST_BUILTINS_OBJS += test-scrap-cache-tree.o +TEST_BUILTINS_OBJS += test-serve-v2.o +TEST_BUILTINS_OBJS += test-sha1.o +TEST_BUILTINS_OBJS += test-sha256.o +TEST_BUILTINS_OBJS += test-sigchain.o +TEST_BUILTINS_OBJS += test-simple-ipc.o +TEST_BUILTINS_OBJS += test-string-list.o +TEST_BUILTINS_OBJS += test-submodule-config.o +TEST_BUILTINS_OBJS += test-submodule-nested-repo-config.o +TEST_BUILTINS_OBJS += test-submodule.o +TEST_BUILTINS_OBJS += test-subprocess.o +TEST_BUILTINS_OBJS += test-trace2.o +TEST_BUILTINS_OBJS += test-truncate.o +TEST_BUILTINS_OBJS += test-userdiff.o +TEST_BUILTINS_OBJS += test-wildmatch.o +TEST_BUILTINS_OBJS += test-windows-named-pipe.o +TEST_BUILTINS_OBJS += test-write-cache.o +TEST_BUILTINS_OBJS += test-xml-encode.o +TEST_BUILTINS_OBJS += test-zlib.o + +# Do not add more tests here unless they have extra dependencies. Add +# them in TEST_BUILTINS_OBJS above. +TEST_PROGRAMS_NEED_X += test-fake-ssh +TEST_PROGRAMS_NEED_X += test-tool + +TEST_PROGRAMS = $(patsubst %,t/helper/%$X,$(TEST_PROGRAMS_NEED_X)) + +# List built-in command $C whose implementation cmd_$C() is not in +# builtin/$C.o but is linked in as part of some other command. +BUILT_INS += $(patsubst builtin/%.o,git-%$X,$(BUILTIN_OBJS)) + +BUILT_INS += git-cherry$X +BUILT_INS += git-cherry-pick$X +BUILT_INS += git-format-patch$X +BUILT_INS += git-fsck-objects$X +BUILT_INS += git-init$X +BUILT_INS += git-maintenance$X +BUILT_INS += git-merge-subtree$X +BUILT_INS += git-restore$X +BUILT_INS += git-show$X +BUILT_INS += git-stage$X +BUILT_INS += git-status$X +BUILT_INS += git-switch$X +BUILT_INS += git-version$X +ifndef WITH_BREAKING_CHANGES +BUILT_INS += git-whatchanged$X +endif + +# what 'all' will build but not install in gitexecdir +OTHER_PROGRAMS += git$X +OTHER_PROGRAMS += scalar$X + +# what test wrappers are needed and 'install' will install, in bindir +BINDIR_PROGRAMS_NEED_X += git +BINDIR_PROGRAMS_NEED_X += scalar +BINDIR_PROGRAMS_NEED_X += git-receive-pack +BINDIR_PROGRAMS_NEED_X += git-shell +BINDIR_PROGRAMS_NEED_X += git-upload-archive +BINDIR_PROGRAMS_NEED_X += git-upload-pack + +BINDIR_PROGRAMS_NO_X += git-cvsserver + +# Set paths to tools early so that they can be used for version tests. +ifndef SHELL_PATH + SHELL_PATH = /bin/sh +endif +ifndef PERL_PATH + PERL_PATH = /usr/bin/perl +endif +ifndef PYTHON_PATH + PYTHON_PATH = /usr/bin/python +endif + +export PERL_PATH +export PYTHON_PATH + +TEST_SHELL_PATH = $(SHELL_PATH) + +LIB_FILE = libgit.a + +ifdef DEBUG +RUST_TARGET_DIR = target/debug +else +RUST_TARGET_DIR = target/release +endif + +ifeq ($(uname_S),Windows) +RUST_LIB = $(RUST_TARGET_DIR)/gitcore.lib +else +RUST_LIB = $(RUST_TARGET_DIR)/libgitcore.a +endif + +GITLIBS = common-main.o $(LIB_FILE) +EXTLIBS = + +GIT_USER_AGENT = git/$(GIT_VERSION) + +ifeq ($(wildcard sha1collisiondetection/lib/sha1.h),sha1collisiondetection/lib/sha1.h) +DC_SHA1_SUBMODULE = auto +endif + +# Set CFLAGS, LDFLAGS and other *FLAGS variables. These might be +# tweaked by config.* below as well as the command-line, both of +# which'll override these defaults. +# Older versions of GCC may require adding "-std=gnu99" at the end. +CFLAGS = -g -O2 -Wall +LDFLAGS = +CC_LD_DYNPATH = -Wl,-rpath, +BASIC_CFLAGS = -I. 
+BASIC_LDFLAGS = + +# library flags +ARFLAGS = rcs +PTHREAD_CFLAGS = + +# Rust flags +CARGO_ARGS = +ifndef V +CARGO_ARGS += --quiet +endif +ifndef DEBUG +CARGO_ARGS += --release +endif + +# For the 'sparse' target +SPARSE_FLAGS ?= -std=gnu99 -D__STDC_NO_VLA__ +SP_EXTRA_FLAGS = + +# For informing GIT-BUILD-OPTIONS of the SANITIZE=leak,address targets +SANITIZE_LEAK = +SANITIZE_ADDRESS = + +# For the 'coccicheck' target +SPATCH_INCLUDE_FLAGS = --all-includes +SPATCH_FLAGS = +SPATCH_TEST_FLAGS = + +# If *.o files are present, have "coccicheck" depend on them, with +# COMPUTE_HEADER_DEPENDENCIES this will speed up the common-case of +# only needing to re-generate coccicheck results for the users of a +# given API if it's changed, and not all files in the project. If +# COMPUTE_HEADER_DEPENDENCIES=no this will be unset too. +SPATCH_USE_O_DEPENDENCIES = YesPlease + +# Set SPATCH_CONCAT_COCCI to concatenate the contrib/cocci/*.cocci +# files into a single contrib/cocci/ALL.cocci before running +# "coccicheck". +# +# Pros: +# +# - Speeds up a one-shot run of "make coccicheck", as we won't have to +# parse *.[ch] files N times for the N *.cocci rules +# +# Cons: +# +# - Will make incremental development of *.cocci slower, as +# e.g. changing strbuf.cocci will re-run all *.cocci. +# +# - Makes error and performance analysis harder, as rules will be +# applied from a monolithic ALL.cocci, rather than +# e.g. strbuf.cocci. To work around this either undefine this, or +# generate a specific patch, e.g. this will always use strbuf.cocci, +# not ALL.cocci: +# +# make contrib/coccinelle/strbuf.cocci.patch +SPATCH_CONCAT_COCCI = YesPlease + +# Rebuild 'coccicheck' if $(SPATCH), its flags etc. change +TRACK_SPATCH_DEFINES = +TRACK_SPATCH_DEFINES += $(SPATCH) +TRACK_SPATCH_DEFINES += $(SPATCH_INCLUDE_FLAGS) +TRACK_SPATCH_DEFINES += $(SPATCH_FLAGS) +TRACK_SPATCH_DEFINES += $(SPATCH_TEST_FLAGS) +GIT-SPATCH-DEFINES: FORCE + @FLAGS='$(TRACK_SPATCH_DEFINES)'; \ + if test x"$$FLAGS" != x"`cat GIT-SPATCH-DEFINES 2>/dev/null`" ; then \ + echo >&2 " * new spatch flags"; \ + echo "$$FLAGS" >GIT-SPATCH-DEFINES; \ + fi + +include config.mak.uname +-include config.mak.autogen +-include config.mak + +ifdef DEVELOPER +include config.mak.dev +endif + +GENERATED_H += command-list.h +GENERATED_H += config-list.h +GENERATED_H += hook-list.h +GENERATED_H += $(UNIT_TEST_DIR)/clar-decls.h +GENERATED_H += $(UNIT_TEST_DIR)/clar.suite + +.PHONY: generated-hdrs +generated-hdrs: $(GENERATED_H) + +## Exhaustive lists of our source files, either dynamically generated, +## or hardcoded. +SOURCES_CMD = ( \ + git ls-files --deduplicate \ + '*.[hcS]' \ + '*.sh' \ + ':!*[tp][0-9][0-9][0-9][0-9]*' \ + ':!contrib' \ + 2>/dev/null || \ + $(FIND) . 
\ + \( -name .git -type d -prune \) \ + -o \( -name '[tp][0-9][0-9][0-9][0-9]*' -prune \) \ + -o \( -name contrib -type d -prune \) \ + -o \( -name build -type d -prune \) \ + -o \( -name .build -type d -prune \) \ + -o \( -name 'trash*' -type d -prune \) \ + -o \( -name '*.[hcS]' -type f -print \) \ + -o \( -name '*.sh' -type f -print \) \ + | sed -e 's|^\./||' \ + ) +FOUND_SOURCE_FILES := $(filter-out $(GENERATED_H),$(shell $(SOURCES_CMD))) + +FOUND_C_SOURCES = $(filter %.c,$(FOUND_SOURCE_FILES)) +FOUND_H_SOURCES = $(filter %.h,$(FOUND_SOURCE_FILES)) + +COCCI_SOURCES = $(filter-out $(THIRD_PARTY_SOURCES) reftable/%,$(FOUND_C_SOURCES)) + +LIB_H = $(FOUND_H_SOURCES) + +LIB_OBJS += abspath.o +LIB_OBJS += add-interactive.o +LIB_OBJS += add-patch.o +LIB_OBJS += advice.o +LIB_OBJS += alias.o +LIB_OBJS += alloc.o +LIB_OBJS += apply.o +LIB_OBJS += archive-tar.o +LIB_OBJS += archive-zip.o +LIB_OBJS += archive.o +LIB_OBJS += attr.o +LIB_OBJS += base85.o +LIB_OBJS += bisect.o +LIB_OBJS += blame.o +LIB_OBJS += blob.o +LIB_OBJS += bloom.o +LIB_OBJS += branch.o +LIB_OBJS += bundle-uri.o +LIB_OBJS += bundle.o +LIB_OBJS += cache-tree.o +LIB_OBJS += cbtree.o +LIB_OBJS += chdir-notify.o +LIB_OBJS += checkout.o +LIB_OBJS += chunk-format.o +LIB_OBJS += color.o +LIB_OBJS += column.o +LIB_OBJS += combine-diff.o +LIB_OBJS += commit-graph.o +LIB_OBJS += commit-reach.o +LIB_OBJS += commit.o +LIB_OBJS += common-exit.o +LIB_OBJS += common-init.o +LIB_OBJS += compat/nonblock.o +LIB_OBJS += compat/obstack.o +LIB_OBJS += compat/open.o +LIB_OBJS += compat/terminal.o +LIB_OBJS += compiler-tricks/not-constant.o +LIB_OBJS += config.o +LIB_OBJS += connect.o +LIB_OBJS += connected.o +LIB_OBJS += convert.o +LIB_OBJS += copy.o +LIB_OBJS += credential.o +LIB_OBJS += csum-file.o +LIB_OBJS += ctype.o +LIB_OBJS += date.o +LIB_OBJS += decorate.o +LIB_OBJS += delta-islands.o +LIB_OBJS += diagnose.o +LIB_OBJS += diff-delta.o +LIB_OBJS += diff-merges.o +LIB_OBJS += diff-lib.o +LIB_OBJS += diff-no-index.o +LIB_OBJS += diff.o +LIB_OBJS += diffcore-break.o +LIB_OBJS += diffcore-delta.o +LIB_OBJS += diffcore-order.o +LIB_OBJS += diffcore-pickaxe.o +LIB_OBJS += diffcore-rename.o +LIB_OBJS += diffcore-rotate.o +LIB_OBJS += dir-iterator.o +LIB_OBJS += dir.o +LIB_OBJS += editor.o +LIB_OBJS += entry.o +LIB_OBJS += environment.o +LIB_OBJS += ewah/bitmap.o +LIB_OBJS += ewah/ewah_bitmap.o +LIB_OBJS += ewah/ewah_io.o +LIB_OBJS += ewah/ewah_rlw.o +LIB_OBJS += exec-cmd.o +LIB_OBJS += fetch-negotiator.o +LIB_OBJS += fetch-pack.o +LIB_OBJS += fmt-merge-msg.o +LIB_OBJS += fsck.o +LIB_OBJS += fsmonitor.o +LIB_OBJS += fsmonitor-ipc.o +LIB_OBJS += fsmonitor-settings.o +LIB_OBJS += gettext.o +LIB_OBJS += git-zlib.o +LIB_OBJS += gpg-interface.o +LIB_OBJS += graph.o +LIB_OBJS += grep.o +LIB_OBJS += hash-lookup.o +LIB_OBJS += hash.o +LIB_OBJS += hashmap.o +LIB_OBJS += help.o +LIB_OBJS += hex.o +LIB_OBJS += hex-ll.o +LIB_OBJS += hook.o +LIB_OBJS += ident.o +LIB_OBJS += json-writer.o +LIB_OBJS += kwset.o +LIB_OBJS += levenshtein.o +LIB_OBJS += line-log.o +LIB_OBJS += line-range.o +LIB_OBJS += linear-assignment.o +LIB_OBJS += list-objects-filter-options.o +LIB_OBJS += list-objects-filter.o +LIB_OBJS += list-objects.o +LIB_OBJS += lockfile.o +LIB_OBJS += log-tree.o +LIB_OBJS += loose.o +LIB_OBJS += ls-refs.o +LIB_OBJS += mailinfo.o +LIB_OBJS += mailmap.o +LIB_OBJS += match-trees.o +LIB_OBJS += mem-pool.o +LIB_OBJS += merge-blobs.o +LIB_OBJS += merge-ll.o +LIB_OBJS += merge-ort.o +LIB_OBJS += merge-ort-wrappers.o +LIB_OBJS += merge.o +LIB_OBJS += midx.o 
+LIB_OBJS += midx-write.o +LIB_OBJS += name-hash.o +LIB_OBJS += negotiator/default.o +LIB_OBJS += negotiator/noop.o +LIB_OBJS += negotiator/skipping.o +LIB_OBJS += notes-cache.o +LIB_OBJS += notes-merge.o +LIB_OBJS += notes-utils.o +LIB_OBJS += notes.o +LIB_OBJS += object-file-convert.o +LIB_OBJS += object-file.o +LIB_OBJS += object-name.o +LIB_OBJS += object.o +LIB_OBJS += odb.o +LIB_OBJS += oid-array.o +LIB_OBJS += oidmap.o +LIB_OBJS += oidset.o +LIB_OBJS += oidtree.o +LIB_OBJS += pack-bitmap-write.o +LIB_OBJS += pack-bitmap.o +LIB_OBJS += pack-check.o +LIB_OBJS += pack-mtimes.o +LIB_OBJS += pack-objects.o +LIB_OBJS += pack-refs.o +LIB_OBJS += pack-revindex.o +LIB_OBJS += pack-write.o +LIB_OBJS += packfile.o +LIB_OBJS += pager.o +LIB_OBJS += parallel-checkout.o +LIB_OBJS += parse.o +LIB_OBJS += parse-options-cb.o +LIB_OBJS += parse-options.o +LIB_OBJS += patch-delta.o +LIB_OBJS += patch-ids.o +LIB_OBJS += path.o +LIB_OBJS += path-walk.o +LIB_OBJS += pathspec.o +LIB_OBJS += pkt-line.o +LIB_OBJS += preload-index.o +LIB_OBJS += pretty.o +LIB_OBJS += prio-queue.o +LIB_OBJS += progress.o +LIB_OBJS += promisor-remote.o +LIB_OBJS += prompt.o +LIB_OBJS += protocol.o +LIB_OBJS += protocol-caps.o +LIB_OBJS += prune-packed.o +LIB_OBJS += pseudo-merge.o +LIB_OBJS += quote.o +LIB_OBJS += range-diff.o +LIB_OBJS += reachable.o +LIB_OBJS += read-cache.o +LIB_OBJS += rebase-interactive.o +LIB_OBJS += rebase.o +LIB_OBJS += ref-filter.o +LIB_OBJS += reflog-walk.o +LIB_OBJS += reflog.o +LIB_OBJS += refs.o +LIB_OBJS += refs/debug.o +LIB_OBJS += refs/files-backend.o +LIB_OBJS += refs/reftable-backend.o +LIB_OBJS += refs/iterator.o +LIB_OBJS += refs/packed-backend.o +LIB_OBJS += refs/ref-cache.o +LIB_OBJS += refspec.o +LIB_OBJS += reftable/basics.o +LIB_OBJS += reftable/block.o +LIB_OBJS += reftable/blocksource.o +LIB_OBJS += reftable/error.o +LIB_OBJS += reftable/fsck.o +LIB_OBJS += reftable/iter.o +LIB_OBJS += reftable/merged.o +LIB_OBJS += reftable/pq.o +LIB_OBJS += reftable/record.o +LIB_OBJS += reftable/stack.o +LIB_OBJS += reftable/system.o +LIB_OBJS += reftable/table.o +LIB_OBJS += reftable/tree.o +LIB_OBJS += reftable/writer.o +LIB_OBJS += remote.o +LIB_OBJS += repack.o +LIB_OBJS += repack-cruft.o +LIB_OBJS += repack-filtered.o +LIB_OBJS += repack-geometry.o +LIB_OBJS += repack-midx.o +LIB_OBJS += repack-promisor.o +LIB_OBJS += replace-object.o +LIB_OBJS += repo-settings.o +LIB_OBJS += repository.o +LIB_OBJS += rerere.o +LIB_OBJS += reset.o +LIB_OBJS += resolve-undo.o +LIB_OBJS += revision.o +LIB_OBJS += run-command.o +LIB_OBJS += send-pack.o +LIB_OBJS += sequencer.o +LIB_OBJS += serve.o +LIB_OBJS += server-info.o +LIB_OBJS += setup.o +LIB_OBJS += shallow.o +LIB_OBJS += sideband.o +LIB_OBJS += sigchain.o +LIB_OBJS += sparse-index.o +LIB_OBJS += split-index.o +LIB_OBJS += stable-qsort.o +LIB_OBJS += statinfo.o +LIB_OBJS += strbuf.o +LIB_OBJS += streaming.o +LIB_OBJS += string-list.o +LIB_OBJS += strmap.o +LIB_OBJS += strvec.o +LIB_OBJS += sub-process.o +LIB_OBJS += submodule-config.o +LIB_OBJS += submodule.o +LIB_OBJS += symlinks.o +LIB_OBJS += tag.o +LIB_OBJS += tempfile.o +LIB_OBJS += thread-utils.o +LIB_OBJS += tmp-objdir.o +LIB_OBJS += trace.o +LIB_OBJS += trace2.o +LIB_OBJS += trace2/tr2_cfg.o +LIB_OBJS += trace2/tr2_cmd_name.o +LIB_OBJS += trace2/tr2_ctr.o +LIB_OBJS += trace2/tr2_dst.o +LIB_OBJS += trace2/tr2_sid.o +LIB_OBJS += trace2/tr2_sysenv.o +LIB_OBJS += trace2/tr2_tbuf.o +LIB_OBJS += trace2/tr2_tgt_event.o +LIB_OBJS += trace2/tr2_tgt_normal.o +LIB_OBJS += trace2/tr2_tgt_perf.o +LIB_OBJS += 
trace2/tr2_tls.o +LIB_OBJS += trace2/tr2_tmr.o +LIB_OBJS += trailer.o +LIB_OBJS += transport-helper.o +LIB_OBJS += transport.o +LIB_OBJS += tree-diff.o +LIB_OBJS += tree-walk.o +LIB_OBJS += tree.o +LIB_OBJS += unpack-trees.o +LIB_OBJS += upload-pack.o +LIB_OBJS += url.o +LIB_OBJS += urlmatch.o +LIB_OBJS += usage.o +LIB_OBJS += userdiff.o +LIB_OBJS += utf8.o +ifndef WITH_RUST +LIB_OBJS += varint.o +endif +LIB_OBJS += version.o +LIB_OBJS += versioncmp.o +LIB_OBJS += walker.o +LIB_OBJS += wildmatch.o +LIB_OBJS += worktree.o +LIB_OBJS += wrapper.o +LIB_OBJS += write-or-die.o +LIB_OBJS += ws.o +LIB_OBJS += wt-status.o +LIB_OBJS += xdiff-interface.o +LIB_OBJS += xdiff/xdiffi.o +LIB_OBJS += xdiff/xemit.o +LIB_OBJS += xdiff/xhistogram.o +LIB_OBJS += xdiff/xmerge.o +LIB_OBJS += xdiff/xpatience.o +LIB_OBJS += xdiff/xprepare.o +LIB_OBJS += xdiff/xutils.o + +BUILTIN_OBJS += builtin/add.o +BUILTIN_OBJS += builtin/am.o +BUILTIN_OBJS += builtin/annotate.o +BUILTIN_OBJS += builtin/apply.o +BUILTIN_OBJS += builtin/archive.o +BUILTIN_OBJS += builtin/backfill.o +BUILTIN_OBJS += builtin/bisect.o +BUILTIN_OBJS += builtin/blame.o +BUILTIN_OBJS += builtin/branch.o +BUILTIN_OBJS += builtin/bugreport.o +BUILTIN_OBJS += builtin/bundle.o +BUILTIN_OBJS += builtin/cat-file.o +BUILTIN_OBJS += builtin/check-attr.o +BUILTIN_OBJS += builtin/check-ignore.o +BUILTIN_OBJS += builtin/check-mailmap.o +BUILTIN_OBJS += builtin/check-ref-format.o +BUILTIN_OBJS += builtin/checkout--worker.o +BUILTIN_OBJS += builtin/checkout-index.o +BUILTIN_OBJS += builtin/checkout.o +BUILTIN_OBJS += builtin/clean.o +BUILTIN_OBJS += builtin/clone.o +BUILTIN_OBJS += builtin/column.o +BUILTIN_OBJS += builtin/commit-graph.o +BUILTIN_OBJS += builtin/commit-tree.o +BUILTIN_OBJS += builtin/commit.o +BUILTIN_OBJS += builtin/config.o +BUILTIN_OBJS += builtin/count-objects.o +BUILTIN_OBJS += builtin/credential-cache--daemon.o +BUILTIN_OBJS += builtin/credential-cache.o +BUILTIN_OBJS += builtin/credential-store.o +BUILTIN_OBJS += builtin/credential.o +BUILTIN_OBJS += builtin/describe.o +BUILTIN_OBJS += builtin/diagnose.o +BUILTIN_OBJS += builtin/diff-files.o +BUILTIN_OBJS += builtin/diff-index.o +BUILTIN_OBJS += builtin/diff-pairs.o +BUILTIN_OBJS += builtin/diff-tree.o +BUILTIN_OBJS += builtin/diff.o +BUILTIN_OBJS += builtin/difftool.o +BUILTIN_OBJS += builtin/fast-export.o +BUILTIN_OBJS += builtin/fast-import.o +BUILTIN_OBJS += builtin/fetch-pack.o +BUILTIN_OBJS += builtin/fetch.o +BUILTIN_OBJS += builtin/fmt-merge-msg.o +BUILTIN_OBJS += builtin/for-each-ref.o +BUILTIN_OBJS += builtin/for-each-repo.o +BUILTIN_OBJS += builtin/fsck.o +BUILTIN_OBJS += builtin/fsmonitor--daemon.o +BUILTIN_OBJS += builtin/gc.o +BUILTIN_OBJS += builtin/get-tar-commit-id.o +BUILTIN_OBJS += builtin/grep.o +BUILTIN_OBJS += builtin/hash-object.o +BUILTIN_OBJS += builtin/help.o +BUILTIN_OBJS += builtin/hook.o +BUILTIN_OBJS += builtin/index-pack.o +BUILTIN_OBJS += builtin/init-db.o +BUILTIN_OBJS += builtin/interpret-trailers.o +BUILTIN_OBJS += builtin/last-modified.o +BUILTIN_OBJS += builtin/log.o +BUILTIN_OBJS += builtin/ls-files.o +BUILTIN_OBJS += builtin/ls-remote.o +BUILTIN_OBJS += builtin/ls-tree.o +BUILTIN_OBJS += builtin/mailinfo.o +BUILTIN_OBJS += builtin/mailsplit.o +BUILTIN_OBJS += builtin/merge-base.o +BUILTIN_OBJS += builtin/merge-file.o +BUILTIN_OBJS += builtin/merge-index.o +BUILTIN_OBJS += builtin/merge-ours.o +BUILTIN_OBJS += builtin/merge-recursive.o +BUILTIN_OBJS += builtin/merge-tree.o +BUILTIN_OBJS += builtin/merge.o +BUILTIN_OBJS += builtin/mktag.o +BUILTIN_OBJS 
+= builtin/mktree.o +BUILTIN_OBJS += builtin/multi-pack-index.o +BUILTIN_OBJS += builtin/mv.o +BUILTIN_OBJS += builtin/name-rev.o +BUILTIN_OBJS += builtin/notes.o +BUILTIN_OBJS += builtin/pack-objects.o +ifndef WITH_BREAKING_CHANGES +BUILTIN_OBJS += builtin/pack-redundant.o +endif +BUILTIN_OBJS += builtin/pack-refs.o +BUILTIN_OBJS += builtin/patch-id.o +BUILTIN_OBJS += builtin/prune-packed.o +BUILTIN_OBJS += builtin/prune.o +BUILTIN_OBJS += builtin/pull.o +BUILTIN_OBJS += builtin/push.o +BUILTIN_OBJS += builtin/range-diff.o +BUILTIN_OBJS += builtin/read-tree.o +BUILTIN_OBJS += builtin/rebase.o +BUILTIN_OBJS += builtin/receive-pack.o +BUILTIN_OBJS += builtin/reflog.o +BUILTIN_OBJS += builtin/refs.o +BUILTIN_OBJS += builtin/remote-ext.o +BUILTIN_OBJS += builtin/remote-fd.o +BUILTIN_OBJS += builtin/remote.o +BUILTIN_OBJS += builtin/repack.o +BUILTIN_OBJS += builtin/replace.o +BUILTIN_OBJS += builtin/replay.o +BUILTIN_OBJS += builtin/repo.o +BUILTIN_OBJS += builtin/rerere.o +BUILTIN_OBJS += builtin/reset.o +BUILTIN_OBJS += builtin/rev-list.o +BUILTIN_OBJS += builtin/rev-parse.o +BUILTIN_OBJS += builtin/revert.o +BUILTIN_OBJS += builtin/rm.o +BUILTIN_OBJS += builtin/send-pack.o +BUILTIN_OBJS += builtin/shortlog.o +BUILTIN_OBJS += builtin/show-branch.o +BUILTIN_OBJS += builtin/show-index.o +BUILTIN_OBJS += builtin/show-ref.o +BUILTIN_OBJS += builtin/sparse-checkout.o +BUILTIN_OBJS += builtin/stash.o +BUILTIN_OBJS += builtin/stripspace.o +BUILTIN_OBJS += builtin/submodule--helper.o +BUILTIN_OBJS += builtin/symbolic-ref.o +BUILTIN_OBJS += builtin/tag.o +BUILTIN_OBJS += builtin/unpack-file.o +BUILTIN_OBJS += builtin/unpack-objects.o +BUILTIN_OBJS += builtin/update-index.o +BUILTIN_OBJS += builtin/update-ref.o +BUILTIN_OBJS += builtin/update-server-info.o +BUILTIN_OBJS += builtin/upload-archive.o +BUILTIN_OBJS += builtin/upload-pack.o +BUILTIN_OBJS += builtin/var.o +BUILTIN_OBJS += builtin/verify-commit.o +BUILTIN_OBJS += builtin/verify-pack.o +BUILTIN_OBJS += builtin/verify-tag.o +BUILTIN_OBJS += builtin/worktree.o +BUILTIN_OBJS += builtin/write-tree.o + +# THIRD_PARTY_SOURCES is a list of patterns compatible with the +# $(filter) and $(filter-out) family of functions. They specify source +# files which are taken from some third-party source where we want to be +# less strict about issues such as coding style so we don't diverge from +# upstream unnecessarily (making merging in future changes easier). 
+THIRD_PARTY_SOURCES += compat/inet_ntop.c +THIRD_PARTY_SOURCES += compat/inet_pton.c +THIRD_PARTY_SOURCES += compat/nedmalloc/% +THIRD_PARTY_SOURCES += compat/obstack.% +THIRD_PARTY_SOURCES += compat/poll/% +THIRD_PARTY_SOURCES += compat/regex/% +THIRD_PARTY_SOURCES += sha1collisiondetection/% +THIRD_PARTY_SOURCES += sha1dc/% +THIRD_PARTY_SOURCES += $(UNIT_TEST_DIR)/clar/% +THIRD_PARTY_SOURCES += $(UNIT_TEST_DIR)/clar/clar/% + +CLAR_TEST_SUITES += u-ctype +CLAR_TEST_SUITES += u-dir +CLAR_TEST_SUITES += u-example-decorate +CLAR_TEST_SUITES += u-hash +CLAR_TEST_SUITES += u-hashmap +CLAR_TEST_SUITES += u-mem-pool +CLAR_TEST_SUITES += u-oid-array +CLAR_TEST_SUITES += u-oidmap +CLAR_TEST_SUITES += u-oidtree +CLAR_TEST_SUITES += u-prio-queue +CLAR_TEST_SUITES += u-reftable-basics +CLAR_TEST_SUITES += u-reftable-block +CLAR_TEST_SUITES += u-reftable-merged +CLAR_TEST_SUITES += u-reftable-pq +CLAR_TEST_SUITES += u-reftable-readwrite +CLAR_TEST_SUITES += u-reftable-stack +CLAR_TEST_SUITES += u-reftable-table +CLAR_TEST_SUITES += u-reftable-tree +CLAR_TEST_SUITES += u-strbuf +CLAR_TEST_SUITES += u-strcmp-offset +CLAR_TEST_SUITES += u-string-list +CLAR_TEST_SUITES += u-strvec +CLAR_TEST_SUITES += u-trailer +CLAR_TEST_SUITES += u-urlmatch-normalization +CLAR_TEST_SUITES += u-utf8-width +CLAR_TEST_PROG = $(UNIT_TEST_BIN)/unit-tests$(X) +CLAR_TEST_OBJS = $(patsubst %,$(UNIT_TEST_DIR)/%.o,$(CLAR_TEST_SUITES)) +CLAR_TEST_OBJS += $(UNIT_TEST_DIR)/clar/clar.o +CLAR_TEST_OBJS += $(UNIT_TEST_DIR)/lib-oid.o +CLAR_TEST_OBJS += $(UNIT_TEST_DIR)/lib-reftable.o +CLAR_TEST_OBJS += $(UNIT_TEST_DIR)/unit-test.o + +UNIT_TEST_OBJS += $(UNIT_TEST_DIR)/test-lib.o + +RUST_SOURCES += src/lib.rs +RUST_SOURCES += src/varint.rs + +GIT-VERSION-FILE: FORCE + @OLD=$$(cat $@ 2>/dev/null || :) && \ + $(call version_gen,"$(shell pwd)",GIT-VERSION-FILE.in,$@) && \ + NEW=$$(cat $@ 2>/dev/null || :) && \ + if test "$$OLD" != "$$NEW"; then echo "$$NEW" >&2; fi + +# We need to set GIT_VERSION_OVERRIDE before including the version file as +# otherwise any user-provided value for GIT_VERSION would have been overridden +# already. +GIT_VERSION_OVERRIDE := $(GIT_VERSION) +-include GIT-VERSION-FILE + +# what 'all' will build and 'install' will install in gitexecdir, +# excluding programs for built-in commands +ALL_PROGRAMS = $(PROGRAMS) $(SCRIPTS) +ALL_COMMANDS_TO_INSTALL = $(ALL_PROGRAMS) +ifeq (,$(SKIP_DASHED_BUILT_INS)) +ALL_COMMANDS_TO_INSTALL += $(BUILT_INS) +else +# git-upload-pack, git-receive-pack and git-upload-archive are special: they +# are _expected_ to be present in the `bin/` directory in their dashed form. 
+ALL_COMMANDS_TO_INSTALL += git-receive-pack$(X) +ALL_COMMANDS_TO_INSTALL += git-upload-archive$(X) +ALL_COMMANDS_TO_INSTALL += git-upload-pack$(X) +endif + +ALL_CFLAGS = $(DEVELOPER_CFLAGS) $(CPPFLAGS) $(CFLAGS) $(CFLAGS_APPEND) +ALL_LDFLAGS = $(LDFLAGS) $(LDFLAGS_APPEND) + +ifdef WITH_RUST +BASIC_CFLAGS += -DWITH_RUST +GITLIBS += $(RUST_LIB) +ifeq ($(uname_S),Windows) +EXTLIBS += -luserenv +endif +endif + +ifdef SANITIZE +SANITIZERS := $(foreach flag,$(subst $(comma),$(space),$(SANITIZE)),$(flag)) +BASIC_CFLAGS += -fsanitize=$(SANITIZE) -fno-sanitize-recover=$(SANITIZE) +BASIC_CFLAGS += -fno-omit-frame-pointer +ifneq ($(filter undefined,$(SANITIZERS)),) +BASIC_CFLAGS += -DSHA1DC_FORCE_ALIGNED_ACCESS +endif +ifneq ($(filter leak,$(SANITIZERS)),) +BASIC_CFLAGS += -O0 +SANITIZE_LEAK = YesCompiledWithIt +endif +ifneq ($(filter address,$(SANITIZERS)),) +NO_REGEX = NeededForASAN +SANITIZE_ADDRESS = YesCompiledWithIt +endif +endif + +ifndef sysconfdir +ifeq ($(prefix),/usr) +sysconfdir = /etc +else +sysconfdir = etc +endif +endif + +ifndef COMPUTE_HEADER_DEPENDENCIES +COMPUTE_HEADER_DEPENDENCIES = auto +endif + +ifeq ($(COMPUTE_HEADER_DEPENDENCIES),auto) +dep_check = $(shell $(CC) $(ALL_CFLAGS) \ + -Wno-pedantic \ + -c -MF /dev/null -MQ /dev/null -MMD -MP \ + -x c /dev/null -o /dev/null 2>&1; \ + echo $$?) +ifeq ($(dep_check),0) +override COMPUTE_HEADER_DEPENDENCIES = yes +else +override COMPUTE_HEADER_DEPENDENCIES = no +endif +endif + +ifeq ($(COMPUTE_HEADER_DEPENDENCIES),yes) +USE_COMPUTED_HEADER_DEPENDENCIES = YesPlease +else +ifneq ($(COMPUTE_HEADER_DEPENDENCIES),no) +$(error please set COMPUTE_HEADER_DEPENDENCIES to yes, no, or auto \ +(not "$(COMPUTE_HEADER_DEPENDENCIES)")) +endif +endif + +ifndef GENERATE_COMPILATION_DATABASE +GENERATE_COMPILATION_DATABASE = no +endif + +ifeq ($(GENERATE_COMPILATION_DATABASE),yes) +compdb_check = $(shell $(CC) $(ALL_CFLAGS) \ + -Wno-pedantic \ + -c -MJ /dev/null \ + -x c /dev/null -o /dev/null 2>&1; \ + echo $$?) 
+ifneq ($(compdb_check),0) +override GENERATE_COMPILATION_DATABASE = no +$(warning GENERATE_COMPILATION_DATABASE is set to "yes", but your compiler does not \ +support generating compilation database entries) +endif +else +ifneq ($(GENERATE_COMPILATION_DATABASE),no) +$(error please set GENERATE_COMPILATION_DATABASE to "yes" or "no" \ +(not "$(GENERATE_COMPILATION_DATABASE)")) +endif +endif + +ifdef SANE_TOOL_PATH +SANE_TOOL_PATH_SQ = $(subst ','\'',$(SANE_TOOL_PATH)) +BROKEN_PATH_FIX = s|^\# @BROKEN_PATH_FIX@$$|git_broken_path_fix "$(SANE_TOOL_PATH_SQ)"| +PATH := $(SANE_TOOL_PATH):${PATH} +else +BROKEN_PATH_FIX = /^\# @BROKEN_PATH_FIX@$$/d +endif + +ifeq (,$(HOST_CPU)) + BASIC_CFLAGS += -DGIT_HOST_CPU="\"$(firstword $(subst -, ,$(uname_M)))\"" +else + BASIC_CFLAGS += -DGIT_HOST_CPU="\"$(HOST_CPU)\"" +endif + +ifneq (,$(INLINE)) + BASIC_CFLAGS += -Dinline=$(INLINE) +endif + +ifneq (,$(SOCKLEN_T)) + BASIC_CFLAGS += -Dsocklen_t=$(SOCKLEN_T) +endif + +ifeq ($(uname_S),Darwin) + ifndef NO_FINK + ifeq ($(shell test -d /sw/lib && echo y),y) + BASIC_CFLAGS += -I/sw/include + BASIC_LDFLAGS += -L/sw/lib + endif + endif + ifndef NO_DARWIN_PORTS + ifeq ($(shell test -d /opt/local/lib && echo y),y) + BASIC_CFLAGS += -I/opt/local/include + BASIC_LDFLAGS += -L/opt/local/lib + endif + endif + ifndef NO_APPLE_COMMON_CRYPTO + NO_OPENSSL = YesPlease + APPLE_COMMON_CRYPTO = YesPlease + COMPAT_CFLAGS += -DAPPLE_COMMON_CRYPTO + endif + PTHREAD_LIBS = +endif + +ifdef NO_LIBGEN_H + COMPAT_CFLAGS += -DNO_LIBGEN_H + COMPAT_OBJS += compat/basename.o +endif + +ifdef USE_LIBPCRE1 +$(error The USE_LIBPCRE1 build option has been removed, use version 2 with USE_LIBPCRE) +endif + +USE_LIBPCRE2 ?= $(USE_LIBPCRE) + +ifneq (,$(USE_LIBPCRE2)) + BASIC_CFLAGS += -DUSE_LIBPCRE2 + EXTLIBS += -lpcre2-8 +endif + +ifdef LIBPCREDIR + BASIC_CFLAGS += -I$(LIBPCREDIR)/include + EXTLIBS += $(call libpath_template,$(LIBPCREDIR)/$(lib)) +endif + +ifdef HAVE_ALLOCA_H + BASIC_CFLAGS += -DHAVE_ALLOCA_H +endif + +IMAP_SEND_BUILDDEPS = +IMAP_SEND_LDFLAGS = + +ifdef NO_CURL + BASIC_CFLAGS += -DNO_CURL + REMOTE_CURL_PRIMARY = + REMOTE_CURL_ALIASES = + REMOTE_CURL_NAMES = + EXCLUDED_PROGRAMS += git-http-fetch git-http-push +else + ifdef CURLDIR + # Try "-Wl,-rpath=$(CURLDIR)/$(lib)" in such a case. 
+ CURL_CFLAGS = -I$(CURLDIR)/include + CURL_LIBCURL = $(call libpath_template,$(CURLDIR)/$(lib)) + else + CURL_CFLAGS = + CURL_LIBCURL = + endif + + ifndef CURL_LDFLAGS + CURL_LDFLAGS = $(eval CURL_LDFLAGS := $$(shell $$(CURL_CONFIG) --libs))$(CURL_LDFLAGS) + endif + CURL_LIBCURL += $(CURL_LDFLAGS) + + ifndef CURL_CFLAGS + CURL_CFLAGS = $(eval CURL_CFLAGS := $$(shell $$(CURL_CONFIG) --cflags))$(CURL_CFLAGS) + endif + BASIC_CFLAGS += $(CURL_CFLAGS) + + REMOTE_CURL_PRIMARY = git-remote-http$X + REMOTE_CURL_ALIASES = git-remote-https$X git-remote-ftp$X git-remote-ftps$X + REMOTE_CURL_NAMES = $(REMOTE_CURL_PRIMARY) $(REMOTE_CURL_ALIASES) + PROGRAM_OBJS += http-fetch.o + PROGRAMS += $(REMOTE_CURL_NAMES) + ifndef NO_EXPAT + PROGRAM_OBJS += http-push.o + endif + curl_check := $(shell (echo 072200; $(CURL_CONFIG) --vernum | sed -e '/^70[BC]/s/^/0/') 2>/dev/null | sort -r | sed -ne 2p) + ifeq "$(curl_check)" "072200" + USE_CURL_FOR_IMAP_SEND = YesPlease + endif + ifdef USE_CURL_FOR_IMAP_SEND + BASIC_CFLAGS += -DUSE_CURL_FOR_IMAP_SEND + IMAP_SEND_BUILDDEPS = http.o + IMAP_SEND_LDFLAGS += $(CURL_LIBCURL) + endif + ifndef NO_EXPAT + ifdef EXPATDIR + BASIC_CFLAGS += -I$(EXPATDIR)/include + EXPAT_LIBEXPAT = $(call libpath_template,$(EXPATDIR)/$(lib)) -lexpat + else + EXPAT_LIBEXPAT = -lexpat + endif + ifdef EXPAT_NEEDS_XMLPARSE_H + BASIC_CFLAGS += -DEXPAT_NEEDS_XMLPARSE_H + endif + endif +endif +IMAP_SEND_LDFLAGS += $(OPENSSL_LINK) $(OPENSSL_LIBSSL) $(LIB_4_CRYPTO) + +ifdef ZLIB_NG + BASIC_CFLAGS += -DHAVE_ZLIB_NG + ifdef ZLIB_NG_PATH + BASIC_CFLAGS += -I$(ZLIB_NG_PATH)/include + EXTLIBS += $(call libpath_template,$(ZLIB_NG_PATH)/$(lib)) + endif + EXTLIBS += -lz-ng +else + ifdef ZLIB_PATH + BASIC_CFLAGS += -I$(ZLIB_PATH)/include + EXTLIBS += $(call libpath_template,$(ZLIB_PATH)/$(lib)) + endif + EXTLIBS += -lz +endif + +ifndef NO_OPENSSL + OPENSSL_LIBSSL = -lssl + ifdef OPENSSLDIR + BASIC_CFLAGS += -I$(OPENSSLDIR)/include + OPENSSL_LINK = $(call libpath_template,$(OPENSSLDIR)/$(lib)) + else + OPENSSL_LINK = + endif + ifdef NEEDS_CRYPTO_WITH_SSL + OPENSSL_LIBSSL += -lcrypto + endif +else + BASIC_CFLAGS += -DNO_OPENSSL + OPENSSL_LIBSSL = +endif +ifdef NO_OPENSSL + LIB_4_CRYPTO = +else +ifdef NEEDS_SSL_WITH_CRYPTO + LIB_4_CRYPTO = $(OPENSSL_LINK) -lcrypto -lssl +else + LIB_4_CRYPTO = $(OPENSSL_LINK) -lcrypto +endif +ifdef APPLE_COMMON_CRYPTO + LIB_4_CRYPTO += -framework Security -framework CoreFoundation +endif +endif +ifndef NO_ICONV + ifdef NEEDS_LIBICONV + ifdef ICONVDIR + BASIC_CFLAGS += -I$(ICONVDIR)/include + ICONV_LINK = $(call libpath_template,$(ICONVDIR)/$(lib)) + else + ICONV_LINK = + endif + ifdef NEEDS_LIBINTL_BEFORE_LIBICONV + ICONV_LINK += -lintl + endif + EXTLIBS += $(ICONV_LINK) -liconv + endif +endif +ifdef ICONV_OMITS_BOM + BASIC_CFLAGS += -DICONV_OMITS_BOM +endif +ifdef NEEDS_LIBGEN + EXTLIBS += -lgen +endif +ifndef NO_GETTEXT +ifndef LIBC_CONTAINS_LIBINTL + EXTLIBS += -lintl +endif +endif +ifdef NEEDS_SOCKET + EXTLIBS += -lsocket +endif +ifdef NEEDS_NSL + EXTLIBS += -lnsl +endif +ifdef NEEDS_RESOLV + EXTLIBS += -lresolv +endif +ifdef NO_D_TYPE_IN_DIRENT + BASIC_CFLAGS += -DNO_D_TYPE_IN_DIRENT +endif +ifdef NO_GECOS_IN_PWENT + BASIC_CFLAGS += -DNO_GECOS_IN_PWENT +endif +ifdef NO_ST_BLOCKS_IN_STRUCT_STAT + BASIC_CFLAGS += -DNO_ST_BLOCKS_IN_STRUCT_STAT +endif +ifdef USE_NSEC + BASIC_CFLAGS += -DUSE_NSEC +endif +ifdef USE_ST_TIMESPEC + BASIC_CFLAGS += -DUSE_ST_TIMESPEC +endif +ifdef NO_NORETURN + BASIC_CFLAGS += -DNO_NORETURN +endif +ifdef NO_NSEC + BASIC_CFLAGS += -DNO_NSEC +endif +ifdef 
SNPRINTF_RETURNS_BOGUS + COMPAT_CFLAGS += -DSNPRINTF_RETURNS_BOGUS + COMPAT_OBJS += compat/snprintf.o +endif +ifdef FREAD_READS_DIRECTORIES + COMPAT_CFLAGS += -DFREAD_READS_DIRECTORIES + COMPAT_OBJS += compat/fopen.o +endif +ifdef OPEN_RETURNS_EINTR + COMPAT_CFLAGS += -DOPEN_RETURNS_EINTR +endif +ifdef NO_SYMLINK_HEAD + BASIC_CFLAGS += -DNO_SYMLINK_HEAD +endif +ifdef NO_GETTEXT + BASIC_CFLAGS += -DNO_GETTEXT + USE_GETTEXT_SCHEME ?= fallthrough +endif +ifdef NO_POLL + NO_POLL_H = YesPlease + NO_SYS_POLL_H = YesPlease + COMPAT_CFLAGS += -DNO_POLL -Icompat/poll + COMPAT_OBJS += compat/poll/poll.o +endif +ifdef NO_STRCASESTR + COMPAT_CFLAGS += -DNO_STRCASESTR + COMPAT_OBJS += compat/strcasestr.o +endif +ifdef NO_STRLCPY + COMPAT_CFLAGS += -DNO_STRLCPY + COMPAT_OBJS += compat/strlcpy.o +endif +ifdef NO_STRTOUMAX + COMPAT_CFLAGS += -DNO_STRTOUMAX + COMPAT_OBJS += compat/strtoumax.o compat/strtoimax.o +endif +ifdef NO_STRTOULL + COMPAT_CFLAGS += -DNO_STRTOULL +endif +ifdef NO_SETENV + COMPAT_CFLAGS += -DNO_SETENV + COMPAT_OBJS += compat/setenv.o +endif +ifdef NO_MKDTEMP + COMPAT_CFLAGS += -DNO_MKDTEMP + COMPAT_OBJS += compat/mkdtemp.o +endif +ifdef MKDIR_WO_TRAILING_SLASH + COMPAT_CFLAGS += -DMKDIR_WO_TRAILING_SLASH + COMPAT_OBJS += compat/mkdir.o +endif +ifdef NO_UNSETENV + COMPAT_CFLAGS += -DNO_UNSETENV + COMPAT_OBJS += compat/unsetenv.o +endif +ifdef NO_SYS_SELECT_H + BASIC_CFLAGS += -DNO_SYS_SELECT_H +endif +ifdef NO_POLL_H + BASIC_CFLAGS += -DNO_POLL_H +endif +ifdef NO_SYS_POLL_H + BASIC_CFLAGS += -DNO_SYS_POLL_H +endif +ifdef NEEDS_SYS_PARAM_H + BASIC_CFLAGS += -DNEEDS_SYS_PARAM_H +endif +ifdef NO_INTTYPES_H + BASIC_CFLAGS += -DNO_INTTYPES_H +endif +ifdef NO_INITGROUPS + BASIC_CFLAGS += -DNO_INITGROUPS +endif +ifdef NO_MMAP + COMPAT_CFLAGS += -DNO_MMAP + COMPAT_OBJS += compat/mmap.o +else + ifdef USE_WIN32_MMAP + COMPAT_CFLAGS += -DUSE_WIN32_MMAP + COMPAT_OBJS += compat/win32mmap.o + endif +endif +ifdef MMAP_PREVENTS_DELETE + BASIC_CFLAGS += -DMMAP_PREVENTS_DELETE +endif +ifdef OBJECT_CREATION_USES_RENAMES + COMPAT_CFLAGS += -DOBJECT_CREATION_MODE=1 +endif +ifdef NO_STRUCT_ITIMERVAL + COMPAT_CFLAGS += -DNO_STRUCT_ITIMERVAL + NO_SETITIMER = YesPlease +endif +ifdef NO_SETITIMER + COMPAT_CFLAGS += -DNO_SETITIMER +endif +ifdef NO_PREAD + COMPAT_CFLAGS += -DNO_PREAD + COMPAT_OBJS += compat/pread.o +endif +ifdef NO_FAST_WORKING_DIRECTORY + BASIC_CFLAGS += -DNO_FAST_WORKING_DIRECTORY +endif +ifdef NO_TRUSTABLE_FILEMODE + BASIC_CFLAGS += -DNO_TRUSTABLE_FILEMODE +endif +ifdef NEEDS_MODE_TRANSLATION + COMPAT_CFLAGS += -DNEEDS_MODE_TRANSLATION + COMPAT_OBJS += compat/stat.o +endif +ifdef NO_IPV6 + BASIC_CFLAGS += -DNO_IPV6 +endif +ifdef NO_INTPTR_T + COMPAT_CFLAGS += -DNO_INTPTR_T +endif +ifdef NO_SOCKADDR_STORAGE +ifdef NO_IPV6 + BASIC_CFLAGS += -Dsockaddr_storage=sockaddr_in +else + BASIC_CFLAGS += -Dsockaddr_storage=sockaddr_in6 +endif +endif +ifdef NO_INET_NTOP + LIB_OBJS += compat/inet_ntop.o + BASIC_CFLAGS += -DNO_INET_NTOP +endif +ifdef NO_INET_PTON + LIB_OBJS += compat/inet_pton.o + BASIC_CFLAGS += -DNO_INET_PTON +endif +ifdef NO_UNIX_SOCKETS + BASIC_CFLAGS += -DNO_UNIX_SOCKETS +else + LIB_OBJS += unix-socket.o + LIB_OBJS += unix-stream-server.o +endif + +# Simple IPC requires threads and platform-specific IPC support. +# Only platforms that have both should include these source files +# in the build. +# +# On Windows-based systems, Simple IPC requires threads and Windows +# Named Pipes. These are always available, so Simple IPC support +# is optional. 
+# +# On Unix-based systems, Simple IPC requires pthreads and Unix +# domain sockets. So support is only enabled when both are present. +# +ifdef USE_WIN32_IPC + BASIC_CFLAGS += -DSUPPORTS_SIMPLE_IPC + LIB_OBJS += compat/simple-ipc/ipc-shared.o + LIB_OBJS += compat/simple-ipc/ipc-win32.o +else +ifndef NO_PTHREADS +ifndef NO_UNIX_SOCKETS + BASIC_CFLAGS += -DSUPPORTS_SIMPLE_IPC + LIB_OBJS += compat/simple-ipc/ipc-shared.o + LIB_OBJS += compat/simple-ipc/ipc-unix-socket.o +endif +endif +endif + +ifdef NO_ICONV + BASIC_CFLAGS += -DNO_ICONV +endif + +ifdef OLD_ICONV + BASIC_CFLAGS += -DOLD_ICONV +endif + +ifdef NO_DEFLATE_BOUND + BASIC_CFLAGS += -DNO_DEFLATE_BOUND +endif + +ifdef NO_POSIX_GOODIES + BASIC_CFLAGS += -DNO_POSIX_GOODIES +endif + +ifdef APPLE_COMMON_CRYPTO_SHA1 + # Apple CommonCrypto requires chunking + SHA1_MAX_BLOCK_SIZE = 1024L*1024L*1024L +endif + +ifdef PPC_SHA1 +$(error the PPC_SHA1 flag has been removed along with the PowerPC-specific SHA-1 implementation.) +endif + +ifdef OPENSSL_SHA1 + EXTLIBS += $(LIB_4_CRYPTO) + BASIC_CFLAGS += -DSHA1_OPENSSL +else +ifdef BLK_SHA1 + LIB_OBJS += block-sha1/sha1.o + BASIC_CFLAGS += -DSHA1_BLK +else +ifdef APPLE_COMMON_CRYPTO_SHA1 + COMPAT_CFLAGS += -DCOMMON_DIGEST_FOR_OPENSSL + BASIC_CFLAGS += -DSHA1_APPLE +else + BASIC_CFLAGS += -DSHA1_DC + LIB_OBJS += sha1dc_git.o +ifdef DC_SHA1_EXTERNAL + ifdef DC_SHA1_SUBMODULE + ifneq ($(DC_SHA1_SUBMODULE),auto) +$(error Only set DC_SHA1_EXTERNAL or DC_SHA1_SUBMODULE, not both) + endif + endif + BASIC_CFLAGS += -DDC_SHA1_EXTERNAL + EXTLIBS += -lsha1detectcoll +else +ifdef DC_SHA1_SUBMODULE + LIB_OBJS += sha1collisiondetection/lib/sha1.o + LIB_OBJS += sha1collisiondetection/lib/ubc_check.o + BASIC_CFLAGS += -DDC_SHA1_SUBMODULE +else + LIB_OBJS += sha1dc/sha1.o + LIB_OBJS += sha1dc/ubc_check.o +endif + BASIC_CFLAGS += \ + -DSHA1DC_NO_STANDARD_INCLUDES \ + -DSHA1DC_INIT_SAFE_HASH_DEFAULT=0 \ + -DSHA1DC_CUSTOM_INCLUDE_SHA1_C="\"git-compat-util.h\"" \ + -DSHA1DC_CUSTOM_INCLUDE_UBC_CHECK_C="\"git-compat-util.h\"" +endif +endif +endif +endif + +ifdef OPENSSL_SHA1_UNSAFE +ifndef OPENSSL_SHA1 + EXTLIBS += $(LIB_4_CRYPTO) + BASIC_CFLAGS += -DSHA1_OPENSSL_UNSAFE +endif +else +ifdef BLK_SHA1_UNSAFE +ifndef BLK_SHA1 + LIB_OBJS += block-sha1/sha1.o + BASIC_CFLAGS += -DSHA1_BLK_UNSAFE +endif +else +ifdef APPLE_COMMON_CRYPTO_SHA1_UNSAFE +ifndef APPLE_COMMON_CRYPTO_SHA1 + COMPAT_CFLAGS += -DCOMMON_DIGEST_FOR_OPENSSL + BASIC_CFLAGS += -DSHA1_APPLE_UNSAFE +endif +endif +endif +endif + +ifdef OPENSSL_SHA256 + EXTLIBS += $(LIB_4_CRYPTO) + BASIC_CFLAGS += -DSHA256_OPENSSL +else +ifdef NETTLE_SHA256 + BASIC_CFLAGS += -DSHA256_NETTLE + EXTLIBS += -lnettle +else +ifdef GCRYPT_SHA256 + BASIC_CFLAGS += -DSHA256_GCRYPT + EXTLIBS += -lgcrypt +else + LIB_OBJS += sha256/block/sha256.o + BASIC_CFLAGS += -DSHA256_BLK +endif +endif +endif + +ifdef SHA1_MAX_BLOCK_SIZE + LIB_OBJS += compat/sha1-chunked.o + BASIC_CFLAGS += -DSHA1_MAX_BLOCK_SIZE="$(SHA1_MAX_BLOCK_SIZE)" +endif +ifdef NO_HSTRERROR + COMPAT_CFLAGS += -DNO_HSTRERROR + COMPAT_OBJS += compat/hstrerror.o +endif +ifdef NO_MEMMEM + COMPAT_CFLAGS += -DNO_MEMMEM + COMPAT_OBJS += compat/memmem.o +endif +ifdef NO_GETPAGESIZE + COMPAT_CFLAGS += -DNO_GETPAGESIZE +endif +ifdef INTERNAL_QSORT + COMPAT_CFLAGS += -DINTERNAL_QSORT +endif +ifdef HAVE_ISO_QSORT_S + COMPAT_CFLAGS += -DHAVE_ISO_QSORT_S +else + COMPAT_OBJS += compat/qsort_s.o +endif +ifdef RUNTIME_PREFIX + COMPAT_CFLAGS += -DRUNTIME_PREFIX +endif + +ifdef NO_PTHREADS + BASIC_CFLAGS += -DNO_PTHREADS +else + BASIC_CFLAGS += 
$(PTHREAD_CFLAGS) + EXTLIBS += $(PTHREAD_LIBS) +endif + +ifdef HAVE_PATHS_H + BASIC_CFLAGS += -DHAVE_PATHS_H +endif + +ifdef HAVE_LIBCHARSET_H + BASIC_CFLAGS += -DHAVE_LIBCHARSET_H + EXTLIBS += $(CHARSET_LIB) +endif + +ifdef HAVE_STRINGS_H + BASIC_CFLAGS += -DHAVE_STRINGS_H +endif + +ifdef HAVE_DEV_TTY + BASIC_CFLAGS += -DHAVE_DEV_TTY +endif + +ifdef DIR_HAS_BSD_GROUP_SEMANTICS + COMPAT_CFLAGS += -DDIR_HAS_BSD_GROUP_SEMANTICS +endif +ifdef UNRELIABLE_FSTAT + BASIC_CFLAGS += -DUNRELIABLE_FSTAT +endif +ifdef NO_REGEX + COMPAT_CFLAGS += -Icompat/regex + COMPAT_OBJS += compat/regex/regex.o +else +ifdef USE_ENHANCED_BASIC_REGULAR_EXPRESSIONS + COMPAT_CFLAGS += -DUSE_ENHANCED_BASIC_REGULAR_EXPRESSIONS + COMPAT_OBJS += compat/regcomp_enhanced.o +endif +endif +ifdef NATIVE_CRLF + BASIC_CFLAGS += -DNATIVE_CRLF +endif + +ifdef USE_NED_ALLOCATOR + COMPAT_CFLAGS += -Icompat/nedmalloc + COMPAT_OBJS += compat/nedmalloc/nedmalloc.o + OVERRIDE_STRDUP = YesPlease +endif + +ifdef OVERRIDE_STRDUP + COMPAT_CFLAGS += -DOVERRIDE_STRDUP + COMPAT_OBJS += compat/strdup.o +endif + +ifdef GIT_TEST_CMP_USE_COPIED_CONTEXT + export GIT_TEST_CMP_USE_COPIED_CONTEXT +endif + +ifndef NO_MSGFMT_EXTENDED_OPTIONS + MSGFMT += --check +endif + +ifdef HAVE_CLOCK_GETTIME + BASIC_CFLAGS += -DHAVE_CLOCK_GETTIME +endif + +ifdef HAVE_CLOCK_MONOTONIC + BASIC_CFLAGS += -DHAVE_CLOCK_MONOTONIC +endif + +ifdef HAVE_SYNC_FILE_RANGE + BASIC_CFLAGS += -DHAVE_SYNC_FILE_RANGE +endif + +ifdef HAVE_SYSINFO + BASIC_CFLAGS += -DHAVE_SYSINFO +endif + +ifdef HAVE_BSD_SYSCTL + BASIC_CFLAGS += -DHAVE_BSD_SYSCTL +endif + +ifdef HAVE_GETDELIM + BASIC_CFLAGS += -DHAVE_GETDELIM +endif + +ifneq ($(findstring arc4random,$(CSPRNG_METHOD)),) + BASIC_CFLAGS += -DHAVE_ARC4RANDOM +endif + +ifneq ($(findstring libbsd,$(CSPRNG_METHOD)),) + BASIC_CFLAGS += -DHAVE_ARC4RANDOM_LIBBSD + EXTLIBS += -lbsd +endif + +ifneq ($(findstring getrandom,$(CSPRNG_METHOD)),) + BASIC_CFLAGS += -DHAVE_GETRANDOM +endif + +ifneq ($(findstring getentropy,$(CSPRNG_METHOD)),) + BASIC_CFLAGS += -DHAVE_GETENTROPY +endif + +ifneq ($(findstring rtlgenrandom,$(CSPRNG_METHOD)),) + BASIC_CFLAGS += -DHAVE_RTLGENRANDOM +endif + +ifneq ($(findstring openssl,$(CSPRNG_METHOD)),) + BASIC_CFLAGS += -DHAVE_OPENSSL_CSPRNG + EXTLIBS += -lcrypto -lssl +endif + +ifndef HAVE_PLATFORM_PROCINFO + COMPAT_OBJS += compat/stub/procinfo.o +endif + +ifdef RUNTIME_PREFIX + + ifdef HAVE_BSD_KERN_PROC_SYSCTL + BASIC_CFLAGS += -DHAVE_BSD_KERN_PROC_SYSCTL + endif + + ifneq ($(PROCFS_EXECUTABLE_PATH),) + pep_SQ = $(subst ','\'',$(PROCFS_EXECUTABLE_PATH)) + BASIC_CFLAGS += '-DPROCFS_EXECUTABLE_PATH="$(pep_SQ)"' + endif + + ifdef HAVE_NS_GET_EXECUTABLE_PATH + BASIC_CFLAGS += -DHAVE_NS_GET_EXECUTABLE_PATH + endif + + ifdef HAVE_ZOS_GET_EXECUTABLE_PATH + BASIC_CFLAGS += -DHAVE_ZOS_GET_EXECUTABLE_PATH + endif + + ifdef HAVE_WPGMPTR + BASIC_CFLAGS += -DHAVE_WPGMPTR + endif + +endif + +ifdef FILENO_IS_A_MACRO + COMPAT_CFLAGS += -DFILENO_IS_A_MACRO + COMPAT_OBJS += compat/fileno.o +endif + +ifdef NEED_ACCESS_ROOT_HANDLER + COMPAT_CFLAGS += -DNEED_ACCESS_ROOT_HANDLER + COMPAT_OBJS += compat/access.o +endif + +ifdef FSMONITOR_DAEMON_BACKEND + COMPAT_CFLAGS += -DHAVE_FSMONITOR_DAEMON_BACKEND + COMPAT_OBJS += compat/fsmonitor/fsm-listen-$(FSMONITOR_DAEMON_BACKEND).o + COMPAT_OBJS += compat/fsmonitor/fsm-health-$(FSMONITOR_DAEMON_BACKEND).o + COMPAT_OBJS += compat/fsmonitor/fsm-ipc-$(FSMONITOR_DAEMON_BACKEND).o +endif + +ifdef FSMONITOR_OS_SETTINGS + COMPAT_CFLAGS += -DHAVE_FSMONITOR_OS_SETTINGS + COMPAT_OBJS += 
compat/fsmonitor/fsm-settings-$(FSMONITOR_OS_SETTINGS).o + COMPAT_OBJS += compat/fsmonitor/fsm-path-utils-$(FSMONITOR_OS_SETTINGS).o +endif + +ifdef WITH_BREAKING_CHANGES + BASIC_CFLAGS += -DWITH_BREAKING_CHANGES +endif + +ifdef CHECK_ASSERTION_SIDE_EFFECTS + BASIC_CFLAGS += -DCHECK_ASSERTION_SIDE_EFFECTS +endif + +ifdef INCLUDE_LIBGIT_RS + # Enable symbol hiding in contrib/libgit-sys/libgitpub.a without making + # us rebuild the whole tree every time we run a Rust build. + BASIC_CFLAGS += -fvisibility=hidden +endif + +ifeq ($(TCLTK_PATH),) +NO_TCLTK = NoThanks +endif + +ifeq ($(PERL_PATH),) +NO_PERL = NoThanks +endif + +ifeq ($(PYTHON_PATH),) +NO_PYTHON = NoThanks +endif + +ifndef PAGER_ENV +PAGER_ENV = LESS=FRX LV=-c +endif + +ifdef NO_INSTALL_HARDLINKS + export NO_INSTALL_HARDLINKS +endif + +### profile feedback build +# + +# Can adjust this to be a global directory if you want to do extended +# data gathering +PROFILE_DIR := $(CURDIR) + +ifeq ("$(PROFILE)","GEN") + BASIC_CFLAGS += -fprofile-generate=$(PROFILE_DIR) -DNO_NORETURN=1 + EXTLIBS += -lgcov + export CCACHE_DISABLE = t + V = 1 +else +ifneq ("$(PROFILE)","") + BASIC_CFLAGS += -fprofile-use=$(PROFILE_DIR) -fprofile-correction -DNO_NORETURN=1 + export CCACHE_DISABLE = t + V = 1 +endif +endif + +# Shell quote (do not use $(call) to accommodate ancient setups); + +ETC_GITCONFIG_SQ = $(subst ','\'',$(ETC_GITCONFIG)) +ETC_GITATTRIBUTES_SQ = $(subst ','\'',$(ETC_GITATTRIBUTES)) + +DESTDIR_SQ = $(subst ','\'',$(DESTDIR)) +NO_GETTEXT_SQ = $(subst ','\'',$(NO_GETTEXT)) +bindir_SQ = $(subst ','\'',$(bindir)) +bindir_relative_SQ = $(subst ','\'',$(bindir_relative)) +mandir_SQ = $(subst ','\'',$(mandir)) +mandir_relative_SQ = $(subst ','\'',$(mandir_relative)) +infodir_relative_SQ = $(subst ','\'',$(infodir_relative)) +bash_completion_dir_SQ = $(subst ','\'',$(bash_completion_dir)) +perllibdir_SQ = $(subst ','\'',$(perllibdir)) +localedir_SQ = $(subst ','\'',$(localedir)) +localedir_relative_SQ = $(subst ','\'',$(localedir_relative)) +gitexecdir_SQ = $(subst ','\'',$(gitexecdir)) +gitexecdir_relative_SQ = $(subst ','\'',$(gitexecdir_relative)) +template_dir_SQ = $(subst ','\'',$(template_dir)) +htmldir_relative_SQ = $(subst ','\'',$(htmldir_relative)) +prefix_SQ = $(subst ','\'',$(prefix)) +perllibdir_relative_SQ = $(subst ','\'',$(perllibdir_relative)) +gitwebdir_SQ = $(subst ','\'',$(gitwebdir)) +gitwebstaticdir_SQ = $(subst ','\'',$(gitwebstaticdir)) + +SHELL_PATH_SQ = $(subst ','\'',$(SHELL_PATH)) +TEST_SHELL_PATH_SQ = $(subst ','\'',$(TEST_SHELL_PATH)) +PERL_PATH_SQ = $(subst ','\'',$(PERL_PATH)) +PYTHON_PATH_SQ = $(subst ','\'',$(PYTHON_PATH)) +TCLTK_PATH_SQ = $(subst ','\'',$(TCLTK_PATH)) +DIFF_SQ = $(subst ','\'',$(DIFF)) +PERLLIB_EXTRA_SQ = $(subst ','\'',$(PERLLIB_EXTRA)) + +# RUNTIME_PREFIX's resolution logic requires resource paths to be expressed +# relative to each other and share an installation path. +# +# This is a dependency in: +# - Git's binary RUNTIME_PREFIX logic in (see "exec_cmd.c"). +# - The runtime prefix Perl header (see +# "perl/header_templates/runtime_prefix.template.pl"). 
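+#
+# Illustration rather than an additional requirement: with the stock layout
+# the prefix-relative paths checked below look like
+# gitexecdir_relative=libexec/git-core and localedir_relative=share/locale;
+# configuring one of these directories outside the installation prefix
+# leaves its _relative form absolute and triggers the matching $(error)
+# below.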
+ifdef RUNTIME_PREFIX + +ifneq ($(filter /%,$(firstword $(gitexecdir_relative))),) +$(error RUNTIME_PREFIX requires a relative gitexecdir, not: $(gitexecdir)) +endif + +ifneq ($(filter /%,$(firstword $(localedir_relative))),) +$(error RUNTIME_PREFIX requires a relative localedir, not: $(localedir)) +endif + +ifndef NO_PERL +ifneq ($(filter /%,$(firstword $(perllibdir_relative))),) +$(error RUNTIME_PREFIX requires a relative perllibdir, not: $(perllibdir)) +endif +endif + +endif + +# We must filter out any object files from $(GITLIBS), +# as it is typically used like: +# +# foo: foo.o $(GITLIBS) +# $(CC) $(filter %.o,$^) $(LIBS) +# +# where we use it as a dependency. Since we also pull object files +# from the dependency list, that would make each entry appear twice. +LIBS = $(filter-out %.o, $(GITLIBS)) $(EXTLIBS) + +BASIC_CFLAGS += $(COMPAT_CFLAGS) +LIB_OBJS += $(COMPAT_OBJS) + +# Quote for C + +ifdef DEFAULT_EDITOR +DEFAULT_EDITOR_CQ = "$(subst ",\",$(subst \,\\,$(DEFAULT_EDITOR)))" +DEFAULT_EDITOR_CQ_SQ = $(subst ','\'',$(DEFAULT_EDITOR_CQ)) + +BASIC_CFLAGS += -DDEFAULT_EDITOR='$(DEFAULT_EDITOR_CQ_SQ)' +endif + +ifdef DEFAULT_PAGER +DEFAULT_PAGER_CQ = "$(subst ",\",$(subst \,\\,$(DEFAULT_PAGER)))" +DEFAULT_PAGER_CQ_SQ = $(subst ','\'',$(DEFAULT_PAGER_CQ)) + +BASIC_CFLAGS += -DDEFAULT_PAGER='$(DEFAULT_PAGER_CQ_SQ)' +endif + +ifdef SHELL_PATH +SHELL_PATH_CQ = "$(subst ",\",$(subst \,\\,$(SHELL_PATH)))" +SHELL_PATH_CQ_SQ = $(subst ','\'',$(SHELL_PATH_CQ)) + +BASIC_CFLAGS += -DSHELL_PATH='$(SHELL_PATH_CQ_SQ)' +endif + +GIT_USER_AGENT_SQ = $(subst ','\'',$(GIT_USER_AGENT)) +GIT_USER_AGENT_CQ = "$(subst ",\",$(subst \,\\,$(GIT_USER_AGENT)))" +GIT_USER_AGENT_CQ_SQ = $(subst ','\'',$(GIT_USER_AGENT_CQ)) +GIT-USER-AGENT: FORCE + @if test x'$(GIT_USER_AGENT_SQ)' != x"`cat GIT-USER-AGENT 2>/dev/null`"; then \ + echo '$(GIT_USER_AGENT_SQ)' >GIT-USER-AGENT; \ + fi + +ifdef DEFAULT_HELP_FORMAT +BASIC_CFLAGS += -DDEFAULT_HELP_FORMAT='"$(DEFAULT_HELP_FORMAT)"' +endif + +ALL_CFLAGS += $(BASIC_CFLAGS) +ALL_LDFLAGS += $(BASIC_LDFLAGS) + +export DIFF TAR INSTALL DESTDIR SHELL_PATH + + +### Build rules + +SHELL = $(SHELL_PATH) + +all:: shell_compatibility_test + +ifeq "$(PROFILE)" "BUILD" +all:: profile +endif + +profile:: profile-clean + $(MAKE) PROFILE=GEN all + $(MAKE) PROFILE=GEN -j1 test + @if test -n "$$GIT_PERF_REPO" || test -d .git; then \ + $(MAKE) PROFILE=GEN -j1 perf; \ + else \ + echo "Skipping profile of perf tests..."; \ + fi + $(MAKE) PROFILE=USE all + +profile-fast: profile-clean + $(MAKE) PROFILE=GEN all + $(MAKE) PROFILE=GEN -j1 perf + $(MAKE) PROFILE=USE all + + +all:: $(ALL_COMMANDS_TO_INSTALL) $(SCRIPT_LIB) $(OTHER_PROGRAMS) GIT-BUILD-OPTIONS +ifneq (,$X) + $(QUIET_BUILT_IN)$(foreach p,$(patsubst %$X,%,$(filter %$X,$(ALL_COMMANDS_TO_INSTALL) $(OTHER_PROGRAMS))), if test ! -d '$p' && test ! '$p' -ef '$p$X'; then $(RM) '$p'; fi;) +endif + +all:: +ifndef NO_TCLTK + $(QUIET_SUBDIR0)git-gui $(QUIET_SUBDIR1) gitexecdir='$(gitexec_instdir_SQ)' all + $(QUIET_SUBDIR0)gitk-git $(QUIET_SUBDIR1) all +endif + $(QUIET_SUBDIR0)templates $(QUIET_SUBDIR1) SHELL_PATH='$(SHELL_PATH_SQ)' PERL_PATH='$(PERL_PATH_SQ)' + +# If you add a new fuzzer, please also make sure to run it in +# ci/run-build-and-minimal-fuzzers.sh so that we make sure it still links and +# runs in the future. 
+FUZZ_OBJS += oss-fuzz/dummy-cmd-main.o +FUZZ_OBJS += oss-fuzz/fuzz-commit-graph.o +FUZZ_OBJS += oss-fuzz/fuzz-config.o +FUZZ_OBJS += oss-fuzz/fuzz-credential-from-url-gently.o +FUZZ_OBJS += oss-fuzz/fuzz-date.o +FUZZ_OBJS += oss-fuzz/fuzz-pack-headers.o +FUZZ_OBJS += oss-fuzz/fuzz-pack-idx.o +FUZZ_OBJS += oss-fuzz/fuzz-parse-attr-line.o +FUZZ_OBJS += oss-fuzz/fuzz-url-decode-mem.o +.PHONY: fuzz-objs +fuzz-objs: $(FUZZ_OBJS) + +# Always build fuzz objects even if not testing, to prevent bit-rot. +all:: $(FUZZ_OBJS) + +FUZZ_PROGRAMS += $(patsubst %.o,%,$(filter-out %dummy-cmd-main.o,$(FUZZ_OBJS))) + +# Build fuzz programs when possible, even without the necessary fuzzing support, +# to prevent bit-rot. +ifdef LINK_FUZZ_PROGRAMS +all:: $(FUZZ_PROGRAMS) +endif + +please_set_SHELL_PATH_to_a_more_modern_shell: + @$$(:) + +shell_compatibility_test: please_set_SHELL_PATH_to_a_more_modern_shell + +strip: $(PROGRAMS) git$X + $(STRIP) $(STRIP_OPTS) $^ + +### Target-specific flags and dependencies + +# The generic compilation pattern rule and automatically +# computed header dependencies (falling back to a dependency on +# LIB_H) are enough to describe how most targets should be built, +# but some targets are special enough to need something a little +# different. +# +# - When a source file "foo.c" #includes a generated header file, +# we need to list that dependency for the "foo.o" target. +# +# We also list it from other targets that are built from foo.c +# like "foo.sp" and "foo.s", even though that is easy to forget +# to do because the generated header is already present around +# after a regular build attempt. +# +# - Some code depends on configuration kept in makefile +# variables. The target-specific variable EXTRA_CPPFLAGS can +# be used to convey that information to the C preprocessor +# using -D options. +# +# The "foo.o" target should have a corresponding dependency on +# a file that changes when the value of the makefile variable +# changes. For example, targets making use of the +# $(GIT_VERSION) variable depend on GIT-VERSION-FILE. +# +# Technically the ".sp" and ".s" targets do not need this +# dependency because they are force-built, but they get the +# same dependency for consistency. This way, you do not have to +# know how each target is implemented. And it means the +# dependencies here will not need to change if the force-build +# details change some day. 
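+#
+# As a sketch of the pattern (the names "foo", "generated.h" and FOO-TRACK
+# are placeholders, not targets that exist in this Makefile): a file foo.c
+# that includes a generated header and bakes in a makefile variable would
+# be wired up roughly as
+#
+#	foo.sp foo.s foo.o: generated.h FOO-TRACK
+#	foo.sp foo.s foo.o: EXTRA_CPPFLAGS = -DFOO_VALUE='"$(FOO_VALUE)"'
+#
+# The real instances of this pattern follow below.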
+ +git.sp git.s git.o: GIT-PREFIX +git.sp git.s git.o: EXTRA_CPPFLAGS = \ + '-DGIT_HTML_PATH="$(htmldir_relative_SQ)"' \ + '-DGIT_MAN_PATH="$(mandir_relative_SQ)"' \ + '-DGIT_INFO_PATH="$(infodir_relative_SQ)"' + +git$X: git.o GIT-LDFLAGS $(BUILTIN_OBJS) $(GITLIBS) + $(QUIET_LINK)$(CC) $(ALL_CFLAGS) -o $@ $(ALL_LDFLAGS) \ + $(filter %.o,$^) $(LIBS) + +help.sp help.s help.o: command-list.h +builtin/bugreport.sp builtin/bugreport.s builtin/bugreport.o: hook-list.h + +builtin/help.sp builtin/help.s builtin/help.o: config-list.h GIT-PREFIX +builtin/help.sp builtin/help.s builtin/help.o: EXTRA_CPPFLAGS = \ + '-DGIT_HTML_PATH="$(htmldir_relative_SQ)"' \ + '-DGIT_MAN_PATH="$(mandir_relative_SQ)"' \ + '-DGIT_INFO_PATH="$(infodir_relative_SQ)"' + +PAGER_ENV_SQ = $(subst ','\'',$(PAGER_ENV)) +PAGER_ENV_CQ = "$(subst ",\",$(subst \,\\,$(PAGER_ENV)))" +PAGER_ENV_CQ_SQ = $(subst ','\'',$(PAGER_ENV_CQ)) +pager.sp pager.s pager.o: EXTRA_CPPFLAGS = \ + -DPAGER_ENV='$(PAGER_ENV_CQ_SQ)' + +version-def.h: version-def.h.in GIT-VERSION-GEN GIT-VERSION-FILE GIT-USER-AGENT + $(QUIET_GEN)$(call version_gen,"$(shell pwd)",$<,$@) + +version.sp version.s version.o: version-def.h + +$(BUILT_INS): git$X + $(QUIET_BUILT_IN)$(RM) $@ && \ + ln $< $@ 2>/dev/null || \ + ln -s $< $@ 2>/dev/null || \ + cp $< $@ + +config-list.h: generate-configlist.sh + +config-list.h: Documentation/*config.adoc Documentation/config/*.adoc + $(QUIET_GEN)$(SHELL_PATH) ./generate-configlist.sh . $@ + +command-list.h: generate-cmdlist.sh command-list.txt + +command-list.h: $(wildcard Documentation/git*.adoc) + $(QUIET_GEN)$(SHELL_PATH) ./generate-cmdlist.sh \ + $(patsubst %,--exclude-program %,$(EXCLUDED_PROGRAMS)) \ + . $@ + +hook-list.h: generate-hooklist.sh Documentation/githooks.adoc + $(QUIET_GEN)$(SHELL_PATH) ./generate-hooklist.sh . $@ + +SCRIPT_DEFINES = $(SHELL_PATH_SQ):$(DIFF_SQ):\ + $(localedir_SQ):$(USE_GETTEXT_SCHEME):$(SANE_TOOL_PATH_SQ):\ + $(gitwebdir_SQ):$(PERL_PATH_SQ):$(PAGER_ENV):\ + $(perllibdir_SQ) +GIT-SCRIPT-DEFINES: FORCE + @FLAGS='$(SCRIPT_DEFINES)'; \ + if test x"$$FLAGS" != x"`cat $@ 2>/dev/null`" ; then \ + echo >&2 " * new script parameters"; \ + echo "$$FLAGS" >$@; \ + fi + +$(SCRIPT_SH_GEN) $(SCRIPT_LIB) : % : %.sh generate-script.sh GIT-BUILD-OPTIONS GIT-SCRIPT-DEFINES + $(QUIET_GEN)./generate-script.sh "$<" "$@+" ./GIT-BUILD-OPTIONS && \ + mv $@+ $@ + +git.rc: git.rc.in GIT-VERSION-GEN GIT-VERSION-FILE + $(QUIET_GEN)$(call version_gen,"$(shell pwd)",$<,$@) + +git.res: git.rc GIT-PREFIX + $(QUIET_RC)$(RC) -i $< -o $@ + +# This makes sure we depend on the NO_PERL setting itself. +$(SCRIPT_PERL_GEN): GIT-BUILD-OPTIONS + +# Used for substitution in Perl modules. Disabled when using RUNTIME_PREFIX +# since the locale directory is injected. +perl_localedir_SQ = $(localedir_SQ) + +ifndef NO_PERL +PERL_HEADER_TEMPLATE = perl/header_templates/fixed_prefix.template.pl +PERL_DEFINES = +PERL_DEFINES += $(PERL_PATH_SQ) +PERL_DEFINES += $(PERLLIB_EXTRA_SQ) +PERL_DEFINES += $(perllibdir_SQ) +PERL_DEFINES += $(RUNTIME_PREFIX) +PERL_DEFINES += $(NO_PERL_CPAN_FALLBACKS) +PERL_DEFINES += $(NO_GETTEXT) + +# Support Perl runtime prefix. In this mode, a different header is installed +# into Perl scripts. +ifdef RUNTIME_PREFIX + +PERL_HEADER_TEMPLATE = perl/header_templates/runtime_prefix.template.pl + +# Don't export a fixed $(localedir) path; it will be resolved by the Perl header +# at runtime. 
+perl_localedir_SQ = + +endif + +PERL_DEFINES += $(gitexecdir) $(perllibdir) $(localedir) + +$(SCRIPT_PERL_GEN): % : %.perl generate-perl.sh GIT-PERL-DEFINES GIT-PERL-HEADER GIT-VERSION-FILE + $(QUIET_GEN)$(SHELL_PATH) generate-perl.sh ./GIT-BUILD-OPTIONS ./GIT-VERSION-FILE GIT-PERL-HEADER "$<" "$@+" && \ + mv $@+ $@ + +PERL_DEFINES := $(subst $(space),:,$(PERL_DEFINES)) +GIT-PERL-DEFINES: FORCE + @FLAGS='$(PERL_DEFINES)'; \ + if test x"$$FLAGS" != x"`cat $@ 2>/dev/null`" ; then \ + echo >&2 " * new perl-specific parameters"; \ + echo "$$FLAGS" >$@; \ + fi + +GIT-PERL-HEADER: $(PERL_HEADER_TEMPLATE) GIT-PERL-DEFINES Makefile + $(QUIET_GEN) \ + INSTLIBDIR='$(perllibdir_SQ)' && \ + INSTLIBDIR_EXTRA='$(PERLLIB_EXTRA_SQ)' && \ + INSTLIBDIR="$$INSTLIBDIR$${INSTLIBDIR_EXTRA:+:$$INSTLIBDIR_EXTRA}" && \ + sed -e 's=@PATHSEP@=$(pathsep)=g' \ + -e "s=@INSTLIBDIR@=$$INSTLIBDIR=g" \ + -e 's=@PERLLIBDIR_REL@=$(perllibdir_relative_SQ)=g' \ + -e 's=@GITEXECDIR_REL@=$(gitexecdir_relative_SQ)=g' \ + -e 's=@LOCALEDIR_REL@=$(localedir_relative_SQ)=g' \ + $< >$@+ && \ + mv $@+ $@ + +.PHONY: perllibdir +perllibdir: + @echo '$(perllibdir_SQ)' + +git-instaweb: git-instaweb.sh generate-script.sh GIT-BUILD-OPTIONS GIT-SCRIPT-DEFINES + $(QUIET_GEN)./generate-script.sh "$<" "$@+" ./GIT-BUILD-OPTIONS && \ + chmod +x $@+ && \ + mv $@+ $@ +else # NO_PERL +$(SCRIPT_PERL_GEN) git-instaweb: % : unimplemented.sh + $(QUIET_GEN) \ + sed -e '1s|#!.*/sh|#!$(SHELL_PATH_SQ)|' \ + -e 's|@REASON@|NO_PERL=$(NO_PERL)|g' \ + unimplemented.sh >$@+ && \ + chmod +x $@+ && \ + mv $@+ $@ +endif # NO_PERL + +# This makes sure we depend on the NO_PYTHON setting itself. +$(SCRIPT_PYTHON_GEN): GIT-BUILD-OPTIONS + +ifndef NO_PYTHON +$(SCRIPT_PYTHON_GEN): generate-python.sh +$(SCRIPT_PYTHON_GEN): % : %.py + $(QUIET_GEN)$(SHELL_PATH) generate-python.sh ./GIT-BUILD-OPTIONS "$<" "$@" +else # NO_PYTHON +$(SCRIPT_PYTHON_GEN): % : unimplemented.sh + $(QUIET_GEN) \ + sed -e '1s|#!.*/sh|#!$(SHELL_PATH_SQ)|' \ + -e 's|@REASON@|NO_PYTHON=$(NO_PYTHON)|g' \ + unimplemented.sh >$@+ && \ + chmod +x $@+ && \ + mv $@+ $@ +endif # NO_PYTHON + +CONFIGURE_RECIPE = sed -e 's/@GIT_VERSION@/$(GIT_VERSION)/g' \ + configure.ac >configure.ac+ && \ + autoconf -o configure configure.ac+ && \ + $(RM) configure.ac+ + +configure: configure.ac GIT-VERSION-FILE + $(QUIET_GEN)$(CONFIGURE_RECIPE) + +ifdef AUTOCONFIGURED +# We avoid depending on 'configure' here, because it gets rebuilt +# every time GIT-VERSION-FILE is modified, only to update the embedded +# version number string, which config.status does not care about. We +# do want to recheck when the platform/environment detection logic +# changes, hence this depends on configure.ac. +config.status: configure.ac + $(QUIET_GEN)$(CONFIGURE_RECIPE) && \ + if test -f config.status; then \ + ./config.status --recheck; \ + else \ + ./configure; \ + fi +reconfigure config.mak.autogen: config.status + $(QUIET_GEN)./config.status +.PHONY: reconfigure # This is a convenience target. 
+endif + +TEST_OBJS := $(patsubst %$X,%.o,$(TEST_PROGRAMS)) $(patsubst %,t/helper/%,$(TEST_BUILTINS_OBJS)) + +.PHONY: test-objs +test-objs: $(TEST_OBJS) + +GIT_OBJS += $(LIB_OBJS) +GIT_OBJS += $(BUILTIN_OBJS) +GIT_OBJS += common-main.o +GIT_OBJS += git.o +.PHONY: git-objs +git-objs: $(GIT_OBJS) + +SCALAR_OBJS += scalar.o +.PHONY: scalar-objs +scalar-objs: $(SCALAR_OBJS) + +OBJECTS += $(GIT_OBJS) +OBJECTS += $(SCALAR_OBJS) +OBJECTS += $(PROGRAM_OBJS) +OBJECTS += $(TEST_OBJS) +OBJECTS += $(FUZZ_OBJS) +OBJECTS += $(REFTABLE_TEST_OBJS) +OBJECTS += $(UNIT_TEST_OBJS) +OBJECTS += $(CLAR_TEST_OBJS) +OBJECTS += $(patsubst %,$(UNIT_TEST_DIR)/%.o,$(UNIT_TEST_PROGRAMS)) + +ifdef INCLUDE_LIBGIT_RS + OBJECTS += contrib/libgit-sys/public_symbol_export.o +endif + +ifndef NO_CURL + OBJECTS += http.o http-walker.o remote-curl.o +endif + +.PHONY: objects +objects: $(OBJECTS) + +dep_files := $(foreach f,$(OBJECTS),$(dir $f).depend/$(notdir $f).d) +dep_dirs := $(addsuffix .depend,$(sort $(dir $(OBJECTS)))) + +ifeq ($(COMPUTE_HEADER_DEPENDENCIES),yes) +$(dep_dirs): + @mkdir -p $@ + +missing_dep_dirs := $(filter-out $(wildcard $(dep_dirs)),$(dep_dirs)) +dep_file = $(dir $@).depend/$(notdir $@).d +dep_args = -MF $(dep_file) -MQ $@ -MMD -MP +endif + +ifneq ($(COMPUTE_HEADER_DEPENDENCIES),yes) +missing_dep_dirs = +dep_args = +endif + +compdb_dir = compile_commands + +ifeq ($(GENERATE_COMPILATION_DATABASE),yes) +missing_compdb_dir = $(filter-out $(wildcard $(compdb_dir)), $(compdb_dir)) +$(missing_compdb_dir): + @mkdir -p $@ + +compdb_file = $(compdb_dir)/$(subst /,-,$@.json) +compdb_args = -MJ $(compdb_file) +else +missing_compdb_dir = +compdb_args = +endif + +$(OBJECTS): %.o: %.c GIT-CFLAGS $(missing_dep_dirs) $(missing_compdb_dir) + $(QUIET_CC)$(CC) -o $*.o -c $(dep_args) $(compdb_args) $(ALL_CFLAGS) $(EXTRA_CPPFLAGS) $< + +%.s: %.c GIT-CFLAGS FORCE + $(QUIET_CC)$(CC) -o $@ -S $(ALL_CFLAGS) $(EXTRA_CPPFLAGS) $< + +ifdef USE_COMPUTED_HEADER_DEPENDENCIES +# Take advantage of gcc's on-the-fly dependency generation +# See . 
+dep_files_present := $(wildcard $(dep_files)) +ifneq ($(dep_files_present),) +include $(dep_files_present) +endif +else +$(OBJECTS): $(LIB_H) $(GENERATED_H) +endif + +ifeq ($(GENERATE_COMPILATION_DATABASE),yes) +all:: compile_commands.json +compile_commands.json: + $(QUIET_GEN)sed -e '1s/^/[/' -e '$$s/,$$/]/' $(compdb_dir)/*.o.json > $@+ + @if test -s $@+; then mv $@+ $@; else $(RM) $@+; fi +endif + +exec-cmd.sp exec-cmd.s exec-cmd.o: GIT-PREFIX +exec-cmd.sp exec-cmd.s exec-cmd.o: EXTRA_CPPFLAGS = \ + '-DGIT_EXEC_PATH="$(gitexecdir_SQ)"' \ + '-DGIT_LOCALE_PATH="$(localedir_relative_SQ)"' \ + '-DBINDIR="$(bindir_relative_SQ)"' \ + '-DFALLBACK_RUNTIME_PREFIX="$(prefix_SQ)"' + +setup.sp setup.s setup.o: GIT-PREFIX +setup.sp setup.s setup.o: EXTRA_CPPFLAGS = \ + -DDEFAULT_GIT_TEMPLATE_DIR='"$(template_dir_SQ)"' + +config.sp config.s config.o: GIT-PREFIX +config.sp config.s config.o: EXTRA_CPPFLAGS = \ + -DETC_GITCONFIG='"$(ETC_GITCONFIG_SQ)"' + +attr.sp attr.s attr.o: GIT-PREFIX +attr.sp attr.s attr.o: EXTRA_CPPFLAGS = \ + -DETC_GITATTRIBUTES='"$(ETC_GITATTRIBUTES_SQ)"' + +gettext.sp gettext.s gettext.o: GIT-PREFIX +gettext.sp gettext.s gettext.o: EXTRA_CPPFLAGS = \ + -DGIT_LOCALE_PATH='"$(localedir_relative_SQ)"' + +http-push.sp http.sp http-walker.sp remote-curl.sp imap-send.sp: SP_EXTRA_FLAGS += \ + -DCURL_DISABLE_TYPECHECK + +pack-revindex.sp: SP_EXTRA_FLAGS += -Wno-memcpy-max-count + +ifdef NO_EXPAT +http-walker.sp http-walker.s http-walker.o: EXTRA_CPPFLAGS = -DNO_EXPAT +endif + +ifdef NO_REGEX +compat/regex/regex.sp compat/regex/regex.o: EXTRA_CPPFLAGS = \ + -DGAWK -DNO_MBSUPPORT +endif + +ifdef USE_NED_ALLOCATOR +compat/nedmalloc/nedmalloc.sp compat/nedmalloc/nedmalloc.o: EXTRA_CPPFLAGS = \ + -DNDEBUG -DREPLACE_SYSTEM_ALLOCATOR +compat/nedmalloc/nedmalloc.sp: SP_EXTRA_FLAGS += -Wno-non-pointer-null +endif + +headless-git.o: compat/win32/headless.c GIT-CFLAGS + $(QUIET_CC)$(CC) $(ALL_CFLAGS) $(COMPAT_CFLAGS) \ + -fno-stack-protector -o $@ -c -Wall -Wwrite-strings $< + +headless-git$X: headless-git.o git.res GIT-LDFLAGS + $(QUIET_LINK)$(CC) $(ALL_CFLAGS) $(ALL_LDFLAGS) -mwindows -o $@ $< git.res + +git-%$X: %.o GIT-LDFLAGS $(GITLIBS) + $(QUIET_LINK)$(CC) $(ALL_CFLAGS) -o $@ $(ALL_LDFLAGS) $(filter %.o,$^) $(LIBS) + +git-imap-send$X: imap-send.o $(IMAP_SEND_BUILDDEPS) GIT-LDFLAGS $(GITLIBS) + $(QUIET_LINK)$(CC) $(ALL_CFLAGS) -o $@ $(ALL_LDFLAGS) $(filter %.o,$^) \ + $(IMAP_SEND_LDFLAGS) $(LIBS) + +git-http-fetch$X: http.o http-walker.o http-fetch.o GIT-LDFLAGS $(GITLIBS) + $(QUIET_LINK)$(CC) $(ALL_CFLAGS) -o $@ $(ALL_LDFLAGS) $(filter %.o,$^) \ + $(CURL_LIBCURL) $(LIBS) +git-http-push$X: http.o http-push.o GIT-LDFLAGS $(GITLIBS) + $(QUIET_LINK)$(CC) $(ALL_CFLAGS) -o $@ $(ALL_LDFLAGS) $(filter %.o,$^) \ + $(CURL_LIBCURL) $(EXPAT_LIBEXPAT) $(LIBS) + +$(REMOTE_CURL_ALIASES): $(REMOTE_CURL_PRIMARY) + $(QUIET_LNCP)$(RM) $@ && \ + ln $< $@ 2>/dev/null || \ + ln -s $< $@ 2>/dev/null || \ + cp $< $@ + +$(REMOTE_CURL_PRIMARY): remote-curl.o http.o http-walker.o GIT-LDFLAGS $(GITLIBS) + $(QUIET_LINK)$(CC) $(ALL_CFLAGS) -o $@ $(ALL_LDFLAGS) $(filter %.o,$^) \ + $(CURL_LIBCURL) $(EXPAT_LIBEXPAT) $(LIBS) + +scalar$X: scalar.o GIT-LDFLAGS $(GITLIBS) + $(QUIET_LINK)$(CC) $(ALL_CFLAGS) -o $@ $(ALL_LDFLAGS) \ + $(filter %.o,$^) $(LIBS) + +$(LIB_FILE): $(LIB_OBJS) + $(QUIET_AR)$(RM) $@ && $(AR) $(ARFLAGS) $@ $^ + +$(RUST_LIB): Cargo.toml $(RUST_SOURCES) + $(QUIET_CARGO)cargo build $(CARGO_ARGS) + +.PHONY: rust +rust: $(RUST_LIB) + +export DEFAULT_EDITOR DEFAULT_PAGER + 
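+# Usage sketch (a comment added for illustration, not a rule of its own):
+# "make rust" builds just $(RUST_LIB) by running "cargo build $(CARGO_ARGS)",
+# while additionally defining WITH_RUST (e.g. "make WITH_RUST=YesPlease")
+# adds -DWITH_RUST to BASIC_CFLAGS and $(RUST_LIB) to $(GITLIBS) earlier in
+# this Makefile, so the library also gets linked into git$X.
+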
+Documentation/GIT-EXCLUDED-PROGRAMS: FORCE + @EXCLUDED='EXCLUDED_PROGRAMS := $(EXCLUDED_PROGRAMS)'; \ + if test x"$$EXCLUDED" != \ + x"`cat Documentation/GIT-EXCLUDED-PROGRAMS 2>/dev/null`" ; then \ + echo >&2 " * new documentation flags"; \ + echo "$$EXCLUDED" >Documentation/GIT-EXCLUDED-PROGRAMS; \ + fi + +.PHONY: doc man man-perl html info pdf +doc: man-perl + $(MAKE) -C Documentation all + +man: man-perl + $(MAKE) -C Documentation man + +man-perl: perl/build/man/man3/Git.3pm + +html: + $(MAKE) -C Documentation html + +info: + $(MAKE) -C Documentation info + +pdf: + $(MAKE) -C Documentation pdf + +XGETTEXT_FLAGS = \ + --force-po \ + --add-comments=TRANSLATORS: \ + --msgid-bugs-address="Git Mailing List " \ + --package-name=Git +XGETTEXT_FLAGS_C = $(XGETTEXT_FLAGS) --language=C \ + --keyword=_ --keyword=N_ --keyword="Q_:1,2" +XGETTEXT_FLAGS_SH = $(XGETTEXT_FLAGS) --language=Shell \ + --keyword=gettextln --keyword=eval_gettextln +XGETTEXT_FLAGS_PERL = $(XGETTEXT_FLAGS) --language=Perl \ + --keyword=__ --keyword=N__ --keyword="__n:1,2" +MSGMERGE_FLAGS = --add-location --backup=off --update +LOCALIZED_C = $(sort $(FOUND_C_SOURCES) $(FOUND_H_SOURCES) $(GENERATED_H)) +LOCALIZED_SH = $(sort $(SCRIPT_SH) git-sh-setup.sh) +LOCALIZED_PERL = $(sort $(SCRIPT_PERL)) + +ifdef XGETTEXT_INCLUDE_TESTS +LOCALIZED_C += t/t0200/test.c +LOCALIZED_SH += t/t0200/test.sh +LOCALIZED_PERL += t/t0200/test.perl +endif + +## We generate intermediate .build/pot/po/%.po files containing a +## extract of the translations we find in each file in the source +## tree. We will assemble them using msgcat to create the final +## "po/git.pot" file. +LOCALIZED_ALL_GEN_PO = + +LOCALIZED_C_GEN_PO = $(LOCALIZED_C:%=.build/pot/po/%.po) +LOCALIZED_ALL_GEN_PO += $(LOCALIZED_C_GEN_PO) + +LOCALIZED_SH_GEN_PO = $(LOCALIZED_SH:%=.build/pot/po/%.po) +LOCALIZED_ALL_GEN_PO += $(LOCALIZED_SH_GEN_PO) + +LOCALIZED_PERL_GEN_PO = $(LOCALIZED_PERL:%=.build/pot/po/%.po) +LOCALIZED_ALL_GEN_PO += $(LOCALIZED_PERL_GEN_PO) + +## Gettext tools cannot work with our own custom PRItime type, so +## we replace PRItime with PRIuMAX. We need to update this to +## PRIdMAX if we switch to a signed type later. 
+$(LOCALIZED_C_GEN_PO): .build/pot/po/%.po: % + $(call mkdir_p_parent_template) + $(QUIET_XGETTEXT) \ + if grep -q PRItime $<; then \ + (\ + sed -e 's|PRItime|PRIuMAX|g' <$< \ + >.build/pot/po/$< && \ + cd .build/pot/po && \ + $(XGETTEXT) --omit-header \ + -o $(@:.build/pot/po/%=%) \ + $(XGETTEXT_FLAGS_C) $< && \ + rm $<; \ + ); \ + else \ + $(XGETTEXT) --omit-header \ + -o $@ $(XGETTEXT_FLAGS_C) $<; \ + fi + +$(LOCALIZED_SH_GEN_PO): .build/pot/po/%.po: % + $(call mkdir_p_parent_template) + $(QUIET_XGETTEXT)$(XGETTEXT) --omit-header \ + -o$@ $(XGETTEXT_FLAGS_SH) $< + +$(LOCALIZED_PERL_GEN_PO): .build/pot/po/%.po: % + $(call mkdir_p_parent_template) + $(QUIET_XGETTEXT)$(XGETTEXT) --omit-header \ + -o$@ $(XGETTEXT_FLAGS_PERL) $< + +define gen_pot_header +$(XGETTEXT) $(XGETTEXT_FLAGS_C) \ + -o - /dev/null | \ +sed -e 's|charset=CHARSET|charset=UTF-8|' \ + -e 's|\(Last-Translator: \)FULL NAME <.*>|\1make by the Makefile|' \ + -e 's|\(Language-Team: \)LANGUAGE <.*>|\1Git Mailing List |' \ + >$@ && \ +echo '"Plural-Forms: nplurals=INTEGER; plural=EXPRESSION;\\n"' >>$@ +endef + +.build/pot/git.header: $(LOCALIZED_ALL_GEN_PO) + $(call mkdir_p_parent_template) + $(QUIET_GEN)$(gen_pot_header) + +po/git.pot: .build/pot/git.header $(LOCALIZED_ALL_GEN_PO) + $(QUIET_GEN)$(MSGCAT) $^ >$@ + +.PHONY: pot +pot: po/git.pot + +define check_po_file_envvar + $(if $(PO_FILE), \ + $(if $(filter po/%.po,$(PO_FILE)), , \ + $(error PO_FILE should match pattern: "po/%.po")), \ + $(error PO_FILE is not defined)) +endef + +.PHONY: po-update +po-update: po/git.pot + $(check_po_file_envvar) + @if test ! -e $(PO_FILE); then \ + echo >&2 "error: $(PO_FILE) does not exist"; \ + echo >&2 'To create an initial po file, use: "make po-init PO_FILE=po/XX.po"'; \ + exit 1; \ + fi + $(QUIET_MSGMERGE)$(MSGMERGE) $(MSGMERGE_FLAGS) $(PO_FILE) po/git.pot + +.PHONY: check-pot +check-pot: $(LOCALIZED_ALL_GEN_PO) + +### TODO FIXME: Translating everything in these files is a bad +### heuristic for "core", as we'll translate obscure error() messages +### along with commonly seen i18n messages. A better heuristic would +### be to e.g. use spatch to first remove error/die/warning +### etc. messages. 
+LOCALIZED_C_CORE = +LOCALIZED_C_CORE += builtin/checkout.c +LOCALIZED_C_CORE += builtin/clone.c +LOCALIZED_C_CORE += builtin/index-pack.c +LOCALIZED_C_CORE += builtin/push.c +LOCALIZED_C_CORE += builtin/reset.c +LOCALIZED_C_CORE += remote.c +LOCALIZED_C_CORE += wt-status.c + +LOCALIZED_C_CORE_GEN_PO = $(LOCALIZED_C_CORE:%=.build/pot/po/%.po) + +.build/pot/git-core.header: $(LOCALIZED_C_CORE_GEN_PO) + $(call mkdir_p_parent_template) + $(QUIET_GEN)$(gen_pot_header) + +po/git-core.pot: .build/pot/git-core.header $(LOCALIZED_C_CORE_GEN_PO) + $(QUIET_GEN)$(MSGCAT) $^ >$@ + +.PHONY: po-init +po-init: po/git-core.pot + $(check_po_file_envvar) + @if test -e $(PO_FILE); then \ + echo >&2 "error: $(PO_FILE) exists already"; \ + exit 1; \ + fi + $(QUIET_MSGINIT)msginit \ + --input=$< \ + --output=$(PO_FILE) \ + --no-translator \ + --locale=$(PO_FILE:po/%.po=%) + +## po/*.po files & their rules +ifdef NO_GETTEXT +POFILES := +MOFILES := +else +POFILES := $(wildcard po/*.po) +MOFILES := $(patsubst po/%.po,po/build/locale/%/LC_MESSAGES/git.mo,$(POFILES)) + +all:: $(MOFILES) +endif + +po/build/locale/%/LC_MESSAGES/git.mo: po/%.po + $(call mkdir_p_parent_template) + $(QUIET_MSGFMT)$(MSGFMT) -o $@ $< + +LIB_PERL := $(wildcard perl/Git.pm perl/Git/*.pm perl/Git/*/*.pm perl/Git/*/*/*.pm) +LIB_PERL_GEN := $(patsubst perl/%.pm,perl/build/lib/%.pm,$(LIB_PERL)) +LIB_CPAN := $(wildcard perl/FromCPAN/*.pm perl/FromCPAN/*/*.pm) +LIB_CPAN_GEN := $(patsubst perl/%.pm,perl/build/lib/%.pm,$(LIB_CPAN)) + +ifndef NO_PERL +all:: $(LIB_PERL_GEN) +ifndef NO_PERL_CPAN_FALLBACKS +all:: $(LIB_CPAN_GEN) +endif +NO_PERL_CPAN_FALLBACKS_SQ = $(subst ','\'',$(NO_PERL_CPAN_FALLBACKS)) +endif + +perl/build/lib/%.pm: perl/%.pm generate-perl.sh GIT-BUILD-OPTIONS GIT-VERSION-FILE GIT-PERL-DEFINES + $(call mkdir_p_parent_template) + $(QUIET_GEN)$(SHELL_PATH) generate-perl.sh ./GIT-BUILD-OPTIONS ./GIT-VERSION-FILE GIT-PERL-HEADER "$<" "$@" + +perl/build/man/man3/Git.3pm: perl/Git.pm + $(call mkdir_p_parent_template) + $(QUIET_GEN)pod2man $< $@ + +$(ETAGS_TARGET): $(FOUND_SOURCE_FILES) + $(QUIET_GEN)$(RM) $@+ && \ + echo $(FOUND_SOURCE_FILES) | xargs etags -a -o $@+ && \ + mv $@+ $@ + +tags: $(FOUND_SOURCE_FILES) + $(QUIET_GEN)$(RM) $@+ && \ + echo $(FOUND_SOURCE_FILES) | xargs ctags -a -o $@+ && \ + mv $@+ $@ + +cscope.out: $(FOUND_SOURCE_FILES) + $(QUIET_GEN)$(RM) $@+ && \ + echo $(FOUND_SOURCE_FILES) | xargs cscope -f$@+ -b && \ + mv $@+ $@ + +.PHONY: cscope +cscope: cscope.out + +### Detect prefix changes +TRACK_PREFIX = $(bindir_SQ):$(gitexecdir_SQ):$(template_dir_SQ):$(prefix_SQ):\ + $(localedir_SQ) + +GIT-PREFIX: FORCE + @FLAGS='$(TRACK_PREFIX)'; \ + if test x"$$FLAGS" != x"`cat GIT-PREFIX 2>/dev/null`" ; then \ + echo >&2 " * new prefix flags"; \ + echo "$$FLAGS" >GIT-PREFIX; \ + fi + +TRACK_CFLAGS = $(CC):$(subst ','\'',$(ALL_CFLAGS)):$(USE_GETTEXT_SCHEME) + +GIT-CFLAGS: FORCE + @FLAGS='$(TRACK_CFLAGS)'; \ + if test x"$$FLAGS" != x"`cat GIT-CFLAGS 2>/dev/null`" ; then \ + echo >&2 " * new build flags"; \ + echo "$$FLAGS" >GIT-CFLAGS; \ + fi + +TRACK_LDFLAGS = $(subst ','\'',$(ALL_LDFLAGS)) + +GIT-LDFLAGS: FORCE + @FLAGS='$(TRACK_LDFLAGS)'; \ + if test x"$$FLAGS" != x"`cat GIT-LDFLAGS 2>/dev/null`" ; then \ + echo >&2 " * new link flags"; \ + echo "$$FLAGS" >GIT-LDFLAGS; \ + fi + +ifdef RUNTIME_PREFIX +RUNTIME_PREFIX_OPTION = true +else +RUNTIME_PREFIX_OPTION = false +endif + +# We need to apply sq twice, once to protect from the shell +# that runs GIT-BUILD-OPTIONS, and then again to protect it +# and the first level quoting from 
the shell that runs "echo". +GIT-BUILD-OPTIONS: FORCE + @sed \ + -e "s!@BROKEN_PATH_FIX@!\'$(BROKEN_PATH_FIX)\'!" \ + -e "s|@DIFF@|\'$(DIFF)\'|" \ + -e "s|@FSMONITOR_DAEMON_BACKEND@|\'$(FSMONITOR_DAEMON_BACKEND)\'|" \ + -e "s|@FSMONITOR_OS_SETTINGS@|\'$(FSMONITOR_OS_SETTINGS)\'|" \ + -e "s|@GITWEBDIR@|\'$(gitwebdir_SQ)\'|" \ + -e "s|@GIT_INTEROP_MAKE_OPTS@|\'$(GIT_INTEROP_MAKE_OPTS)\'|" \ + -e "s|@GIT_PERF_LARGE_REPO@|\'$(GIT_PERF_LARGE_REPO)\'|" \ + -e "s|@GIT_PERF_MAKE_COMMAND@|\'$(GIT_PERF_MAKE_COMMAND)\'|" \ + -e "s|@GIT_PERF_MAKE_OPTS@|\'$(GIT_PERF_MAKE_OPTS)\'|" \ + -e "s|@GIT_PERF_REPEAT_COUNT@|\'$(GIT_PERF_REPEAT_COUNT)\'|" \ + -e "s|@GIT_PERF_REPO@|\'$(GIT_PERF_REPO)\'|" \ + -e "s|@GIT_SOURCE_DIR@|\'$(shell pwd)\'|" \ + -e "s|@GIT_TEST_CMP@|\'$(GIT_TEST_CMP)\'|" \ + -e "s|@GIT_TEST_CMP_USE_COPIED_CONTEXT@|\'$(GIT_TEST_CMP_USE_COPIED_CONTEXT)\'|" \ + -e "s|@GIT_TEST_GITPERLLIB@|\'$(shell pwd)/perl/build/lib\'|" \ + -e "s|@GIT_TEST_INDEX_VERSION@|\'$(GIT_TEST_INDEX_VERSION)\'|" \ + -e "s|@GIT_TEST_OPTS@|\'$(GIT_TEST_OPTS)\'|" \ + -e "s|@GIT_TEST_PERL_FATAL_WARNINGS@|\'$(GIT_TEST_PERL_FATAL_WARNINGS)\'|" \ + -e "s|@GIT_TEST_TEMPLATE_DIR@|\'$(shell pwd)/templates/blt\'|" \ + -e "s|@GIT_TEST_TEXTDOMAINDIR@|\'$(shell pwd)/po/build/locale\'|" \ + -e "s|@GIT_TEST_UTF8_LOCALE@|\'$(GIT_TEST_UTF8_LOCALE)\'|" \ + -e "s|@LOCALEDIR@|\'$(localedir_SQ)\'|" \ + -e "s|@NO_CURL@|\'$(NO_CURL)\'|" \ + -e "s|@NO_EXPAT@|\'$(NO_EXPAT)\'|" \ + -e "s|@NO_GETTEXT@|\'$(NO_GETTEXT)\'|" \ + -e "s|@NO_GITWEB@|\'$(NO_GITWEB)\'|" \ + -e "s|@NO_ICONV@|\'$(NO_ICONV)\'|" \ + -e "s|@NO_PERL@|\'$(NO_PERL)\'|" \ + -e "s|@NO_PERL_CPAN_FALLBACKS@|\'$(NO_PERL_CPAN_FALLBACKS_SQ)\'|" \ + -e "s|@NO_PTHREADS@|\'$(NO_PTHREADS)\'|" \ + -e "s|@NO_PYTHON@|\'$(NO_PYTHON)\'|" \ + -e "s|@NO_REGEX@|\'$(NO_REGEX)\'|" \ + -e "s|@NO_UNIX_SOCKETS@|\'$(NO_UNIX_SOCKETS)\'|" \ + -e "s|@PAGER_ENV@|\'$(PAGER_ENV)\'|" \ + -e "s|@PERL_LOCALEDIR@|\'$(perl_localedir_SQ)\'|" \ + -e "s|@PERL_PATH@|\'$(PERL_PATH_SQ)\'|" \ + -e "s|@PYTHON_PATH@|\'$(PYTHON_PATH_SQ)\'|" \ + -e "s|@RUNTIME_PREFIX@|\'$(RUNTIME_PREFIX_OPTION)\'|" \ + -e "s|@SANITIZE_ADDRESS@|\'$(SANITIZE_ADDRESS)\'|" \ + -e "s|@SANITIZE_LEAK@|\'$(SANITIZE_LEAK)\'|" \ + -e "s|@SHELL_PATH@|\'$(SHELL_PATH_SQ)\'|" \ + -e "s|@TAR@|\'$(TAR)\'|" \ + -e "s|@TEST_OUTPUT_DIRECTORY@|\'$(TEST_OUTPUT_DIRECTORY)\'|" \ + -e "s|@TEST_SHELL_PATH@|\'$(TEST_SHELL_PATH_SQ)\'|" \ + -e "s|@USE_GETTEXT_SCHEME@|\'$(USE_GETTEXT_SCHEME)\'|" \ + -e "s|@USE_LIBPCRE2@|\'$(USE_LIBPCRE2)\'|" \ + -e "s|@WITH_BREAKING_CHANGES@|\'$(WITH_BREAKING_CHANGES)\'|" \ + -e "s|@X@|\'$(X)\'|" \ + GIT-BUILD-OPTIONS.in >$@+ + @if grep -q '^[A-Z][A-Z_]*=@.*@$$' $@+; then echo "Unsubstituted build options in $@" >&2 && exit 1; fi + @if cmp $@+ $@ >/dev/null 2>&1; then $(RM) $@+; else mv $@+ $@; fi + @if test -f GIT-BUILD-DIR; then rm GIT-BUILD-DIR; fi + +### Detect Python interpreter path changes +ifndef NO_PYTHON +TRACK_PYTHON = $(subst ','\'',-DPYTHON_PATH='$(PYTHON_PATH_SQ)') + +GIT-PYTHON-VARS: FORCE + @VARS='$(TRACK_PYTHON)'; \ + if test x"$$VARS" != x"`cat $@ 2>/dev/null`" ; then \ + echo >&2 " * new Python interpreter location"; \ + echo "$$VARS" >$@; \ + fi +endif + +test_bindir_programs := $(patsubst %,bin-wrappers/%,$(BINDIR_PROGRAMS_NEED_X) $(BINDIR_PROGRAMS_NO_X) $(TEST_PROGRAMS_NEED_X)) + +all:: $(TEST_PROGRAMS) $(test_bindir_programs) $(UNIT_TEST_PROGS) $(CLAR_TEST_PROG) + +$(test_bindir_programs): bin-wrappers/%: bin-wrappers/wrap-for-bin.sh + $(QUIET_GEN)sed -e '1s|#!.*/sh|#!$(SHELL_PATH_SQ)|' \ + -e 
's|@BUILD_DIR@|$(shell pwd)|' \ + -e 's|@GIT_TEXTDOMAINDIR@|$(shell pwd)/po/build/locale|' \ + -e 's|@GITPERLLIB@|$(shell pwd)/perl/build/lib|' \ + -e 's|@MERGE_TOOLS_DIR@|$(shell pwd)/mergetools|' \ + -e 's|@TEMPLATE_DIR@|$(shell pwd)/templates/blt|' \ + -e 's|@PROG@|$(shell pwd)/$(patsubst test-%,t/helper/test-%,$(@F))$(if $(filter-out $(BINDIR_PROGRAMS_NO_X),$(@F)),$(X),)|' < $< > $@ && \ + chmod +x $@ + +# GNU make supports exporting all variables by "export" without parameters. +# However, the environment gets quite big, and some programs have problems +# with that. + +export NO_SVN_TESTS +export TEST_NO_MALLOC_CHECK + +### Testing rules + +test: all + $(MAKE) -C t/ all + +perf: all + $(MAKE) -C t/perf/ all + +.PHONY: test perf + +.PRECIOUS: $(TEST_OBJS) + +t/helper/test-tool$X: $(patsubst %,t/helper/%,$(TEST_BUILTINS_OBJS)) $(UNIT_TEST_DIR)/test-lib.o + +t/helper/test-%$X: t/helper/test-%.o GIT-LDFLAGS $(GITLIBS) + $(QUIET_LINK)$(CC) $(ALL_CFLAGS) -o $@ $(ALL_LDFLAGS) $(filter %.o,$^) $(filter %.a,$^) $(LIBS) + +check-sha1:: t/helper/test-tool$X + t/helper/test-sha1.sh + +SP_SRC = $(filter-out $(THIRD_PARTY_SOURCES),$(patsubst %.o,%.c,$(OBJECTS))) +SP_OBJ = $(patsubst %.c,%.sp,$(SP_SRC)) + +$(SP_OBJ): %.sp: %.c %.o $(GENERATED_H) + $(QUIET_SP)cgcc -no-compile $(ALL_CFLAGS) $(EXTRA_CPPFLAGS) \ + -Wsparse-error \ + $(SPARSE_FLAGS) $(SP_EXTRA_FLAGS) $< && \ + >$@ + +.PHONY: sparse +sparse: $(SP_OBJ) + +EXCEPT_HDRS := $(GENERATED_H) unicode-width.h compat/% xdiff/% $(UNIT_TEST_DIR)/clar/% $(UNIT_TEST_DIR)/clar/clar/% +ifndef OPENSSL_SHA1 + EXCEPT_HDRS += sha1/openssl.h +endif +ifndef OPENSSL_SHA256 + EXCEPT_HDRS += sha256/openssl.h +endif +ifndef NETTLE_SHA256 + EXCEPT_HDRS += sha256/nettle.h +endif +ifndef GCRYPT_SHA256 + EXCEPT_HDRS += sha256/gcrypt.h +endif +CHK_HDRS = $(filter-out $(EXCEPT_HDRS),$(LIB_H)) +HCO = $(patsubst %.h,%.hco,$(CHK_HDRS)) +HCC = $(HCO:hco=hcc) + +%.hcc: %.h + @echo '#include "git-compat-util.h"' >$@ + @echo '#include "$<"' >>$@ + +$(HCO): %.hco: %.hcc $(GENERATED_H) FORCE + $(QUIET_HDR)$(CC) $(ALL_CFLAGS) -o /dev/null -c -xc $< + +# TODO: deprecate 'hdr-check' in lieu of 'check-headers' in Git 2.51+ +.PHONY: hdr-check check-headers $(HCO) +hdr-check: $(HCO) +check-headers: hdr-check + +.PHONY: style +style: + git clang-format --style file --diff --extensions c,h + +.PHONY: check +check: + @if sparse; \ + then \ + echo >&2 "Use 'make sparse' instead"; \ + $(MAKE) --no-print-directory sparse; \ + else \ + echo >&2 "Did you mean 'make test'?"; \ + exit 1; \ + fi + +COCCI_GEN_ALL = .build/contrib/coccinelle/ALL.cocci +COCCI_GLOB = $(wildcard contrib/coccinelle/*.cocci) +COCCI_RULES_TRACKED = $(COCCI_GLOB:%=.build/%) +COCCI_RULES_TRACKED_NO_PENDING = $(filter-out %.pending.cocci,$(COCCI_RULES_TRACKED)) +COCCI_RULES = +COCCI_RULES += $(COCCI_GEN_ALL) +COCCI_RULES += $(COCCI_RULES_TRACKED) +COCCI_NAMES = +COCCI_NAMES += $(COCCI_RULES:.build/contrib/coccinelle/%.cocci=%) + +COCCICHECK_PENDING = $(filter %.pending.cocci,$(COCCI_RULES)) +COCCICHECK = $(filter-out $(COCCICHECK_PENDING),$(COCCI_RULES)) + +COCCICHECK_PATCHES = $(COCCICHECK:%=%.patch) +COCCICHECK_PATCHES_PENDING = $(COCCICHECK_PENDING:%=%.patch) + +COCCICHECK_PATCHES_INTREE = $(COCCICHECK_PATCHES:.build/%=%) +COCCICHECK_PATCHES_PENDING_INTREE = $(COCCICHECK_PATCHES_PENDING:.build/%=%) + +# It's expensive to compute the many=many rules below, only eval them +# on $(MAKECMDGOALS) that match these $(COCCI_RULES) +COCCI_RULES_GLOB = +COCCI_RULES_GLOB += cocci% +COCCI_RULES_GLOB += .build/contrib/coccinelle/% 
+COCCI_RULES_GLOB += $(COCCICHECK_PATCHES) +COCCI_RULES_GLOB += $(COCCICHEC_PATCHES_PENDING) +COCCI_RULES_GLOB += $(COCCICHECK_PATCHES_INTREE) +COCCI_RULES_GLOB += $(COCCICHECK_PATCHES_PENDING_INTREE) +COCCI_GOALS = $(filter $(COCCI_RULES_GLOB),$(MAKECMDGOALS)) + +COCCI_TEST_RES = $(wildcard contrib/coccinelle/tests/*.res) + +$(COCCI_RULES_TRACKED): .build/% : % + $(call mkdir_p_parent_template) + $(QUIET_CP)cp $< $@ + +.build/contrib/coccinelle/FOUND_H_SOURCES: $(FOUND_H_SOURCES) + $(call mkdir_p_parent_template) + $(QUIET_GEN) >$@ + +$(COCCI_GEN_ALL): $(COCCI_RULES_TRACKED_NO_PENDING) + $(call mkdir_p_parent_template) + $(QUIET_SPATCH_CAT)cat $^ >$@ + +ifeq ($(COMPUTE_HEADER_DEPENDENCIES),no) +SPATCH_USE_O_DEPENDENCIES = +endif +define cocci-rule + +## Rule for .build/$(1).patch/$(2); Params: +# $(1) = e.g. ".build/contrib/coccinelle/free.cocci" +# $(2) = e.g. "grep.c" +# $(3) = e.g. "grep.o" +COCCI_$(1:.build/contrib/coccinelle/%.cocci=%) += $(1).d/$(2).patch +$(1).d/$(2).patch: GIT-SPATCH-DEFINES +$(1).d/$(2).patch: $(if $(and $(SPATCH_USE_O_DEPENDENCIES),$(wildcard $(3))),$(3),.build/contrib/coccinelle/FOUND_H_SOURCES) +$(1).d/$(2).patch: $(1) +$(1).d/$(2).patch: $(1).d/%.patch : % + $$(call mkdir_p_parent_template) + $$(QUIET_SPATCH)if ! $$(SPATCH) $$(SPATCH_FLAGS) \ + $$(SPATCH_INCLUDE_FLAGS) \ + --sp-file $(1) --patch . $$< \ + >$$@ 2>$$@.log; \ + then \ + echo "ERROR when applying '$(1)' to '$$<'; '$$@.log' follows:"; \ + cat $$@.log; \ + exit 1; \ + fi +endef + +define cocci-matrix + +$(foreach s,$(COCCI_SOURCES),$(call cocci-rule,$(c),$(s),$(s:%.c=%.o))) +endef + +ifdef COCCI_GOALS +$(eval $(foreach c,$(COCCI_RULES),$(call cocci-matrix,$(c)))) +endif + +define spatch-rule + +.build/contrib/coccinelle/$(1).cocci.patch: $$(COCCI_$(1)) + $$(QUIET_SPATCH_CAT)cat $$^ >$$@ && \ + if test -s $$@; \ + then \ + echo ' ' SPATCH result: $$@; \ + fi +contrib/coccinelle/$(1).cocci.patch: .build/contrib/coccinelle/$(1).cocci.patch + $$(QUIET_CP)cp $$< $$@ + +endef + +ifdef COCCI_GOALS +$(eval $(foreach n,$(COCCI_NAMES),$(call spatch-rule,$(n)))) +endif + +COCCI_TEST_RES_GEN = $(addprefix .build/,$(COCCI_TEST_RES)) +$(COCCI_TEST_RES_GEN): GIT-SPATCH-DEFINES +$(COCCI_TEST_RES_GEN): .build/%.res : %.c +$(COCCI_TEST_RES_GEN): .build/%.res : %.res +ifdef SPATCH_CONCAT_COCCI +$(COCCI_TEST_RES_GEN): .build/contrib/coccinelle/tests/%.res : $(COCCI_GEN_ALL) +else +$(COCCI_TEST_RES_GEN): .build/contrib/coccinelle/tests/%.res : contrib/coccinelle/%.cocci +endif + $(call mkdir_p_parent_template) + $(QUIET_SPATCH_TEST)$(SPATCH) $(SPATCH_TEST_FLAGS) \ + --very-quiet --no-show-diff \ + --sp-file $< -o $@ \ + $(@:.build/%.res=%.c) && \ + cmp $(@:.build/%=%) $@ || \ + git -P diff --no-index $(@:.build/%=%) $@ 2>/dev/null; \ + +.PHONY: coccicheck-test +coccicheck-test: $(COCCI_TEST_RES_GEN) + +coccicheck: coccicheck-test + +ifdef SPATCH_CONCAT_COCCI +COCCICHECK_PATCH_MUST_BE_EMPTY_FILES = contrib/coccinelle/ALL.cocci.patch +else +COCCICHECK_PATCH_MUST_BE_EMPTY_FILES = $(COCCICHECK_PATCHES_INTREE) +endif +coccicheck: $(COCCICHECK_PATCH_MUST_BE_EMPTY_FILES) + ! 
grep -q ^ $(COCCICHECK_PATCH_MUST_BE_EMPTY_FILES) /dev/null + +# See contrib/coccinelle/README +coccicheck-pending: coccicheck-test +coccicheck-pending: $(COCCICHECK_PATCHES_PENDING_INTREE) + +.PHONY: coccicheck coccicheck-pending + +# "Sub"-Makefiles, not really because they can't be run stand-alone, +# only there to contain directory-specific rules and variables +## gitweb/Makefile inclusion: +MAK_DIR_GITWEB = gitweb/ +include gitweb/Makefile + +.PHONY: gitweb +gitweb: $(MAK_DIR_GITWEB_ALL) +ifndef NO_GITWEB +all:: gitweb +endif + +### Installation rules + +ifneq ($(filter /%,$(firstword $(template_dir))),) +template_instdir = $(template_dir) +else +template_instdir = $(prefix)/$(template_dir) +endif +export template_instdir + +ifneq ($(filter /%,$(firstword $(gitexecdir))),) +gitexec_instdir = $(gitexecdir) +else +gitexec_instdir = $(prefix)/$(gitexecdir) +endif +gitexec_instdir_SQ = $(subst ','\'',$(gitexec_instdir)) +export gitexec_instdir + +ifneq ($(filter /%,$(firstword $(mergetoolsdir))),) +mergetools_instdir = $(mergetoolsdir) +else +mergetools_instdir = $(prefix)/$(mergetoolsdir) +endif +mergetools_instdir_SQ = $(subst ','\'',$(mergetools_instdir)) + +install_bindir_xprograms := $(patsubst %,%$X,$(BINDIR_PROGRAMS_NEED_X)) +install_bindir_programs := $(install_bindir_xprograms) $(BINDIR_PROGRAMS_NO_X) + +.PHONY: profile-install profile-fast-install +profile-install: profile + $(MAKE) install + +profile-fast-install: profile-fast + $(MAKE) install + +INSTALL_STRIP = + +install: all + $(INSTALL) -d -m 755 '$(DESTDIR_SQ)$(bindir_SQ)' + $(INSTALL) -d -m 755 '$(DESTDIR_SQ)$(gitexec_instdir_SQ)' + $(INSTALL) $(INSTALL_STRIP) $(PROGRAMS) '$(DESTDIR_SQ)$(gitexec_instdir_SQ)' + $(INSTALL) $(SCRIPTS) '$(DESTDIR_SQ)$(gitexec_instdir_SQ)' + $(INSTALL) -m 644 $(SCRIPT_LIB) '$(DESTDIR_SQ)$(gitexec_instdir_SQ)' + $(INSTALL) $(INSTALL_STRIP) $(install_bindir_xprograms) '$(DESTDIR_SQ)$(bindir_SQ)' + $(INSTALL) $(BINDIR_PROGRAMS_NO_X) '$(DESTDIR_SQ)$(bindir_SQ)' + +ifdef MSVC + # We DO NOT install the individual foo.o.pdb files because they + # have already been rolled up into the exe's pdb file. + # We DO NOT have pdb files for the builtin commands (like git-status.exe) + # because it is just a copy/hardlink of git.exe, rather than a unique binary. + $(INSTALL) $(patsubst %.exe,%.pdb,$(filter-out $(BUILT_INS),$(patsubst %,%$X,$(BINDIR_PROGRAMS_NEED_X)))) '$(DESTDIR_SQ)$(bindir_SQ)' + $(INSTALL) $(patsubst %.exe,%.pdb,$(filter-out $(BUILT_INS) $(REMOTE_CURL_ALIASES),$(PROGRAMS))) '$(DESTDIR_SQ)$(gitexec_instdir_SQ)' +ifndef DEBUG + $(INSTALL) $(vcpkg_rel_bin)/*.dll '$(DESTDIR_SQ)$(bindir_SQ)' + $(INSTALL) $(vcpkg_rel_bin)/*.pdb '$(DESTDIR_SQ)$(bindir_SQ)' +else + $(INSTALL) $(vcpkg_dbg_bin)/*.dll '$(DESTDIR_SQ)$(bindir_SQ)' + $(INSTALL) $(vcpkg_dbg_bin)/*.pdb '$(DESTDIR_SQ)$(bindir_SQ)' +endif +endif + $(MAKE) -C templates DESTDIR='$(DESTDIR_SQ)' install + $(INSTALL) -d -m 755 '$(DESTDIR_SQ)$(mergetools_instdir_SQ)' + $(INSTALL) -m 644 mergetools/* '$(DESTDIR_SQ)$(mergetools_instdir_SQ)' +ifndef NO_GETTEXT + $(INSTALL) -d -m 755 '$(DESTDIR_SQ)$(localedir_SQ)' + (cd po/build/locale && $(TAR) cf - .) | \ + (cd '$(DESTDIR_SQ)$(localedir_SQ)' && umask 022 && $(TAR) xof -) +endif +ifndef NO_PERL + $(INSTALL) -d -m 755 '$(DESTDIR_SQ)$(perllibdir_SQ)' + (cd perl/build/lib && $(TAR) cf - .) 
| \ + (cd '$(DESTDIR_SQ)$(perllibdir_SQ)' && umask 022 && $(TAR) xof -) +endif +ifndef NO_TCLTK + $(MAKE) -C gitk-git install + $(MAKE) -C git-gui gitexecdir='$(gitexec_instdir_SQ)' install +endif +ifneq (,$X) + $(foreach p,$(patsubst %$X,%,$(filter %$X,$(ALL_COMMANDS_TO_INSTALL) $(OTHER_PROGRAMS))), test '$(DESTDIR_SQ)$(gitexec_instdir_SQ)/$p' -ef '$(DESTDIR_SQ)$(gitexec_instdir_SQ)/$p$X' || $(RM) '$(DESTDIR_SQ)$(gitexec_instdir_SQ)/$p';) +endif +ifndef NO_BASH_COMPLETION + $(INSTALL) -d -m 755 '$(DESTDIR_SQ)$(bash_completion_dir_SQ)' && \ + $(INSTALL) -m 644 contrib/completion/git-completion.bash '$(DESTDIR_SQ)$(bash_completion_dir_SQ)/git' +endif + + bindir=$$(cd '$(DESTDIR_SQ)$(bindir_SQ)' && pwd) && \ + execdir=$$(cd '$(DESTDIR_SQ)$(gitexec_instdir_SQ)' && pwd) && \ + destdir_from_execdir_SQ=$$(echo '$(gitexecdir_relative_SQ)' | sed -e 's|[^/][^/]*|..|g') && \ + { test "$$bindir/" = "$$execdir/" || \ + for p in $(OTHER_PROGRAMS) $(filter $(install_bindir_programs),$(ALL_PROGRAMS)); do \ + $(RM) "$$execdir/$$p" && \ + test -n "$(INSTALL_SYMLINKS)" && \ + ln -s "$$destdir_from_execdir_SQ/$(bindir_relative_SQ)/$$p" "$$execdir/$$p" || \ + { test -z "$(NO_INSTALL_HARDLINKS)$(NO_CROSS_DIRECTORY_HARDLINKS)" && \ + ln "$$bindir/$$p" "$$execdir/$$p" 2>/dev/null || \ + cp "$$bindir/$$p" "$$execdir/$$p" || exit; } \ + done; \ + } && \ + for p in $(filter $(install_bindir_programs),$(BUILT_INS)); do \ + $(RM) "$$bindir/$$p" && \ + test -n "$(INSTALL_SYMLINKS)" && \ + ln -s "git$X" "$$bindir/$$p" || \ + { test -z "$(NO_INSTALL_HARDLINKS)" && \ + ln "$$bindir/git$X" "$$bindir/$$p" 2>/dev/null || \ + ln -s "git$X" "$$bindir/$$p" 2>/dev/null || \ + cp "$$bindir/git$X" "$$bindir/$$p" || exit; }; \ + done && \ + for p in $(BUILT_INS); do \ + $(RM) "$$execdir/$$p" && \ + if test -z "$(SKIP_DASHED_BUILT_INS)"; \ + then \ + test -n "$(INSTALL_SYMLINKS)" && \ + ln -s "$$destdir_from_execdir_SQ/$(bindir_relative_SQ)/git$X" "$$execdir/$$p" || \ + { test -z "$(NO_INSTALL_HARDLINKS)" && \ + ln "$$execdir/git$X" "$$execdir/$$p" 2>/dev/null || \ + ln -s "git$X" "$$execdir/$$p" 2>/dev/null || \ + cp "$$execdir/git$X" "$$execdir/$$p" || exit; }; \ + fi \ + done && \ + remote_curl_aliases="$(REMOTE_CURL_ALIASES)" && \ + for p in $$remote_curl_aliases; do \ + $(RM) "$$execdir/$$p" && \ + test -n "$(INSTALL_SYMLINKS)" && \ + ln -s "git-remote-http$X" "$$execdir/$$p" || \ + { test -z "$(NO_INSTALL_HARDLINKS)" && \ + ln "$$execdir/git-remote-http$X" "$$execdir/$$p" 2>/dev/null || \ + ln -s "git-remote-http$X" "$$execdir/$$p" 2>/dev/null || \ + cp "$$execdir/git-remote-http$X" "$$execdir/$$p" || exit; } \ + done + +.PHONY: install-doc install-man install-man-perl install-html install-info install-pdf +.PHONY: quick-install-doc quick-install-man quick-install-html + +install-doc: install-man-perl + $(MAKE) -C Documentation install + +install-man: install-man-perl + $(MAKE) -C Documentation install-man + +install-man-perl: man-perl + $(INSTALL) -d -m 755 '$(DESTDIR_SQ)$(mandir_SQ)/man3' + (cd perl/build/man/man3 && $(TAR) cf - .) 
| \ + (cd '$(DESTDIR_SQ)$(mandir_SQ)/man3' && umask 022 && $(TAR) xof -) + +install-html: + $(MAKE) -C Documentation install-html + +install-info: + $(MAKE) -C Documentation install-info + +install-pdf: + $(MAKE) -C Documentation install-pdf + +quick-install-doc: + $(MAKE) -C Documentation quick-install + +quick-install-man: + $(MAKE) -C Documentation quick-install-man + +quick-install-html: + $(MAKE) -C Documentation quick-install-html + + + +### Maintainer's dist rules + +GIT_TARNAME = git-$(GIT_VERSION) +GIT_ARCHIVE_EXTRA_FILES = \ + --prefix=$(GIT_TARNAME)/ \ + --add-file=configure \ + --add-file=.dist-tmp-dir/version \ + --prefix=$(GIT_TARNAME)/git-gui/ \ + --add-file=.dist-tmp-dir/git-gui/version +ifdef DC_SHA1_SUBMODULE +GIT_ARCHIVE_EXTRA_FILES += \ + --prefix=$(GIT_TARNAME)/sha1collisiondetection/ \ + --add-file=sha1collisiondetection/LICENSE.txt \ + --prefix=$(GIT_TARNAME)/sha1collisiondetection/lib/ \ + --add-file=sha1collisiondetection/lib/sha1.c \ + --add-file=sha1collisiondetection/lib/sha1.h \ + --add-file=sha1collisiondetection/lib/ubc_check.c \ + --add-file=sha1collisiondetection/lib/ubc_check.h +endif +dist: git-archive$(X) configure + @$(RM) -r .dist-tmp-dir + @mkdir .dist-tmp-dir + @echo $(GIT_VERSION) > .dist-tmp-dir/version + @$(MAKE) -C git-gui TARDIR=../.dist-tmp-dir/git-gui dist-version + ./git-archive --format=tar \ + $(GIT_ARCHIVE_EXTRA_FILES) \ + --prefix=$(GIT_TARNAME)/ HEAD^{tree} > $(GIT_TARNAME).tar + @$(RM) -r .dist-tmp-dir + gzip -f -9 $(GIT_TARNAME).tar + +rpm:: + @echo >&2 "Use distro packaged sources to run rpmbuild" + @false +.PHONY: rpm + +ifneq ($(INCLUDE_DLLS_IN_ARTIFACTS),) +OTHER_PROGRAMS += $(shell echo *.dll t/helper/*.dll t/unit-tests/bin/*.dll) +endif + +artifacts-tar:: $(ALL_COMMANDS_TO_INSTALL) $(SCRIPT_LIB) $(OTHER_PROGRAMS) \ + GIT-BUILD-OPTIONS $(TEST_PROGRAMS) $(test_bindir_programs) \ + $(UNIT_TEST_PROGS) $(CLAR_TEST_PROG) $(MOFILES) + $(QUIET_SUBDIR0)templates $(QUIET_SUBDIR1) \ + SHELL_PATH='$(SHELL_PATH_SQ)' PERL_PATH='$(PERL_PATH_SQ)' + test -n "$(ARTIFACTS_DIRECTORY)" + mkdir -p "$(ARTIFACTS_DIRECTORY)" + $(TAR) czf "$(ARTIFACTS_DIRECTORY)/artifacts.tar.gz" $^ templates/blt/ +.PHONY: artifacts-tar + +htmldocs = git-htmldocs-$(GIT_VERSION) +manpages = git-manpages-$(GIT_VERSION) +.PHONY: dist-doc distclean +dist-doc: git$X + $(RM) -r .doc-tmp-dir + mkdir .doc-tmp-dir + $(MAKE) -C Documentation WEBDOC_DEST=../.doc-tmp-dir install-webdoc + ./git -C .doc-tmp-dir init + ./git -C .doc-tmp-dir add . + ./git -C .doc-tmp-dir commit -m htmldocs + ./git -C .doc-tmp-dir archive --format=tar --prefix=./ HEAD^{tree} \ + > $(htmldocs).tar + gzip -n -9 -f $(htmldocs).tar + : + $(RM) -r .doc-tmp-dir + mkdir -p .doc-tmp-dir/man1 .doc-tmp-dir/man5 .doc-tmp-dir/man7 + $(MAKE) -C Documentation DESTDIR=./ \ + man1dir=../.doc-tmp-dir/man1 \ + man5dir=../.doc-tmp-dir/man5 \ + man7dir=../.doc-tmp-dir/man7 \ + install + ./git -C .doc-tmp-dir init + ./git -C .doc-tmp-dir add . 
+ ./git -C .doc-tmp-dir commit -m manpages + ./git -C .doc-tmp-dir archive --format=tar --prefix=./ HEAD^{tree} \ + > $(manpages).tar + gzip -n -9 -f $(manpages).tar + $(RM) -r .doc-tmp-dir + +### Cleaning rules + +distclean: clean + $(RM) configure + $(RM) config.log config.status config.cache + $(RM) config.mak.autogen config.mak.append + $(RM) -r autom4te.cache + +profile-clean: + $(RM) $(addsuffix *.gcda,$(addprefix $(PROFILE_DIR)/, $(object_dirs))) + $(RM) $(addsuffix *.gcno,$(addprefix $(PROFILE_DIR)/, $(object_dirs))) + +cocciclean: + $(RM) GIT-SPATCH-DEFINES + $(RM) -r .build/contrib/coccinelle + $(RM) contrib/coccinelle/*.cocci.patch + +clean: profile-clean coverage-clean cocciclean + $(RM) -r .build $(UNIT_TEST_BIN) + $(RM) GIT-TEST-SUITES + $(RM) po/git.pot po/git-core.pot + $(RM) git.rc git.res + $(RM) $(OBJECTS) + $(RM) headless-git.o + $(RM) $(LIB_FILE) + $(RM) $(ALL_PROGRAMS) $(SCRIPT_LIB) $(BUILT_INS) $(OTHER_PROGRAMS) + $(RM) $(TEST_PROGRAMS) + $(RM) $(FUZZ_PROGRAMS) + $(RM) $(SP_OBJ) + $(RM) $(HCC) + $(RM) -r Cargo.lock target/ + $(RM) version-def.h + $(RM) -r $(dep_dirs) $(compdb_dir) compile_commands.json + $(RM) $(test_bindir_programs) + $(RM) -r po/build/ + $(RM) *.pyc *.pyo */*.pyc */*.pyo $(GENERATED_H) $(ETAGS_TARGET) tags cscope* + $(RM) -r .dist-tmp-dir .doc-tmp-dir + $(RM) $(GIT_TARNAME).tar.gz + $(RM) $(htmldocs).tar.gz $(manpages).tar.gz + $(MAKE) -C Documentation/ clean + $(RM) Documentation/GIT-EXCLUDED-PROGRAMS + $(RM) -r contrib/libgit-sys/target contrib/libgit-rs/target + $(RM) contrib/libgit-sys/partial_symbol_export.o + $(RM) contrib/libgit-sys/hidden_symbol_export.o + $(RM) contrib/libgit-sys/libgitpub.a +ifndef NO_PERL + $(RM) -r perl/build/ +endif + $(MAKE) -C templates/ clean + $(MAKE) -C t/ clean +ifndef NO_TCLTK + $(MAKE) -C gitk-git clean + $(MAKE) -C git-gui clean +endif + $(RM) GIT-VERSION-FILE GIT-CFLAGS GIT-LDFLAGS GIT-BUILD-OPTIONS + $(RM) GIT-USER-AGENT GIT-PREFIX + $(RM) GIT-SCRIPT-DEFINES GIT-PERL-DEFINES GIT-PERL-HEADER GIT-PYTHON-VARS +ifdef MSVC + $(RM) $(patsubst %.o,%.o.pdb,$(OBJECTS)) + $(RM) headless-git.o.pdb + $(RM) $(patsubst %.exe,%.pdb,$(OTHER_PROGRAMS)) + $(RM) $(patsubst %.exe,%.iobj,$(OTHER_PROGRAMS)) + $(RM) $(patsubst %.exe,%.ipdb,$(OTHER_PROGRAMS)) + $(RM) $(patsubst %.exe,%.pdb,$(PROGRAMS)) + $(RM) $(patsubst %.exe,%.iobj,$(PROGRAMS)) + $(RM) $(patsubst %.exe,%.ipdb,$(PROGRAMS)) + $(RM) $(patsubst %.exe,%.pdb,$(TEST_PROGRAMS)) + $(RM) $(patsubst %.exe,%.iobj,$(TEST_PROGRAMS)) + $(RM) $(patsubst %.exe,%.ipdb,$(TEST_PROGRAMS)) + $(RM) compat/vcbuild/MSVC-DEFS-GEN +endif + +.PHONY: all install profile-clean cocciclean clean strip +.PHONY: shell_compatibility_test please_set_SHELL_PATH_to_a_more_modern_shell +.PHONY: FORCE + +### Check documentation +# +ALL_COMMANDS = $(ALL_COMMANDS_TO_INSTALL) $(SCRIPT_LIB) +ALL_COMMANDS += git +ALL_COMMANDS += git-citool +ALL_COMMANDS += git-gui +ALL_COMMANDS += gitk +ALL_COMMANDS += gitweb +ALL_COMMANDS += scalar + +.PHONY: check-docs +check-docs:: + $(MAKE) -C Documentation lint-docs + +### Make sure built-ins do not have dups and listed in git.c +# +check-builtins:: + ./check-builtins.sh + +### Test suite coverage testing +# +.PHONY: coverage coverage-clean coverage-compile coverage-test coverage-report +.PHONY: coverage-untested-functions cover_db cover_db_html +.PHONY: coverage-clean-results + +coverage: + $(MAKE) coverage-test + $(MAKE) coverage-untested-functions + +object_dirs := $(sort $(dir $(OBJECTS))) +coverage-clean-results: + $(RM) $(addsuffix *.gcov,$(object_dirs)) + $(RM) 
$(addsuffix *.gcda,$(object_dirs)) + $(RM) coverage-untested-functions + $(RM) -r cover_db/ + $(RM) -r cover_db_html/ + $(RM) coverage-test.made + +coverage-clean: coverage-clean-results + $(RM) $(addsuffix *.gcno,$(object_dirs)) + +COVERAGE_CFLAGS = $(CFLAGS) -O0 -ftest-coverage -fprofile-arcs +COVERAGE_LDFLAGS = $(CFLAGS) -O0 -lgcov +GCOVFLAGS = --preserve-paths --branch-probabilities --all-blocks + +coverage-compile: + $(MAKE) CFLAGS="$(COVERAGE_CFLAGS)" LDFLAGS="$(COVERAGE_LDFLAGS)" all + +coverage-test: coverage-clean-results coverage-compile + $(MAKE) CFLAGS="$(COVERAGE_CFLAGS)" LDFLAGS="$(COVERAGE_LDFLAGS)" \ + DEFAULT_TEST_TARGET=test -j1 test + touch coverage-test.made + +coverage-test.made: + $(MAKE) coverage-test + +coverage-prove: coverage-clean-results coverage-compile + $(MAKE) CFLAGS="$(COVERAGE_CFLAGS)" LDFLAGS="$(COVERAGE_LDFLAGS)" \ + DEFAULT_TEST_TARGET=prove GIT_PROVE_OPTS="$(GIT_PROVE_OPTS) -j1" \ + -j1 test + +coverage-report: coverage-test.made + $(QUIET_GCOV)for dir in $(object_dirs); do \ + $(GCOV) $(GCOVFLAGS) --object-directory=$$dir $$dir*.c || exit; \ + done + +coverage-untested-functions: coverage-report + grep '^function.*called 0 ' *.c.gcov \ + | sed -e 's/\([^:]*\)\.gcov: *function \([^ ]*\) called.*/\1: \2/' \ + > coverage-untested-functions + +cover_db: coverage-report + gcov2perl -db cover_db *.gcov + +cover_db_html: cover_db + cover -report html -outputdir cover_db_html cover_db + + +### Fuzz testing +# +# Building fuzz targets generally requires a special set of compiler flags that +# are not necessarily appropriate for general builds, and that vary greatly +# depending on the compiler version used. +# +# An example command to build against libFuzzer from LLVM 11.0.0: +# +# make CC=clang FUZZ_CXX=clang++ \ +# CFLAGS="-fsanitize=fuzzer-no-link,address" \ +# LIB_FUZZING_ENGINE="-fsanitize=fuzzer,address" \ +# fuzz-all +# +FUZZ_CXX ?= $(CC) +FUZZ_CXXFLAGS ?= $(ALL_CFLAGS) + +.PHONY: fuzz-all +fuzz-all: $(FUZZ_PROGRAMS) + +$(FUZZ_PROGRAMS): %: %.o oss-fuzz/dummy-cmd-main.o $(GITLIBS) GIT-LDFLAGS + $(QUIET_LINK)$(FUZZ_CXX) $(FUZZ_CXXFLAGS) -o $@ $(ALL_LDFLAGS) \ + -Wl,--allow-multiple-definition \ + $(filter %.o,$^) $(filter %.a,$^) $(LIBS) $(LIB_FUZZING_ENGINE) + +$(UNIT_TEST_PROGS): $(UNIT_TEST_BIN)/%$X: $(UNIT_TEST_DIR)/%.o $(UNIT_TEST_OBJS) \ + $(GITLIBS) GIT-LDFLAGS + $(call mkdir_p_parent_template) + $(QUIET_LINK)$(CC) $(ALL_CFLAGS) -o $@ $(ALL_LDFLAGS) \ + $(filter %.o,$^) $(filter %.a,$^) $(LIBS) + +GIT-TEST-SUITES: FORCE + @FLAGS='$(CLAR_TEST_SUITES)'; \ + if test x"$$FLAGS" != x"`cat GIT-TEST-SUITES 2>/dev/null`" ; then \ + echo >&2 " * new test suites"; \ + echo "$$FLAGS" >GIT-TEST-SUITES; \ + fi + +$(UNIT_TEST_DIR)/clar-decls.h: $(patsubst %,$(UNIT_TEST_DIR)/%.c,$(CLAR_TEST_SUITES)) $(UNIT_TEST_DIR)/generate-clar-decls.sh GIT-TEST-SUITES + $(QUIET_GEN)$(SHELL_PATH) $(UNIT_TEST_DIR)/generate-clar-decls.sh "$@" $(filter %.c,$^) +$(UNIT_TEST_DIR)/clar.suite: $(UNIT_TEST_DIR)/clar-decls.h $(UNIT_TEST_DIR)/generate-clar-suites.sh + $(QUIET_GEN)$(SHELL_PATH) $(UNIT_TEST_DIR)/generate-clar-suites.sh $< $(UNIT_TEST_DIR)/clar.suite +$(UNIT_TEST_DIR)/clar/clar.o: $(UNIT_TEST_DIR)/clar.suite +$(CLAR_TEST_OBJS): $(UNIT_TEST_DIR)/clar-decls.h +$(CLAR_TEST_OBJS): EXTRA_CPPFLAGS = -I$(UNIT_TEST_DIR) +$(CLAR_TEST_PROG): $(UNIT_TEST_DIR)/clar.suite $(CLAR_TEST_OBJS) $(GITLIBS) GIT-LDFLAGS + $(call mkdir_p_parent_template) + $(QUIET_LINK)$(CC) $(ALL_CFLAGS) -o $@ $(ALL_LDFLAGS) $(filter %.o,$^) $(LIBS) + +.PHONY: build-unit-tests unit-tests +build-unit-tests: 
$(UNIT_TEST_PROGS) $(CLAR_TEST_PROG) +unit-tests: $(UNIT_TEST_PROGS) $(CLAR_TEST_PROG) t/helper/test-tool$X + $(MAKE) -C t/ unit-tests + +.PHONY: libgit-sys libgit-rs +libgit-sys: + $(QUIET)cargo build --manifest-path contrib/libgit-sys/Cargo.toml +libgit-rs: libgit-sys + $(QUIET)cargo build --manifest-path contrib/libgit-rs/Cargo.toml +ifdef INCLUDE_LIBGIT_RS +all:: libgit-rs +endif + +LIBGIT_PUB_OBJS += contrib/libgit-sys/public_symbol_export.o +LIBGIT_PUB_OBJS += libgit.a + +LIBGIT_PARTIAL_EXPORT = contrib/libgit-sys/partial_symbol_export.o + +LIBGIT_HIDDEN_EXPORT = contrib/libgit-sys/hidden_symbol_export.o + +$(LIBGIT_PARTIAL_EXPORT): $(LIBGIT_PUB_OBJS) + $(LD) -r $^ -o $@ + +$(LIBGIT_HIDDEN_EXPORT): $(LIBGIT_PARTIAL_EXPORT) + $(OBJCOPY) --localize-hidden $^ $@ + +contrib/libgit-sys/libgitpub.a: $(LIBGIT_HIDDEN_EXPORT) + $(AR) $(ARFLAGS) $@ $^ diff --git a/config.mak.uname b/config.mak.uname new file mode 100644 index 0000000..1691c6a --- /dev/null +++ b/config.mak.uname @@ -0,0 +1,794 @@ +# Platform specific Makefile tweaks based on uname detection + +# Define NO_SAFESEH if you need MSVC/Visual Studio to ignore the lack of +# Microsoft's Safe Exception Handling in libraries (such as zlib). +# Typically required for VS2013+/32-bit compilation on Vista+ versions. + +uname_S := $(shell sh -c 'uname -s 2>/dev/null || echo not') +uname_M := $(shell sh -c 'uname -m 2>/dev/null || echo not') +uname_O := $(shell sh -c 'uname -o 2>/dev/null || echo not') +uname_R := $(shell sh -c 'uname -r 2>/dev/null || echo not') +uname_V := $(shell sh -c 'uname -v 2>/dev/null || echo not') + +ifneq ($(findstring MINGW,$(uname_S)),) + uname_S := MINGW +endif + +ifdef MSVC + # avoid the MingW and Cygwin configuration sections + uname_S := Windows + uname_O := Windows + + # Generate and include makefile variables that point to the + # currently installed set of MSVC command line tools. +compat/vcbuild/MSVC-DEFS-GEN: compat/vcbuild/find_vs_env.bat + @"$<" | tr '\\' / >"$@" +include compat/vcbuild/MSVC-DEFS-GEN + + # See if vcpkg and the vcpkg-build versions of the third-party + # libraries that we use are installed. We include the result + # to get $(vcpkg_*) variables defined for the Makefile. +ifeq (,$(SKIP_VCPKG)) +compat/vcbuild/VCPKG-DEFS: compat/vcbuild/vcpkg_install.bat + @"$<" +include compat/vcbuild/VCPKG-DEFS +endif +endif + +# We choose to avoid "if .. else if .. else .. endif endif" +# because maintaining the nesting to match is a pain. If +# we had "elif" things would have been much nicer... + +ifeq ($(uname_S),OSF1) + # Need this for u_short definitions et al + BASIC_CFLAGS += -D_OSF_SOURCE + SOCKLEN_T = int + NO_STRTOULL = YesPlease + NO_NSEC = YesPlease +endif +ifeq ($(uname_S),Linux) + HAVE_ALLOCA_H = YesPlease + # override in config.mak if you have glibc >= 2.38 + NO_STRLCPY = YesPlease + CSPRNG_METHOD = getrandom + HAVE_PATHS_H = YesPlease + LIBC_CONTAINS_LIBINTL = YesPlease + HAVE_DEV_TTY = YesPlease + HAVE_CLOCK_GETTIME = YesPlease + HAVE_CLOCK_MONOTONIC = YesPlease + HAVE_SYNC_FILE_RANGE = YesPlease + HAVE_GETDELIM = YesPlease + FREAD_READS_DIRECTORIES = UnfortunatelyYes + HAVE_SYSINFO = YesPlease + PROCFS_EXECUTABLE_PATH = /proc/self/exe + HAVE_PLATFORM_PROCINFO = YesPlease + COMPAT_OBJS += compat/linux/procinfo.o + # centos7/rhel7 provides gcc 4.8.5 and zlib 1.2.7. 
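+	# (Editorial comment, stating a compiler fact not spelled out here:
+	# gcc 4.8 still defaults to the gnu90 standard, hence the explicit
+	# -std=c99 below.)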
+ ifneq ($(findstring .el7.,$(uname_R)),) + BASIC_CFLAGS += -std=c99 + endif + LINK_FUZZ_PROGRAMS = YesPlease +endif +ifeq ($(uname_S),GNU/kFreeBSD) + HAVE_ALLOCA_H = YesPlease + NO_STRLCPY = YesPlease + HAVE_PATHS_H = YesPlease + DIR_HAS_BSD_GROUP_SEMANTICS = YesPlease + LIBC_CONTAINS_LIBINTL = YesPlease + FREAD_READS_DIRECTORIES = UnfortunatelyYes +endif +ifeq ($(uname_S),UnixWare) + CC = cc + NEEDS_SOCKET = YesPlease + NEEDS_NSL = YesPlease + NEEDS_SSL_WITH_CRYPTO = YesPlease + NEEDS_LIBICONV = YesPlease + SHELL_PATH = /usr/local/bin/bash + NO_IPV6 = YesPlease + NO_HSTRERROR = YesPlease + BASIC_CFLAGS += -Kthread + BASIC_CFLAGS += -I/usr/local/include + BASIC_LDFLAGS += -L/usr/local/lib + INSTALL = ginstall + TAR = gtar + NO_STRCASESTR = YesPlease + NO_MEMMEM = YesPlease +endif +ifeq ($(uname_S),SCO_SV) + ifeq ($(uname_R),3.2) + CFLAGS = -O2 + endif + ifeq ($(uname_R),5) + CC = cc + BASIC_CFLAGS += -Kthread + endif + NEEDS_SOCKET = YesPlease + NEEDS_NSL = YesPlease + NEEDS_SSL_WITH_CRYPTO = YesPlease + NEEDS_LIBICONV = YesPlease + SHELL_PATH = /usr/bin/bash + NO_IPV6 = YesPlease + NO_HSTRERROR = YesPlease + BASIC_CFLAGS += -I/usr/local/include + BASIC_LDFLAGS += -L/usr/local/lib + NO_STRCASESTR = YesPlease + NO_MEMMEM = YesPlease + INSTALL = ginstall + TAR = gtar +endif +ifeq ($(uname_S),Darwin) + NEEDS_CRYPTO_WITH_SSL = YesPlease + NEEDS_SSL_WITH_CRYPTO = YesPlease + NEEDS_LIBICONV = YesPlease + # Note: $(uname_R) gives us the underlying Darwin version. + # - MacOS 10.0.* and MacOS 10.1.0 = Darwin 1.* + # - MacOS 10.x.* = Darwin (x+4).* for (1 <= x) + # i.e. "begins with [15678] and a dot" means "10.4.* or older". + ifeq ($(shell expr "$(uname_R)" : '[15678]\.'),2) + OLD_ICONV = UnfortunatelyYes + NO_APPLE_COMMON_CRYPTO = YesPlease + endif + ifeq ($(shell expr "$(uname_R)" : '[15]\.'),2) + NO_STRLCPY = YesPlease + endif + ifeq ($(shell test "`expr "$(uname_R)" : '\([0-9][0-9]*\)\.'`" -ge 11 && echo 1),1) + HAVE_GETDELIM = YesPlease + endif + ifeq ($(shell test "`expr "$(uname_R)" : '\([0-9][0-9]*\)\.'`" -ge 20 && echo 1),1) + OPEN_RETURNS_EINTR = UnfortunatelyYes + endif + NO_MEMMEM = YesPlease + USE_ST_TIMESPEC = YesPlease + HAVE_DEV_TTY = YesPlease + COMPAT_OBJS += compat/precompose_utf8.o + BASIC_CFLAGS += -DPRECOMPOSE_UNICODE + BASIC_CFLAGS += -DPROTECT_HFS_DEFAULT=1 + HAVE_BSD_SYSCTL = YesPlease + FREAD_READS_DIRECTORIES = UnfortunatelyYes + HAVE_NS_GET_EXECUTABLE_PATH = YesPlease + CSPRNG_METHOD = arc4random + USE_ENHANCED_BASIC_REGULAR_EXPRESSIONS = YesPlease + + # Workaround for `gettext` being keg-only and not even being linked via + # `brew link --force gettext`, should be obsolete as of + # https://github.com/Homebrew/homebrew-core/pull/53489 + ifeq ($(shell test -d /usr/local/opt/gettext/ && echo y),y) + BASIC_CFLAGS += -I/usr/local/include -I/usr/local/opt/gettext/include + BASIC_LDFLAGS += -L/usr/local/lib -L/usr/local/opt/gettext/lib + ifeq ($(shell test -x /usr/local/opt/gettext/bin/msgfmt && echo y),y) + MSGFMT = /usr/local/opt/gettext/bin/msgfmt + endif + # On newer ARM-based machines the default installation path has changed to + # /opt/homebrew. Include it in our search paths so that the user does not + # have to configure this manually. + # + # Note that we do not employ the same workaround as above where we manually + # add gettext. The issue was fixed more than three years ago by now, and at + # that point there haven't been any ARM-based Macs yet. 
+ else ifeq ($(shell test -d /opt/homebrew/ && echo y),y) + BASIC_CFLAGS += -I/opt/homebrew/include + BASIC_LDFLAGS += -L/opt/homebrew/lib + ifeq ($(shell test -x /opt/homebrew/bin/msgfmt && echo y),y) + MSGFMT = /opt/homebrew/bin/msgfmt + endif + endif + + # The builtin FSMonitor on MacOS builds upon Simple-IPC. Both require + # Unix domain sockets and PThreads. + ifndef NO_PTHREADS + ifndef NO_UNIX_SOCKETS + FSMONITOR_DAEMON_BACKEND = darwin + FSMONITOR_OS_SETTINGS = darwin + endif + endif + + BASIC_LDFLAGS += -framework CoreServices +endif +ifeq ($(uname_S),SunOS) + NEEDS_SOCKET = YesPlease + NEEDS_NSL = YesPlease + SHELL_PATH = /bin/bash + SANE_TOOL_PATH = /usr/xpg6/bin:/usr/xpg4/bin + HAVE_ALLOCA_H = YesPlease + NO_REGEX = YesPlease + NO_MSGFMT_EXTENDED_OPTIONS = YesPlease + HAVE_DEV_TTY = YesPlease + ifeq ($(uname_R),5.6) + SOCKLEN_T = int + NO_HSTRERROR = YesPlease + NO_IPV6 = YesPlease + NO_SOCKADDR_STORAGE = YesPlease + NO_UNSETENV = YesPlease + NO_MKDTEMP = YesPlease + NO_MEMMEM = YesPlease + NO_SETENV = YesPlease + NO_STRCASESTR = YesPlease + NO_STRLCPY = YesPlease + NO_STRTOUMAX = YesPlease + GIT_TEST_CMP = cmp + endif + ifeq ($(uname_R),5.7) + NEEDS_RESOLV = YesPlease + NO_IPV6 = YesPlease + NO_SOCKADDR_STORAGE = YesPlease + NO_UNSETENV = YesPlease + NO_MKDTEMP = YesPlease + NO_MEMMEM = YesPlease + NO_SETENV = YesPlease + NO_STRCASESTR = YesPlease + NO_STRLCPY = YesPlease + NO_STRTOUMAX = YesPlease + GIT_TEST_CMP = cmp + endif + ifeq ($(uname_R),5.8) + NO_UNSETENV = YesPlease + NO_MKDTEMP = YesPlease + NO_MEMMEM = YesPlease + NO_SETENV = YesPlease + NO_STRCASESTR = YesPlease + NO_STRTOUMAX = YesPlease + GIT_TEST_CMP = cmp + endif + ifeq ($(uname_R),5.9) + NO_UNSETENV = YesPlease + NO_MKDTEMP = YesPlease + NO_MEMMEM = YesPlease + NO_SETENV = YesPlease + NO_STRCASESTR = YesPlease + NO_STRTOUMAX = YesPlease + GIT_TEST_CMP = cmp + endif + ifeq ($(uname_R),5.10) + NO_UNSETENV = YesPlease + NO_MKDTEMP = YesPlease + NO_MEMMEM = YesPlease + NO_SETENV = YesPlease + NO_STRCASESTR = YesPlease + GIT_TEST_CMP = cmp + endif + ifeq ($(uname_R),5.11) + NO_UNSETENV = YesPlease + NO_SETENV = YesPlease + GIT_TEST_CMP = cmp + endif + INSTALL = /usr/ucb/install + TAR = gtar + BASIC_CFLAGS += -D__EXTENSIONS__ -D__sun__ +endif +ifeq ($(uname_O),Cygwin) + ifeq ($(shell expr "$(uname_R)" : '1\.[1-6]\.'),4) + NO_D_TYPE_IN_DIRENT = YesPlease + NO_STRCASESTR = YesPlease + NO_MEMMEM = YesPlease + NO_SYMLINK_HEAD = YesPlease + NO_IPV6 = YesPlease + OLD_ICONV = UnfortunatelyYes + # There are conflicting reports about this. + # On some boxes NO_MMAP is needed, and not so elsewhere. + # Try commenting this out if you suspect MMAP is more efficient + NO_MMAP = YesPlease + else + ifeq ($(shell expr "$(uname_R)" : '1\.7\.'),4) + NO_REGEX = UnfortunatelyYes + endif + endif + HAVE_DEV_TTY = YesPlease + HAVE_GETDELIM = YesPlease + HAVE_CLOCK_GETTIME = YesPlease + HAVE_CLOCK_MONOTONIC = YesPlease + HAVE_SYSINFO = YesPlease + CSPRNG_METHOD = arc4random + HAVE_ALLOCA_H = YesPlease + NEEDS_LIBICONV = YesPlease + NO_FAST_WORKING_DIRECTORY = UnfortunatelyYes + NO_ST_BLOCKS_IN_STRUCT_STAT = YesPlease + X = .exe + UNRELIABLE_FSTAT = UnfortunatelyYes + OBJECT_CREATION_USES_RENAMES = UnfortunatelyNeedsTo + MMAP_PREVENTS_DELETE = UnfortunatelyYes + COMPAT_OBJS += compat/win32/path-utils.o + FREAD_READS_DIRECTORIES = UnfortunatelyYes +endif +ifeq ($(uname_S),FreeBSD) + NEEDS_LIBICONV = YesPlease + # Versions up to 10.1 require OLD_ICONV; 10.2 and beyond don't. + # A typical version string looks like "10.2-RELEASE". 
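+	# (Editorial comment: the first check below catches single-digit
+	# major versions, i.e. 9.x and older; 10.0 and 10.1 are then
+	# handled explicitly.)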
+ ifeq ($(shell expr "$(uname_R)" : '[1-9]\.'),2) + OLD_ICONV = YesPlease + endif + ifeq ($(firstword $(subst -, ,$(uname_R))),10.0) + OLD_ICONV = YesPlease + endif + ifeq ($(firstword $(subst -, ,$(uname_R))),10.1) + OLD_ICONV = YesPlease + endif + ifeq ($(shell v=$(uname_R) && test $${v%%.*} -lt 12 && echo 1),1) + NO_MEMMEM = UnfortunatelyYes + endif + BASIC_CFLAGS += -I/usr/local/include + BASIC_LDFLAGS += -L/usr/local/lib + DIR_HAS_BSD_GROUP_SEMANTICS = YesPlease + USE_ST_TIMESPEC = YesPlease + PYTHON_PATH = /usr/local/bin/python + PERL_PATH = /usr/local/bin/perl + HAVE_PATHS_H = YesPlease + HAVE_BSD_SYSCTL = YesPlease + HAVE_BSD_KERN_PROC_SYSCTL = YesPlease + CSPRNG_METHOD = arc4random + PAGER_ENV = LESS=FRX LV=-c MORE=FRX + FREAD_READS_DIRECTORIES = UnfortunatelyYes + FILENO_IS_A_MACRO = UnfortunatelyYes +endif +ifeq ($(uname_S),OpenBSD) + DIR_HAS_BSD_GROUP_SEMANTICS = YesPlease + USE_ST_TIMESPEC = YesPlease + NEEDS_LIBICONV = YesPlease + BASIC_CFLAGS += -I/usr/local/include + BASIC_LDFLAGS += -L/usr/local/lib + HAVE_PATHS_H = YesPlease + HAVE_BSD_SYSCTL = YesPlease + CSPRNG_METHOD = arc4random + FREAD_READS_DIRECTORIES = UnfortunatelyYes + FILENO_IS_A_MACRO = UnfortunatelyYes +endif +ifeq ($(uname_S),MirBSD) + NO_STRCASESTR = YesPlease + NO_MEMMEM = YesPlease + USE_ST_TIMESPEC = YesPlease + NEEDS_LIBICONV = YesPlease + HAVE_PATHS_H = YesPlease + HAVE_BSD_SYSCTL = YesPlease + CSPRNG_METHOD = arc4random +endif +ifeq ($(uname_S),NetBSD) + ifeq ($(shell expr "$(uname_R)" : '[01]\.'),2) + NEEDS_LIBICONV = YesPlease + endif + BASIC_CFLAGS += -I/usr/pkg/include + BASIC_LDFLAGS += -L/usr/pkg/lib $(CC_LD_DYNPATH)/usr/pkg/lib + USE_ST_TIMESPEC = YesPlease + HAVE_PATHS_H = YesPlease + HAVE_BSD_SYSCTL = YesPlease + HAVE_BSD_KERN_PROC_SYSCTL = YesPlease + CSPRNG_METHOD = arc4random + PROCFS_EXECUTABLE_PATH = /proc/curproc/exe +endif +ifeq ($(uname_S),AIX) + DEFAULT_PAGER = more + NO_STRCASESTR = YesPlease + NO_MEMMEM = YesPlease + NO_MKDTEMP = YesPlease + NO_STRLCPY = YesPlease + NO_NSEC = YesPlease + NO_REGEX = NeedsStartEnd + FREAD_READS_DIRECTORIES = UnfortunatelyYes + INTERNAL_QSORT = UnfortunatelyYes + NEEDS_LIBICONV = YesPlease + BASIC_CFLAGS += -D_LARGE_FILES + FILENO_IS_A_MACRO = UnfortunatelyYes + NEED_ACCESS_ROOT_HANDLER = UnfortunatelyYes + ifeq ($(shell expr "$(uname_V)" : '[1234]'),1) + NO_PTHREADS = YesPlease + else + PTHREAD_LIBS = -lpthread + endif + ifeq ($(shell expr "$(uname_V).$(uname_R)" : '5\.1'),3) + INLINE = '' + endif + GIT_TEST_CMP = cmp +endif +ifeq ($(uname_S),GNU) + # GNU/Hurd + HAVE_ALLOCA_H = YesPlease + NO_STRLCPY = YesPlease + HAVE_PATHS_H = YesPlease + LIBC_CONTAINS_LIBINTL = YesPlease + FREAD_READS_DIRECTORIES = UnfortunatelyYes +endif +ifeq ($(uname_S),IRIX) + NO_SETENV = YesPlease + NO_UNSETENV = YesPlease + NO_STRCASESTR = YesPlease + NO_MEMMEM = YesPlease + NO_MKDTEMP = YesPlease + # When compiled with the MIPSpro 7.4.4m compiler, and without pthreads + # (i.e. NO_PTHREADS is set), and _with_ MMAP (i.e. NO_MMAP is not set), + # git dies with a segmentation fault when trying to access the first + # entry of a reflog. The conservative choice is made to always set + # NO_MMAP. If you suspect that your compiler is not affected by this + # issue, comment out the NO_MMAP statement. 
+ NO_MMAP = YesPlease + NO_REGEX = YesPlease + SNPRINTF_RETURNS_BOGUS = YesPlease + SHELL_PATH = /usr/gnu/bin/bash + NEEDS_LIBGEN = YesPlease +endif +ifeq ($(uname_S),IRIX64) + NO_SETENV = YesPlease + NO_UNSETENV = YesPlease + NO_STRCASESTR = YesPlease + NO_MEMMEM = YesPlease + NO_MKDTEMP = YesPlease + # When compiled with the MIPSpro 7.4.4m compiler, and without pthreads + # (i.e. NO_PTHREADS is set), and _with_ MMAP (i.e. NO_MMAP is not set), + # git dies with a segmentation fault when trying to access the first + # entry of a reflog. The conservative choice is made to always set + # NO_MMAP. If you suspect that your compiler is not affected by this + # issue, comment out the NO_MMAP statement. + NO_MMAP = YesPlease + NO_REGEX = YesPlease + SNPRINTF_RETURNS_BOGUS = YesPlease + SHELL_PATH = /usr/gnu/bin/bash + NEEDS_LIBGEN = YesPlease +endif +ifeq ($(uname_S),HP-UX) + INLINE = __inline + NO_IPV6 = YesPlease + NO_SETENV = YesPlease + NO_STRCASESTR = YesPlease + NO_MEMMEM = YesPlease + NO_STRLCPY = YesPlease + NO_MKDTEMP = YesPlease + NO_UNSETENV = YesPlease + NO_HSTRERROR = YesPlease + NO_SYS_SELECT_H = YesPlease + SNPRINTF_RETURNS_BOGUS = YesPlease + NO_NSEC = YesPlease + ifeq ($(uname_R),B.11.00) + NO_INET_NTOP = YesPlease + NO_INET_PTON = YesPlease + endif + ifeq ($(uname_R),B.10.20) + # Override HP-UX 11.x setting: + INLINE = + SOCKLEN_T = size_t + NO_PREAD = YesPlease + NO_INET_NTOP = YesPlease + NO_INET_PTON = YesPlease + endif + GIT_TEST_CMP = cmp +endif +ifeq ($(uname_S),Windows) + GIT_VERSION := $(GIT_VERSION).MSVC + pathsep = ; + # Assume that this is built in Git for Windows' SDK + ifeq (MINGW32,$(MSYSTEM)) + prefix = /mingw32 + else + ifeq (CLANGARM64,$(MSYSTEM)) + prefix = /clangarm64 + else + prefix = /mingw64 + endif + endif + # Prepend MSVC 64-bit tool-chain to PATH. + # + # A regular Git Bash *does not* have cl.exe in its $PATH. As there is a + # link.exe next to, and required by, cl.exe, we have to prepend this + # onto the existing $PATH. + # + SANE_TOOL_PATH ?= $(msvc_bin_dir_msys) + HAVE_ALLOCA_H = YesPlease + NO_PREAD = YesPlease + NEEDS_CRYPTO_WITH_SSL = YesPlease + NO_LIBGEN_H = YesPlease + NO_POLL = YesPlease + NO_SYMLINK_HEAD = YesPlease + NO_IPV6 = YesPlease + NO_SETENV = YesPlease + NO_STRCASESTR = YesPlease + NO_STRLCPY = YesPlease + NO_MEMMEM = YesPlease + NEEDS_LIBICONV = YesPlease + NO_STRTOUMAX = YesPlease + NO_MKDTEMP = YesPlease + NO_INTTYPES_H = YesPlease + CSPRNG_METHOD = rtlgenrandom + # VS2015 with UCRT claims that snprintf and friends are C99 compliant, + # so we don't need this: + # + # SNPRINTF_RETURNS_BOGUS = YesPlease + + # The builtin FSMonitor requires Named Pipes and Threads on Windows. + # These are always available, so we do not have to conditionally + # support it. 
+ FSMONITOR_DAEMON_BACKEND = win32 + FSMONITOR_OS_SETTINGS = win32 + + NO_SVN_TESTS = YesPlease + RUNTIME_PREFIX = YesPlease + HAVE_WPGMPTR = YesWeDo + NO_ST_BLOCKS_IN_STRUCT_STAT = YesPlease + USE_WIN32_IPC = YesPlease + USE_WIN32_MMAP = YesPlease + MMAP_PREVENTS_DELETE = UnfortunatelyYes + # USE_NED_ALLOCATOR = YesPlease + UNRELIABLE_FSTAT = UnfortunatelyYes + OBJECT_CREATION_USES_RENAMES = UnfortunatelyNeedsTo + NO_REGEX = YesPlease + NO_GETTEXT = YesPlease + NO_PYTHON = YesPlease + ETAGS_TARGET = ETAGS + NO_POSIX_GOODIES = UnfortunatelyYes + NATIVE_CRLF = YesPlease + DEFAULT_HELP_FORMAT = html +ifeq (/mingw64,$(subst 32,64,$(subst clangarm,mingw,$(prefix)))) + # Move system config into top-level /etc/ + ETC_GITCONFIG = ../etc/gitconfig + ETC_GITATTRIBUTES = ../etc/gitattributes +endif + + CC = compat/vcbuild/scripts/clink.pl + AR = compat/vcbuild/scripts/lib.pl + CFLAGS = + BASIC_CFLAGS = -nologo -I. -Icompat/vcbuild/include -DWIN32 -D_CONSOLE -DHAVE_STRING_H -D_CRT_SECURE_NO_WARNINGS -D_CRT_NONSTDC_NO_DEPRECATE + COMPAT_OBJS = compat/msvc.o compat/winansi.o \ + compat/win32/flush.o \ + compat/win32/path-utils.o \ + compat/win32/pthread.o compat/win32/syslog.o \ + compat/win32/trace2_win32_process_info.o \ + compat/win32/dirent.o + COMPAT_CFLAGS = -D__USE_MINGW_ACCESS -DDETECT_MSYS_TTY -DNOGDI -DHAVE_STRING_H -Icompat -Icompat/regex -Icompat/win32 -DSTRIP_EXTENSION=\".exe\" + BASIC_LDFLAGS = -IGNORE:4217 -IGNORE:4049 -NOLOGO -ENTRY:wmainCRTStartup -SUBSYSTEM:CONSOLE + # invalidcontinue.obj allows Git's source code to close the same file + # handle twice, or to access the osfhandle of an already-closed stdout + # See https://msdn.microsoft.com/en-us/library/ms235330.aspx + EXTLIBS = user32.lib advapi32.lib shell32.lib wininet.lib ws2_32.lib invalidcontinue.obj kernel32.lib ntdll.lib + PTHREAD_LIBS = + lib = + BASIC_CFLAGS += $(vcpkg_inc) $(sdk_includes) $(msvc_includes) +ifndef DEBUG + BASIC_CFLAGS += $(vcpkg_rel_lib) +else + BASIC_CFLAGS += $(vcpkg_dbg_lib) +endif + BASIC_CFLAGS += $(sdk_libs) $(msvc_libs) + +ifneq ($(USE_MSVC_CRTDBG),) + # Optionally enable memory leak reporting. + BASIC_CFLAGS += -DUSE_MSVC_CRTDBG +endif + # Always give "-Zi" to the compiler and "-debug" to linker (even in + # release mode) to force a PDB to be generated (like RelWithDebInfo). + BASIC_CFLAGS += -Zi + BASIC_LDFLAGS += -debug -Zf + +ifdef NO_SAFESEH + LDFLAGS += -SAFESEH:NO +endif + +ifndef DEBUG + BASIC_CFLAGS += -GL -Gy -O2 -Oy- -MD -DNDEBUG + BASIC_LDFLAGS += -release -LTCG /OPT:REF /OPT:ICF /INCREMENTAL:NO /DEBUGTYPE:CV,FIXUP + AR += -LTCG +else + BASIC_CFLAGS += -MDd -DDEBUG -D_DEBUG +endif + X = .exe + + EXTRA_PROGRAMS += headless-git$X + +compat/msvc.o: compat/msvc.c compat/mingw.c GIT-CFLAGS +endif +ifeq ($(uname_S),Interix) + NO_INITGROUPS = YesPlease + NO_IPV6 = YesPlease + NO_MEMMEM = YesPlease + NO_MKDTEMP = YesPlease + NO_STRTOUMAX = YesPlease + NO_NSEC = YesPlease + ifeq ($(uname_R),3.5) + NO_INET_NTOP = YesPlease + NO_INET_PTON = YesPlease + NO_SOCKADDR_STORAGE = YesPlease + endif + ifeq ($(uname_R),5.2) + NO_INET_NTOP = YesPlease + NO_INET_PTON = YesPlease + NO_SOCKADDR_STORAGE = YesPlease + endif +endif +ifeq ($(uname_S),Minix) + NO_IPV6 = YesPlease + NO_ST_BLOCKS_IN_STRUCT_STAT = YesPlease + NO_NSEC = YesPlease + NEEDS_LIBGEN = + NEEDS_CRYPTO_WITH_SSL = YesPlease + NEEDS_RESOLV = + NO_HSTRERROR = YesPlease + NO_MMAP = YesPlease + NO_CURL = + NO_EXPAT = +endif +ifeq ($(uname_S),NONSTOP_KERNEL) + # Needs some C99 features, "inline" is just one of them. 
+ # INLINE='' would just replace one set of warnings with another and + # still not compile in c89 mode, due to non-const array initializations. + CC = cc -c99 + # Build down-rev compatible objects that don't use our new getopt_long. + ifeq ($(uname_R).$(uname_V),J06.21) + CC += -WRVU=J06.20 + endif + ifeq ($(uname_R).$(uname_V),L17.02) + CC += -WRVU=L16.05 + endif + # Disable all optimization, seems to result in bad code, with -O or -O2 + # or even -O1 (default), /usr/local/libexec/git-core/git-pack-objects + # abends on "git push". Needs more investigation. + CFLAGS = -g -O0 -Winline + # We'd want it to be here. + prefix = /usr/local + # perl and python must be in /usr/bin on NonStop - supplied by HPE + # with operating system in that managed directory. + PERL_PATH = /usr/bin/perl + PYTHON_PATH = /usr/bin/python + # The current /usr/coreutils/rm at lowest support level does not work + # with the git test structure. Long paths as in + # 'trash directory...' cause rm to terminate prematurely without fully + # removing the directory at OS releases J06.21 and L17.02. + # Default to the older rm until those two releases are deprecated. + RM = /bin/rm -f + NEEDS_CRYPTO_WITH_SSL = YesPlease + HAVE_DEV_TTY = YesPlease + HAVE_LIBCHARSET_H = YesPlease + HAVE_STRINGS_H = YesPlease + NEEDS_LIBICONV = YesPlease + NEEDS_LIBINTL_BEFORE_LIBICONV = YesPlease + NO_SYS_SELECT_H = UnfortunatelyYes + NO_D_TYPE_IN_DIRENT = YesPlease + NO_GETTEXT = YesPlease + NO_HSTRERROR = YesPlease + NO_STRCASESTR = YesPlease + NO_MEMMEM = YesPlease + NO_STRLCPY = YesPlease + NO_SETENV = YesPlease + NO_UNSETENV = YesPlease + NO_MKDTEMP = YesPlease + # Currently libiconv-1.9.1. + OLD_ICONV = UnfortunatelyYes + NO_REGEX = NeedsStartEnd + NO_PTHREADS = UnfortunatelyYes + FREAD_READS_DIRECTORIES = UnfortunatelyYes + + # Not detected (nor checked for) by './configure'. + # We don't have SA_RESTART on NonStop, unfortunalety. + COMPAT_CFLAGS += -DSA_RESTART=0 + # Apparently needed in compat/fnmatch/fnmatch.c. + COMPAT_CFLAGS += -DHAVE_STRING_H=1 + NO_ST_BLOCKS_IN_STRUCT_STAT = YesPlease + NO_NSEC = YesPlease + NO_PREAD = YesPlease + NO_MMAP = YesPlease + NO_POLL = YesPlease + NO_INTPTR_T = UnfortunatelyYes + CSPRNG_METHOD = openssl + SANE_TOOL_PATH = /usr/coreutils/bin:/usr/local/bin + SHELL_PATH = /usr/coreutils/bin/bash +endif +ifeq ($(uname_S),OS/390) + NO_SYS_POLL_H = YesPlease + NO_STRCASESTR = YesPlease + NO_REGEX = YesPlease + NO_MMAP = YesPlease + NO_NSEC = YesPlease + NO_STRLCPY = YesPlease + NO_MEMMEM = YesPlease + NO_GECOS_IN_PWENT = YesPlease + HAVE_STRINGS_H = YesPlease + NEEDS_MODE_TRANSLATION = YesPlease + HAVE_ZOS_GET_EXECUTABLE_PATH = YesPlease +endif +ifeq ($(uname_S),MINGW) + ifeq ($(shell expr "$(uname_R)" : '1\.'),2) + $(error "Building with MSys is no longer supported") + endif + pathsep = ; + HAVE_ALLOCA_H = YesPlease + NO_PREAD = YesPlease + NEEDS_CRYPTO_WITH_SSL = YesPlease + NO_LIBGEN_H = YesPlease + NO_POLL = YesPlease + NO_SYMLINK_HEAD = YesPlease + NO_SETENV = YesPlease + NO_STRCASESTR = YesPlease + NO_STRLCPY = YesPlease + NO_MEMMEM = YesPlease + NEEDS_LIBICONV = YesPlease + NO_STRTOUMAX = YesPlease + NO_MKDTEMP = YesPlease + NO_SVN_TESTS = YesPlease + + # The builtin FSMonitor requires Named Pipes and Threads on Windows. + # These are always available, so we do not have to conditionally + # support it. 
+ FSMONITOR_DAEMON_BACKEND = win32 + FSMONITOR_OS_SETTINGS = win32 + + RUNTIME_PREFIX = YesPlease + HAVE_WPGMPTR = YesWeDo + NO_ST_BLOCKS_IN_STRUCT_STAT = YesPlease + USE_WIN32_IPC = YesPlease + USE_WIN32_MMAP = YesPlease + MMAP_PREVENTS_DELETE = UnfortunatelyYes + UNRELIABLE_FSTAT = UnfortunatelyYes + OBJECT_CREATION_USES_RENAMES = UnfortunatelyNeedsTo + NO_REGEX = YesPlease + ETAGS_TARGET = ETAGS + NO_POSIX_GOODIES = UnfortunatelyYes + DEFAULT_HELP_FORMAT = html + HAVE_PLATFORM_PROCINFO = YesPlease + CSPRNG_METHOD = rtlgenrandom + BASIC_LDFLAGS += -municode + COMPAT_CFLAGS += -DNOGDI -Icompat -Icompat/win32 + COMPAT_CFLAGS += -DSTRIP_EXTENSION=\".exe\" + COMPAT_OBJS += compat/mingw.o compat/winansi.o \ + compat/win32/trace2_win32_process_info.o \ + compat/win32/flush.o \ + compat/win32/path-utils.o \ + compat/win32/pthread.o compat/win32/syslog.o \ + compat/win32/dirent.o + BASIC_CFLAGS += -DWIN32 + EXTLIBS += -lws2_32 + GITLIBS += git.res + PTHREAD_LIBS = + RC = windres -O coff + NATIVE_CRLF = YesPlease + X = .exe + # MSys2 + prefix = /usr/ + # Enable DEP + BASIC_LDFLAGS += -Wl,--nxcompat + # Enable ASLR (unless debugging) + ifneq (,$(findstring -O,$(filter-out -O0 -Og,$(CFLAGS)))) + BASIC_LDFLAGS += -Wl,--dynamicbase + endif + ifeq (MINGW32,$(MSYSTEM)) + prefix = /mingw32 + HOST_CPU = i686 + BASIC_LDFLAGS += -Wl,--pic-executable,-e,_mainCRTStartup + endif + ifeq (MINGW64,$(MSYSTEM)) + prefix = /mingw64 + HOST_CPU = x86_64 + BASIC_LDFLAGS += -Wl,--pic-executable,-e,mainCRTStartup + else ifeq (CLANGARM64,$(MSYSTEM)) + prefix = /clangarm64 + HOST_CPU = aarch64 + BASIC_LDFLAGS += -Wl,--pic-executable,-e,mainCRTStartup + else + COMPAT_CFLAGS += -D_USE_32BIT_TIME_T + BASIC_LDFLAGS += -Wl,--large-address-aware + endif + CC = gcc + COMPAT_CFLAGS += -D__USE_MINGW_ANSI_STDIO=0 -DDETECT_MSYS_TTY \ + -fstack-protector-strong + EXTLIBS += -lntdll + EXTRA_PROGRAMS += headless-git$X + INSTALL = /bin/install + INTERNAL_QSORT = YesPlease + HAVE_LIBCHARSET_H = YesPlease + USE_GETTEXT_SCHEME = fallthrough + USE_LIBPCRE = YesPlease + ifneq (CLANGARM64,$(MSYSTEM)) + USE_NED_ALLOCATOR = YesPlease + endif + ifeq (/mingw64,$(subst 32,64,$(subst clangarm,mingw,$(prefix)))) + # Move system config into top-level /etc/ + ETC_GITCONFIG = ../etc/gitconfig + ETC_GITATTRIBUTES = ../etc/gitattributes + endif +endif +ifeq ($(uname_S),QNX) + COMPAT_CFLAGS += -DSA_RESTART=0 + EXPAT_NEEDS_XMLPARSE_H = YesPlease + HAVE_STRINGS_H = YesPlease + NEEDS_SOCKET = YesPlease + NO_GETPAGESIZE = YesPlease + NO_ICONV = YesPlease + NO_MEMMEM = YesPlease + NO_MKDTEMP = YesPlease + NO_NSEC = YesPlease + NO_PTHREADS = YesPlease + NO_STRCASESTR = YesPlease + NO_STRLCPY = YesPlease +endif diff --git a/gitweb/GITWEB-BUILD-OPTIONS.in b/gitweb/GITWEB-BUILD-OPTIONS.in new file mode 100644 index 0000000..41ac206 --- /dev/null +++ b/gitweb/GITWEB-BUILD-OPTIONS.in @@ -0,0 +1,24 @@ +PERL_PATH=@PERL_PATH@ +JSMIN=@JSMIN@ +CSSMIN=@CSSMIN@ +GIT_BINDIR=@GIT_BINDIR@ +GITWEB_CONFIG=@GITWEB_CONFIG@ +GITWEB_CONFIG_SYSTEM=@GITWEB_CONFIG_SYSTEM@ +GITWEB_CONFIG_COMMON=@GITWEB_CONFIG_COMMON@ +GITWEB_HOME_LINK_STR=@GITWEB_HOME_LINK_STR@ +GITWEB_SITENAME=@GITWEB_SITENAME@ +GITWEB_PROJECTROOT=@GITWEB_PROJECTROOT@ +GITWEB_PROJECT_MAXDEPTH=@GITWEB_PROJECT_MAXDEPTH@ +GITWEB_EXPORT_OK=@GITWEB_EXPORT_OK@ +GITWEB_STRICT_EXPORT=@GITWEB_STRICT_EXPORT@ +GITWEB_BASE_URL=@GITWEB_BASE_URL@ +GITWEB_LIST=@GITWEB_LIST@ +GITWEB_HOMETEXT=@GITWEB_HOMETEXT@ +GITWEB_CSS=@GITWEB_CSS@ +GITWEB_LOGO=@GITWEB_LOGO@ +GITWEB_FAVICON=@GITWEB_FAVICON@ +GITWEB_JS=@GITWEB_JS@ 
+GITWEB_SITE_HTML_HEAD_STRING=@GITWEB_SITE_HTML_HEAD_STRING@ +GITWEB_SITE_HEADER=@GITWEB_SITE_HEADER@ +GITWEB_SITE_FOOTER=@GITWEB_SITE_FOOTER@ +HIGHLIGHT_BIN=@HIGHLIGHT_BIN@ diff --git a/gitweb/INSTALL b/gitweb/INSTALL new file mode 100644 index 0000000..5bfa496 --- /dev/null +++ b/gitweb/INSTALL @@ -0,0 +1,328 @@ +GIT web Interface (gitweb) Installation +======================================= + +First you have to generate gitweb.cgi from gitweb.perl using +"make gitweb", then "make install-gitweb" appropriate files +(gitweb.cgi, gitweb.js, gitweb.css, git-logo.png and git-favicon.png) +to their destination. For example if git was (or is) installed with +/usr prefix and gitwebdir is /var/www/cgi-bin, you can do + + $ make prefix=/usr gitweb ;# as yourself + # make gitwebdir=/var/www/cgi-bin install-gitweb ;# as root + +Alternatively you can use autoconf generated ./configure script to +set up path to git binaries (via config.mak.autogen), so you can write +instead + + $ make configure ;# as yourself + $ ./configure --prefix=/usr ;# as yourself + $ make gitweb ;# as yourself + # make gitwebdir=/var/www/cgi-bin \ + install-gitweb ;# as root + +The above example assumes that your web server is configured to run +[executable] files in /var/www/cgi-bin/ as server scripts (as CGI +scripts). + + +Requirements +------------ + + - Core git tools + - Perl 5.8.1 + - Perl modules: CGI, Encode, Fcntl, File::Find, File::Basename. + - web server + +The following optional Perl modules are required for extra features + - CGI::Fast and FCGI - for running gitweb as FastCGI script + - HTML::TagCloud - for fancy tag cloud in project list view + - HTTP::Date or Time::ParseDate - to support If-Modified-Since for feeds + + +Build time configuration +------------------------ + +See also "How to configure gitweb for your local system" section below. + +- There are many configuration variables which affect building of + gitweb.cgi; see "default configuration for gitweb" section in main + (top dir) Makefile, and instructions for building gitweb target. + + One of the most important is where to find the git wrapper binary. Gitweb + tries to find the git wrapper at $(bindir)/git, so you have to set $bindir + when building gitweb.cgi, or $prefix from which $bindir is derived. If + you build and install gitweb together with the rest of the git suite, + there should be no problems. Otherwise, if git was for example + installed from a binary package, you have to set $prefix (or $bindir) + accordingly. + +- Another important issue is where are git repositories you want to make + available to gitweb. By default gitweb searches for repositories under + /pub/git; if you want to have projects somewhere else, like /home/git, + use GITWEB_PROJECTROOT build configuration variable. + + By default all git repositories under projectroot are visible and + available to gitweb. The list of projects is generated by default by + scanning the projectroot directory for git repositories. This can be + changed (configured) as described in "Gitweb repositories" section + below. + + Note that gitweb deals directly with the object database, and does not + need a working directory; the name of the project is the name of its + repository object database, usually projectname.git for bare + repositories. If you want to provide gitweb access to non-bare (live) + repositories, you can make projectname.git a symbolic link under + projectroot linking to projectname/.git (but it is just + a suggestion). 
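+
+  For example, assuming the default projectroot of /pub/git and a live
+  repository checked out at /home/user/project (both paths here are only
+  placeholders for illustration), such a link might be created with:
+
+      $ ln -s /home/user/project/.git /pub/git/project.git ;# adjust paths to your setup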
+ +- You can control where gitweb tries to find its main CSS style file, + its JavaScript file, its favicon and logo with the GITWEB_CSS, GITWEB_JS + GITWEB_FAVICON and GITWEB_LOGO build configuration variables. By default + gitweb tries to find them in the same directory as gitweb.cgi script. + +- You can optionally generate minified versions of gitweb.js and gitweb.css + by defining the JSMIN and CSSMIN build configuration variables. By default + the non-minified versions will be used. NOTE: if you enable this option, + substitute gitweb.min.js and gitweb.min.css for all uses of gitweb.js and + gitweb.css in the help files. + + +How to configure gitweb for your local system +--------------------------------------------- + +You can specify the following configuration variables when building GIT: + + * GIT_BINDIR + Points where to find the git executable. You should set it up to + the place where the git binary was installed (usually /usr/bin) if you + don't install git from sources together with gitweb. [Default: $(bindir)] + * GITWEB_SITENAME + Shown in the title of all generated pages, defaults to the server name + (SERVER_NAME CGI environment variable) if not set. [No default] + * GITWEB_PROJECTROOT + The root directory for all projects shown by gitweb. Must be set + correctly for gitweb to find repositories to display. See also + "Gitweb repositories" in the INSTALL file for gitweb. [Default: /pub/git] + * GITWEB_PROJECT_MAXDEPTH + The filesystem traversing limit for getting the project list; the number + is taken as depth relative to the projectroot. It is used when + GITWEB_LIST is a directory (or is not set; then project root is used). + This is meant to speed up project listing on large work trees by limiting + search depth. [Default: 2007] + * GITWEB_LIST + Points to a directory to scan for projects (defaults to project root + if not set / if empty) or to a file with explicit listing of projects + (together with projects' ownership). See "Generating projects list + using gitweb" in INSTALL file for gitweb to find out how to generate + such file from scan of a directory. [No default, which means use root + directory for projects] + * GITWEB_EXPORT_OK + Show repository only if this file exists (in repository). Only + effective if this variable evaluates to true. [No default / Not set] + * GITWEB_STRICT_EXPORT + Only allow viewing of repositories also shown on the overview page. + This for example makes GITWEB_EXPORT_OK to decide if repository is + available and not only if it is shown. If GITWEB_LIST points to + file with list of project, only those repositories listed would be + available for gitweb. [No default] + * GITWEB_HOMETEXT + Points to an .html file which is included on the gitweb project + overview page ('projects_list' view), if it exists. Relative to + gitweb.cgi script. [Default: indextext.html] + * GITWEB_SITE_HTML_HEAD_STRING + html snippet to include in the section of each page. [No default] + * GITWEB_SITE_HEADER + Filename of html text to include at top of each page. Relative to + gitweb.cgi script. [No default] + * GITWEB_SITE_FOOTER + Filename of html text to include at bottom of each page. Relative to + gitweb.cgi script. [No default] + * GITWEB_HOME_LINK_STR + String of the home link on top of all pages, leading to $home_link + (usually main gitweb page, which means projects list). Used as first + part of gitweb view "breadcrumb trail": / / . + [Default: projects] + * GITWEB_SITENAME + Name of your site or organization to appear in page titles. 
Set it + to something descriptive for clearer bookmarks etc. If not set + (if empty) gitweb uses "$SERVER_NAME Git", or "Untitled Git" if + SERVER_NAME CGI environment variable is not set (e.g. if running + gitweb as standalone script). [No default] + * GITWEB_BASE_URL + Git base URLs used for URL to where fetch project from, i.e. full + URL is "$git_base_url/$project". Shown on projects summary page. + Repository URL for project can be also configured per repository; this + takes precedence over URLs composed from base URL and a project name. + Note that you can setup multiple base URLs (for example one for + git:// protocol access, another for http:// access) from the gitweb + config file. [No default] + * GITWEB_CSS + Points to the location where you put gitweb.css on your web server + (or to be more generic, the URI of gitweb stylesheet). Relative to the + base URI of gitweb. Note that you can setup multiple stylesheets from + the gitweb config file. [Default: static/gitweb.css (or + static/gitweb.min.css if the CSSMIN variable is defined / CSS minifier + is used)] + * GITWEB_JS + Points to the location where you put gitweb.js on your web server + (or to be more generic URI of JavaScript code used by gitweb). + Relative to base URI of gitweb. [Default: static/gitweb.js (or + static/gitweb.min.js if JSMIN build variable is defined / JavaScript + minifier is used)] + * CSSMIN, JSMIN + Invocation of a CSS minifier or a JavaScript minifier, respectively, + working as a filter (source on standard input, minified result on + standard output). If set, it is used to generate a minified version of + 'static/gitweb.css' or 'static/gitweb.js', respectively. *Note* that + minified files would have *.min.css and *.min.js extension, which is + important if you also set GITWEB_CSS and/or GITWEB_JS. [No default] + * GITWEB_LOGO + Points to the location where you put git-logo.png on your web server + (or to be more generic URI of logo, 72x27 size, displayed in top right + corner of each gitweb page, and used as logo for Atom feed). Relative + to base URI of gitweb. [Default: static/git-logo.png] + * GITWEB_FAVICON + Points to the location where you put git-favicon.png on your web server + (or to be more generic URI of favicon, assumed to be image/png type; + web browsers that support favicons (website icons) may display them + in the browser's URL bar and next to site name in bookmarks). Relative + to base URI of gitweb. [Default: static/git-favicon.png] + * GITWEB_CONFIG + This Perl file will be loaded using 'do' and can be used to override any + of the options above as well as some other options -- see the "Runtime + gitweb configuration" section below, and top of 'gitweb.cgi' for their + full list and description. If the environment variable GITWEB_CONFIG + is set when gitweb.cgi is executed, then the file specified in the + environment variable will be loaded instead of the file specified + when gitweb.cgi was created. [Default: gitweb_config.perl] + * GITWEB_CONFIG_SYSTEM + This Perl file will be loaded using 'do' as a fallback if GITWEB_CONFIG + does not exist. If the environment variable GITWEB_CONFIG_SYSTEM is set + when gitweb.cgi is executed, then the file specified in the environment + variable will be loaded instead of the file specified when gitweb.cgi was + created. [Default: /etc/gitweb.conf] + * HIGHLIGHT_BIN + Path to the highlight executable to use (must be the one from + http://andre-simon.de/zip/download.php due to assumptions about parameters and output). 
+ Useful if highlight is not installed on your webserver's PATH. + [Default: highlight] + +Build example +~~~~~~~~~~~~~ + +- To install gitweb to /var/www/cgi-bin/gitweb/, when git wrapper + is installed at /usr/local/bin/git, the repositories (projects) + we want to display are under /home/local/scm, and you do not use + minifiers, you can do + + make GITWEB_PROJECTROOT="/home/local/scm" \ + GITWEB_JS="gitweb/static/gitweb.js" \ + GITWEB_CSS="gitweb/static/gitweb.css" \ + GITWEB_LOGO="gitweb/static/git-logo.png" \ + GITWEB_FAVICON="gitweb/static/git-favicon.png" \ + bindir=/usr/local/bin \ + gitweb + + make gitwebdir=/var/www/cgi-bin/gitweb install-gitweb + + +Gitweb config file +------------------ + +See also "Runtime gitweb configuration" section in README file +for gitweb (in gitweb/README), and gitweb.conf(5) manpage. + +- You can configure gitweb further using the per-instance gitweb configuration file; + by default this is a file named gitweb_config.perl in the same place as + gitweb.cgi script. You can control the default place for the config file + using the GITWEB_CONFIG build configuration variable, and you can set it + using the GITWEB_CONFIG environment variable. If this file does not + exist, gitweb looks for a system-wide configuration file, normally + /etc/gitweb.conf. You can change the default using the + GITWEB_CONFIG_SYSTEM build configuration variable, and override it + through the GITWEB_CONFIG_SYSTEM environment variable. + + Note that the GITWEB_CONFIG_SYSTEM system-wide configuration file is + only used for instances that lack per-instance configuration file. + You can use GITWEB_CONFIG_COMMON common system-wide configuration + file (normally /etc/gitweb-common.conf) to keep common default + settings that apply to all instances. Settings from per-instance or + system-wide configuration file override those from common system-wide + configuration file. + +- The gitweb config file is a fragment of perl code. You can set variables + using "our $variable = value"; text from "#" character until the end + of a line is ignored. See perlsyn(1) for details. + + See the top of gitweb.perl file for examples of customizable options. + +Config file example +~~~~~~~~~~~~~~~~~~~ + +To enable blame, pickaxe search, and snapshot support, while allowing +individual projects to turn them off, put the following in your +GITWEB_CONFIG file: + + $feature{'blame'}{'default'} = [1]; + $feature{'blame'}{'override'} = 1; + + $feature{'pickaxe'}{'default'} = [1]; + $feature{'pickaxe'}{'override'} = 1; + + $feature{'snapshot'}{'default'} = ['zip', 'tgz']; + $feature{'snapshot'}{'override'} = 1; + +If you allow overriding for the snapshot feature, you can specify which +snapshot formats are globally disabled. You can also add any command line +options you want (such as setting the compression level). For instance, +you can disable Zip compressed snapshots and set GZip to run at level 6 by +adding the following lines to your $GITWEB_CONFIG: + + $known_snapshot_formats{'zip'}{'disabled'} = 1; + $known_snapshot_formats{'tgz'}{'compressor'} = ['gzip','-6']; + + +Gitweb repositories +------------------- + +By default gitweb shows all git repositories under single common repository +root on a local filesystem; see description of GITWEB_PROJECTROOT build-time +configuration variable above (and also of GITWEB_LIST). + +More advanced usage, like limiting access or visibility of repositories and +managing multiple roots are described on gitweb manpage. 
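+
+For example, a minimal sketch of limiting visibility with the
+GITWEB_EXPORT_OK mechanism described earlier in this file (the file name
+git-daemon-export-ok and the path /pub/git/project.git are illustrative
+choices, not defaults):
+
+    $ make GITWEB_EXPORT_OK="git-daemon-export-ok" gitweb
+    $ touch /pub/git/project.git/git-daemon-export-ok ;# mark this repository as visible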
+ + +Example web server configuration +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +See also "Webserver configuration" and "Advanced web server setup" sections +in gitweb(1) manpage. + + +- Apache2, gitweb installed as CGI script, + under /var/www/cgi-bin/ + + ScriptAlias /cgi-bin/ "/var/www/cgi-bin/" + + + Options Indexes FollowSymlinks ExecCGI + AllowOverride None + Order allow,deny + Allow from all + + +- Apache2, gitweb installed as mod_perl legacy script, + under /var/www/perl/ + + Alias /perl "/var/www/perl" + + + SetHandler perl-script + PerlResponseHandler ModPerl::Registry + PerlOptions +ParseHeaders + Options Indexes FollowSymlinks +ExecCGI + AllowOverride None + Order allow,deny + Allow from all + diff --git a/gitweb/Makefile b/gitweb/Makefile new file mode 100644 index 0000000..26a683d --- /dev/null +++ b/gitweb/Makefile @@ -0,0 +1,146 @@ +ifndef MAK_DIR_GITWEB +$(error do not run gitweb/Makefile stand-alone anymore. The "gitweb" and \ +"install-gitweb" targets now live in the top-level Makefile) +endif + +# Define JSMIN to point to JavaScript minifier that functions as +# a filter to have static/gitweb.js minified. +# +# Define CSSMIN to point to a CSS minifier in order to generate a minified +# version of static/gitweb.css +# + +# default configuration for gitweb +GITWEB_CONFIG = gitweb_config.perl +GITWEB_CONFIG_SYSTEM = /etc/gitweb.conf +GITWEB_CONFIG_COMMON = /etc/gitweb-common.conf +GITWEB_HOME_LINK_STR = projects +GITWEB_SITENAME = +GITWEB_PROJECTROOT = /pub/git +GITWEB_PROJECT_MAXDEPTH = 2007 +GITWEB_EXPORT_OK = +GITWEB_STRICT_EXPORT = +GITWEB_BASE_URL = +GITWEB_LIST = +GITWEB_HOMETEXT = indextext.html +GITWEB_CSS_IN = static/gitweb.css +GITWEB_CSS = $(GITWEB_CSS_IN) +GITWEB_LOGO = static/git-logo.png +GITWEB_FAVICON = static/git-favicon.png +GITWEB_JS_IN = static/gitweb.js +GITWEB_JS = $(GITWEB_JS_IN) +GITWEB_SITE_HTML_HEAD_STRING = +GITWEB_SITE_HEADER = +GITWEB_SITE_FOOTER = +HIGHLIGHT_BIN = highlight + +# What targets we'll add to 'all' for "make gitweb" +GITWEB_ALL = +GITWEB_ALL += gitweb.cgi +GITWEB_ALL += $(GITWEB_JS) + +MAK_DIR_GITWEB_ALL = $(addprefix $(MAK_DIR_GITWEB),$(GITWEB_ALL)) + +GITWEB_PROGRAMS = gitweb.cgi + +GITWEB_JS_MIN = static/gitweb.min.js +ifdef JSMIN +GITWEB_JS = $(GITWEB_JS_MIN) +GITWEB_ALL += $(MAK_DIR_GITWEB)$(GITWEB_JS_MIN) +$(MAK_DIR_GITWEB)$(GITWEB_JS_MIN): $(MAK_DIR_GITWEB)GITWEB-BUILD-OPTIONS +$(MAK_DIR_GITWEB)$(GITWEB_JS_MIN): $(MAK_DIR_GITWEB)$(GITWEB_JS_IN) + $(QUIET_GEN)$(JSMIN) <$< >$@ +endif +GITWEB_FILES += $(GITWEB_JS) + +GITWEB_CSS_MIN = static/gitweb.min.css +ifdef CSSMIN +GITWEB_CSS = $(GITWEB_CSS_MIN) +GITWEB_ALL += $(MAK_DIR_GITWEB)$(GITWEB_CSS_MIN) +$(MAK_DIR_GITWEB)$(GITWEB_CSS_MIN): $(MAK_DIR_GITWEB)GITWEB-BUILD-OPTIONS +$(MAK_DIR_GITWEB)$(GITWEB_CSS_MIN): $(MAK_DIR_GITWEB)$(GITWEB_CSS_IN) + $(QUIET_GEN)$(CSSMIN) <$< >$@ +endif +GITWEB_FILES += $(GITWEB_CSS) + +GITWEB_FILES += static/git-logo.png static/git-favicon.png + +# JavaScript files that are composed (concatenated) to form gitweb.js +# +# js/lib/common-lib.js should be always first, then js/lib/*.js, +# then the rest of files; js/gitweb.js should be last (if it exists) +GITWEB_JSLIB_FILES = +GITWEB_JSLIB_FILES += static/js/lib/common-lib.js +GITWEB_JSLIB_FILES += static/js/lib/datetime.js +GITWEB_JSLIB_FILES += static/js/lib/cookies.js +GITWEB_JSLIB_FILES += static/js/javascript-detection.js +GITWEB_JSLIB_FILES += static/js/adjust-timezone.js +GITWEB_JSLIB_FILES += static/js/blame_incremental.js + +.PHONY: FORCE +$(MAK_DIR_GITWEB)GITWEB-BUILD-OPTIONS: FORCE + @sed -e 
's|@PERL_PATH@|$(PERL_PATH_SQ)|' \ + -e 's|@JSMIN@|$(JSMIN)|' \ + -e 's|@CSSMIN@|$(CSSMIN)|' \ + -e 's|@GIT_VERSION@|$(GIT_VERSION)|' \ + -e 's|@GIT_BINDIR@|$(bindir)|' \ + -e 's|@GITWEB_CONFIG@|$(GITWEB_CONFIG)|' \ + -e 's|@GITWEB_CONFIG_SYSTEM@|$(GITWEB_CONFIG_SYSTEM)|' \ + -e 's|@GITWEB_CONFIG_COMMON@|$(GITWEB_CONFIG_COMMON)|' \ + -e 's|@GITWEB_HOME_LINK_STR@|$(GITWEB_HOME_LINK_STR)|' \ + -e 's|@GITWEB_SITENAME@|$(GITWEB_SITENAME)|' \ + -e 's|@GITWEB_PROJECTROOT@|$(GITWEB_PROJECTROOT)|' \ + -e 's|@GITWEB_PROJECT_MAXDEPTH@|$(GITWEB_PROJECT_MAXDEPTH)|' \ + -e 's|@GITWEB_EXPORT_OK@|$(GITWEB_EXPORT_OK)|' \ + -e 's|@GITWEB_STRICT_EXPORT@|$(GITWEB_STRICT_EXPORT)|' \ + -e 's|@GITWEB_BASE_URL@|$(GITWEB_BASE_URL)|' \ + -e 's|@GITWEB_LIST@|$(GITWEB_LIST)|' \ + -e 's|@GITWEB_HOMETEXT@|$(GITWEB_HOMETEXT)|' \ + -e 's|@GITWEB_CSS@|$(GITWEB_CSS)|' \ + -e 's|@GITWEB_LOGO@|$(GITWEB_LOGO)|' \ + -e 's|@GITWEB_FAVICON@|$(GITWEB_FAVICON)|' \ + -e 's|@GITWEB_JS@|$(GITWEB_JS)|' \ + -e 's|@GITWEB_SITE_HTML_HEAD_STRING@|$(GITWEB_SITE_HTML_HEAD_STRING)|' \ + -e 's|@GITWEB_SITE_HEADER@|$(GITWEB_SITE_HEADER)|' \ + -e 's|@GITWEB_SITE_FOOTER@|$(GITWEB_SITE_FOOTER)|' \ + -e 's|@HIGHLIGHT_BIN@|$(HIGHLIGHT_BIN)|' \ + $(MAK_DIR_GITWEB)GITWEB-BUILD-OPTIONS.in >"$@+" + @cmp -s $@+ $@ && rm -f $@+ || mv -f $@+ $@ + +$(MAK_DIR_GITWEB)gitweb.cgi: $(MAK_DIR_GITWEB)generate-gitweb-cgi.sh +$(MAK_DIR_GITWEB)gitweb.cgi: $(MAK_DIR_GITWEB)GITWEB-BUILD-OPTIONS +$(MAK_DIR_GITWEB)gitweb.cgi: GIT-VERSION-FILE +$(MAK_DIR_GITWEB)gitweb.cgi: $(MAK_DIR_GITWEB)gitweb.perl + $(QUIET_GEN)$(RM) $@ $@+ && \ + $(MAK_DIR_GITWEB)generate-gitweb-cgi.sh $(MAK_DIR_GITWEB)/GITWEB-BUILD-OPTIONS ./GIT-VERSION-FILE $< $@+ && \ + mv $@+ $@ + +$(MAK_DIR_GITWEB)static/gitweb.js: $(MAK_DIR_GITWEB)generate-gitweb-js.sh +$(MAK_DIR_GITWEB)static/gitweb.js: $(addprefix $(MAK_DIR_GITWEB),$(GITWEB_JSLIB_FILES)) + $(QUIET_GEN)$(RM) $@ $@+ && \ + $(MAK_DIR_GITWEB)generate-gitweb-js.sh $@+ $(filter %.js,$^) && \ + mv $@+ $@ + +### Installation rules + +.PHONY: install-gitweb +install-gitweb: $(MAK_DIR_GITWEB_ALL) + $(INSTALL) -d -m 755 '$(DESTDIR_SQ)$(gitwebdir_SQ)' + $(INSTALL) -m 755 $(addprefix $(MAK_DIR_GITWEB),$(GITWEB_PROGRAMS)) '$(DESTDIR_SQ)$(gitwebdir_SQ)' + $(INSTALL) -d -m 755 '$(DESTDIR_SQ)$(gitwebstaticdir_SQ)' + $(INSTALL) -m 644 $(addprefix $(MAK_DIR_GITWEB),$(GITWEB_FILES)) \ + '$(DESTDIR_SQ)$(gitwebstaticdir_SQ)' +ifndef NO_GITWEB +ifndef NO_PERL +install: install-gitweb +endif +endif + +### Cleaning rules + +.PHONY: gitweb-clean +gitweb-clean: + $(RM) $(addprefix $(MAK_DIR_GITWEB),gitweb.cgi $(GITWEB_JS_IN) \ + $(GITWEB_JS_MIN) $(GITWEB_CSS_MIN) \ + GITWEB-BUILD-OPTIONS) +clean: gitweb-clean diff --git a/gitweb/README b/gitweb/README new file mode 100644 index 0000000..471dcfb --- /dev/null +++ b/gitweb/README @@ -0,0 +1,70 @@ +GIT web Interface +================= + +From the git version 1.4.0 gitweb is bundled with git. + + +Build time gitweb configuration +------------------------------- +There are many configuration variables which affect building gitweb (among +others creating gitweb.cgi out of gitweb.perl by replacing placeholders such +as `++GIT_BINDIR++` by their build-time values). + +Building and installing gitweb is described in gitweb's INSTALL file +(in 'gitweb/INSTALL'). + + +Runtime gitweb configuration +---------------------------- +Gitweb obtains configuration data from the following sources in the +following order: + +1. built-in values (some set during build stage), +2. 
common system-wide configuration file (`GITWEB_CONFIG_COMMON`,
+   defaults to '/etc/gitweb-common.conf'),
+3. either the per-instance configuration file (`GITWEB_CONFIG`, defaults to
+   'gitweb_config.perl' in the same directory as the installed gitweb),
+   or, if it does not exist, the system-wide configuration file
+   (`GITWEB_CONFIG_SYSTEM`, defaults to '/etc/gitweb.conf').
+
+Values obtained from later configuration files override values obtained
+earlier in the above sequence.
+
+You can read the system-wide GITWEB_CONFIG_SYSTEM defaults from within
+GITWEB_CONFIG by adding
+
+	read_config_file($GITWEB_CONFIG_SYSTEM);
+
+at the very beginning of the per-instance GITWEB_CONFIG file. In this case
+settings in the per-instance file will override settings from the
+system-wide configuration file. Note that read_config_file itself checks
+that the $GITWEB_CONFIG_SYSTEM file exists.
+
+The most notable settings that are not configurable at compile time are
+the optional features, stored in the '%feature' variable.
+
+The full description of how to reconfigure the default feature settings in
+your `GITWEB_CONFIG`, or per project in `project.git/config`, can be found
+in the comments inside 'gitweb.cgi'.
+
+See also the gitweb.conf(5) manpage.
+
+
+Web server configuration
+------------------------
+Gitweb can be run as a CGI script, as a legacy mod_perl application (using
+ModPerl::Registry), or as a FastCGI script. You can find some simple
+examples in the "Example web server configuration" section of the INSTALL
+file for gitweb (in gitweb/INSTALL).
+
+See the "Webserver configuration" and "Advanced web server setup" sections
+in the gitweb(1) manpage.
+
+
+AUTHORS
+-------
+Originally written by:
+  Kay Sievers
+
+Any comment/question/concern to:
+  Git mailing list
diff --git a/gitweb/generate-gitweb-cgi.sh b/gitweb/generate-gitweb-cgi.sh
new file mode 100755
index 0000000..ede9038
--- /dev/null
+++ b/gitweb/generate-gitweb-cgi.sh
@@ -0,0 +1,47 @@
+#!/bin/sh
+
+set -e
+
+if test $# -ne 4
+then
+	echo >&2 "USAGE: $0 <GITWEB_BUILD_OPTIONS> <GIT_VERSION_FILE> <INPUT> <OUTPUT>"
+	exit 1
+fi
+
+GITWEB_BUILD_OPTIONS="$1"
+GIT_VERSION_FILE="$2"
+INPUT="$3"
+OUTPUT="$4"
+
+. "$GITWEB_BUILD_OPTIONS"
+. 
"$GIT_VERSION_FILE" + +sed -e "1s|#!/usr/bin/perl|#!$PERL_PATH|" \ + -e "s|@PERL_PATH@|$PERL_PATH|" \ + -e "s|@JSMIN@|$JSMIN|" \ + -e "s|@CSSMIN@|$CSSMIN|" \ + -e "s|@GIT_VERSION@|$GIT_VERSION|" \ + -e "s|@GIT_BINDIR@|$GIT_BINDIR|" \ + -e "s|@GITWEB_CONFIG@|$GITWEB_CONFIG|" \ + -e "s|@GITWEB_CONFIG_SYSTEM@|$GITWEB_CONFIG_SYSTEM|" \ + -e "s|@GITWEB_CONFIG_COMMON@|$GITWEB_CONFIG_COMMON|" \ + -e "s|@GITWEB_HOME_LINK_STR@|$GITWEB_HOME_LINK_STR|" \ + -e "s|@GITWEB_SITENAME@|$GITWEB_SITENAME|" \ + -e "s|@GITWEB_PROJECTROOT@|$GITWEB_PROJECTROOT|" \ + -e "s|@GITWEB_PROJECT_MAXDEPTH@|$GITWEB_PROJECT_MAXDEPTH|" \ + -e "s|@GITWEB_EXPORT_OK@|$GITWEB_EXPORT_OK|" \ + -e "s|@GITWEB_STRICT_EXPORT@|$GITWEB_STRICT_EXPORT|" \ + -e "s|@GITWEB_BASE_URL@|$GITWEB_BASE_URL|" \ + -e "s|@GITWEB_LIST@|$GITWEB_LIST|" \ + -e "s|@GITWEB_HOMETEXT@|$GITWEB_HOMETEXT|" \ + -e "s|@GITWEB_CSS@|$GITWEB_CSS|" \ + -e "s|@GITWEB_LOGO@|$GITWEB_LOGO|" \ + -e "s|@GITWEB_FAVICON@|$GITWEB_FAVICON|" \ + -e "s|@GITWEB_JS@|$GITWEB_JS|" \ + -e "s|@GITWEB_SITE_HTML_HEAD_STRING@|$GITWEB_SITE_HTML_HEAD_STRING|" \ + -e "s|@GITWEB_SITE_HEADER@|$GITWEB_SITE_HEADER|" \ + -e "s|@GITWEB_SITE_FOOTER@|$GITWEB_SITE_FOOTER|" \ + -e "s|@HIGHLIGHT_BIN@|$HIGHLIGHT_BIN|" \ + "$INPUT" >"$OUTPUT" + +chmod a+x "$OUTPUT" diff --git a/gitweb/generate-gitweb-js.sh b/gitweb/generate-gitweb-js.sh new file mode 100755 index 0000000..01bb22b --- /dev/null +++ b/gitweb/generate-gitweb-js.sh @@ -0,0 +1,12 @@ +#!/bin/sh + +if test "$#" -lt 2 +then + echo >&2 "USAGE: $0 ..." + exit 1 +fi + +OUTPUT="$1" +shift + +cat "$@" >"$OUTPUT" diff --git a/gitweb/gitweb.perl b/gitweb/gitweb.perl new file mode 100755 index 0000000..b5490df --- /dev/null +++ b/gitweb/gitweb.perl @@ -0,0 +1,8490 @@ +#!/usr/bin/perl + +# gitweb - simple web interface to track changes in git repositories +# +# (C) 2005-2006, Kay Sievers +# (C) 2005, Christian Gierke +# +# This program is licensed under the GPLv2 + +require v5.26; +use strict; +use warnings; +# handle ACL in file access tests +use filetest 'access'; +use CGI qw(:standard :escapeHTML -nosticky); +use CGI::Util qw(unescape); +use CGI::Carp qw(fatalsToBrowser set_message); +use Encode; +use Fcntl ':mode'; +use File::Find qw(); +use File::Basename qw(basename); +use Time::HiRes qw(gettimeofday tv_interval); +use Digest::MD5 qw(md5_hex); + +binmode STDOUT, ':utf8'; + +if (!defined($CGI::VERSION) || $CGI::VERSION < 4.08) { + eval 'sub CGI::multi_param { CGI::param(@_) }' +} + +our $t0 = [ gettimeofday() ]; +our $number_of_git_cmds = 0; + +BEGIN { + CGI->compile() if $ENV{'MOD_PERL'}; +} + +our $version = "@GIT_VERSION@"; + +our ($my_url, $my_uri, $base_url, $path_info, $home_link); +sub evaluate_uri { + our $cgi; + + our $my_url = $cgi->url(); + our $my_uri = $cgi->url(-absolute => 1); + + # Base URL for relative URLs in gitweb ($logo, $favicon, ...), + # needed and used only for URLs with nonempty PATH_INFO + our $base_url = $my_url; + + # When the script is used as DirectoryIndex, the URL does not contain the name + # of the script file itself, and $cgi->url() fails to strip PATH_INFO, so we + # have to do it ourselves. We make $path_info global because it's also used + # later on. + # + # Another issue with the script being the DirectoryIndex is that the resulting + # $my_url data is not the full script URL: this is good, because we want + # generated links to keep implying the script name if it wasn't explicitly + # indicated in the URL we're handling, but it means that $my_url cannot be used + # as base URL. 
+ # Therefore, if we needed to strip PATH_INFO, then we know that we have + # to build the base URL ourselves: + our $path_info = decode_utf8($ENV{"PATH_INFO"}); + if ($path_info) { + # $path_info has already been URL-decoded by the web server, but + # $my_url and $my_uri have not. URL-decode them so we can properly + # strip $path_info. + $my_url = unescape($my_url); + $my_uri = unescape($my_uri); + if ($my_url =~ s,\Q$path_info\E$,, && + $my_uri =~ s,\Q$path_info\E$,, && + defined $ENV{'SCRIPT_NAME'}) { + $base_url = $cgi->url(-base => 1) . $ENV{'SCRIPT_NAME'}; + } + } + + # target of the home link on top of all pages + our $home_link = $my_uri || "/"; +} + +# core git executable to use +# this can just be "git" if your webserver has a sensible PATH +our $GIT = "@GIT_BINDIR@/git"; + +# absolute fs-path which will be prepended to the project path +#our $projectroot = "/pub/scm"; +our $projectroot = "@GITWEB_PROJECTROOT@"; + +# fs traversing limit for getting project list +# the number is relative to the projectroot +our $project_maxdepth = @GITWEB_PROJECT_MAXDEPTH@; + +# string of the home link on top of all pages +our $home_link_str = "@GITWEB_HOME_LINK_STR@"; + +# extra breadcrumbs preceding the home link +our @extra_breadcrumbs = (); + +# name of your site or organization to appear in page titles +# replace this with something more descriptive for clearer bookmarks +our $site_name = "@GITWEB_SITENAME@" + || ($ENV{'SERVER_NAME'} || "Untitled") . " Git"; + +# html snippet to include in the section of each page +our $site_html_head_string = "@GITWEB_SITE_HTML_HEAD_STRING@"; +# filename of html text to include at top of each page +our $site_header = "@GITWEB_SITE_HEADER@"; +# html text to include at home page +our $home_text = "@GITWEB_HOMETEXT@"; +# filename of html text to include at bottom of each page +our $site_footer = "@GITWEB_SITE_FOOTER@"; + +# URI of stylesheets +our @stylesheets = ("@GITWEB_CSS@"); +# URI of a single stylesheet, which can be overridden in GITWEB_CONFIG. 
+our $stylesheet = undef; +# URI of GIT logo (72x27 size) +our $logo = "@GITWEB_LOGO@"; +# URI of GIT favicon, assumed to be image/png type +our $favicon = "@GITWEB_FAVICON@"; +# URI of gitweb.js (JavaScript code for gitweb) +our $javascript = "@GITWEB_JS@"; + +# URI and label (title) of GIT logo link +#our $logo_url = "https://www.kernel.org/pub/software/scm/git/docs/"; +#our $logo_label = "git documentation"; +our $logo_url = "https://git-scm.com/"; +our $logo_label = "git homepage"; + +# source of projects list +our $projects_list = "@GITWEB_LIST@"; + +# the width (in characters) of the projects list "Description" column +our $projects_list_description_width = 25; + +# group projects by category on the projects list +# (enabled if this variable evaluates to true) +our $projects_list_group_categories = 0; + +# default category if none specified +# (leave the empty string for no category) +our $project_list_default_category = ""; + +# default order of projects list +# valid values are none, project, descr, owner, and age +our $default_projects_order = "project"; + +# show repository only if this file exists +# (only effective if this variable evaluates to true) +our $export_ok = "@GITWEB_EXPORT_OK@"; + +# don't generate age column on the projects list page +our $omit_age_column = 0; + +# don't generate information about owners of repositories +our $omit_owner=0; + +# show repository only if this subroutine returns true +# when given the path to the project, for example: +# sub { return -e "$_[0]/git-daemon-export-ok"; } +our $export_auth_hook = undef; + +# only allow viewing of repositories also shown on the overview page +our $strict_export = "@GITWEB_STRICT_EXPORT@"; + +# list of git base URLs used for URL to where fetch project from, +# i.e. full URL is "$git_base_url/$project" +our @git_base_url_list = grep { $_ ne '' } ("@GITWEB_BASE_URL@"); + +# default blob_plain mimetype and default charset for text/plain blob +our $default_blob_plain_mimetype = 'text/plain'; +our $default_text_plain_charset = undef; + +# file to use for guessing MIME types before trying /etc/mime.types +# (relative to the current git repository) +our $mimetypes_file = undef; + +# assume this charset if line contains non-UTF-8 characters; +# it should be valid encoding (see Encoding::Supported(3pm) for list), +# for which encoding all byte sequences are valid, for example +# 'iso-8859-1' aka 'latin1' (it is decoded without checking, so it +# could be even 'utf-8' for the old behavior) +our $fallback_encoding = 'latin1'; + +# rename detection options for git-diff and git-diff-tree +# - default is '-M', with the cost proportional to +# (number of removed files) * (number of new files). +# - more costly is '-C' (which implies '-M'), with the cost proportional to +# (number of changed files + number of removed files) * (number of new files) +# - even more costly is '-C', '--find-copies-harder' with cost +# (number of files in the original tree) * (number of new files) +# - one might want to include '-B' option, e.g. '-B', '-M' +our @diff_opts = ('-M'); # taken from git_commit + +# Disables features that would allow repository owners to inject script into +# the gitweb domain. +our $prevent_xss = 0; + +# Path to the highlight executable to use (must be the one from +# http://andre-simon.de/zip/download.php due to assumptions about parameters and output). +# Useful if highlight is not installed on your webserver's PATH. 
+# [Default: highlight] +our $highlight_bin = "@HIGHLIGHT_BIN@"; + +# information about snapshot formats that gitweb is capable of serving +our %known_snapshot_formats = ( + # name => { + # 'display' => display name, + # 'type' => mime type, + # 'suffix' => filename suffix, + # 'format' => --format for git-archive, + # 'compressor' => [compressor command and arguments] + # (array reference, optional) + # 'disabled' => boolean (optional)} + # + 'tgz' => { + 'display' => 'tar.gz', + 'type' => 'application/x-gzip', + 'suffix' => '.tar.gz', + 'format' => 'tar', + 'compressor' => ['gzip', '-n']}, + + 'tbz2' => { + 'display' => 'tar.bz2', + 'type' => 'application/x-bzip2', + 'suffix' => '.tar.bz2', + 'format' => 'tar', + 'compressor' => ['bzip2']}, + + 'txz' => { + 'display' => 'tar.xz', + 'type' => 'application/x-xz', + 'suffix' => '.tar.xz', + 'format' => 'tar', + 'compressor' => ['xz'], + 'disabled' => 1}, + + 'zip' => { + 'display' => 'zip', + 'type' => 'application/x-zip', + 'suffix' => '.zip', + 'format' => 'zip'}, +); + +# Aliases so we understand old gitweb.snapshot values in repository +# configuration. +our %known_snapshot_format_aliases = ( + 'gzip' => 'tgz', + 'bzip2' => 'tbz2', + 'xz' => 'txz', + + # backward compatibility: legacy gitweb config support + 'x-gzip' => undef, 'gz' => undef, + 'x-bzip2' => undef, 'bz2' => undef, + 'x-zip' => undef, '' => undef, +); + +# Pixel sizes for icons and avatars. If the default font sizes or lineheights +# are changed, it may be appropriate to change these values too via +# $GITWEB_CONFIG. +our %avatar_size = ( + 'default' => 16, + 'double' => 32 +); + +# Used to set the maximum load that we will still respond to gitweb queries. +# If server load exceed this value then return "503 server busy" error. +# If gitweb cannot determined server load, it is taken to be 0. +# Leave it undefined (or set to 'undef') to turn off load checking. +our $maxload = 300; + +# configuration for 'highlight' (http://andre-simon.de/doku/highlight/en/highlight.php) +# match by basename +our %highlight_basename = ( + #'Program' => 'py', + #'Library' => 'py', + 'SConstruct' => 'py', # SCons equivalent of Makefile + 'Makefile' => 'make', +); +# match by extension +our %highlight_ext = ( + # main extensions, defining name of syntax; + # see files in /usr/share/highlight/langDefs/ directory + (map { $_ => $_ } qw(py rb java css js tex bib xml awk bat ini spec tcl sql)), + # alternate extensions, see /etc/highlight/filetypes.conf + (map { $_ => 'c' } qw(c h)), + (map { $_ => 'sh' } qw(sh bash zsh ksh)), + (map { $_ => 'cpp' } qw(cpp cxx c++ cc)), + (map { $_ => 'php' } qw(php php3 php4 php5 phps)), + (map { $_ => 'pl' } qw(pl perl pm)), # perhaps also 'cgi' + (map { $_ => 'make'} qw(make mak mk)), + (map { $_ => 'xml' } qw(xml xhtml html htm)), +); + +# You define site-wide feature defaults here; override them with +# $GITWEB_CONFIG as necessary. +our %feature = ( + # feature => { + # 'sub' => feature-sub (subroutine), + # 'override' => allow-override (boolean), + # 'default' => [ default options...] 
(array reference)} + # + # if feature is overridable (it means that allow-override has true value), + # then feature-sub will be called with default options as parameters; + # return value of feature-sub indicates if to enable specified feature + # + # if there is no 'sub' key (no feature-sub), then feature cannot be + # overridden + # + # use gitweb_get_feature() to retrieve the value + # (an array) or gitweb_check_feature() to check if + # is enabled + + # Enable the 'blame' blob view, showing the last commit that modified + # each line in the file. This can be very CPU-intensive. + + # To enable system wide have in $GITWEB_CONFIG + # $feature{'blame'}{'default'} = [1]; + # To have project specific config enable override in $GITWEB_CONFIG + # $feature{'blame'}{'override'} = 1; + # and in project config gitweb.blame = 0|1; + 'blame' => { + 'sub' => sub { feature_bool('blame', @_) }, + 'override' => 0, + 'default' => [0]}, + + # Enable the 'snapshot' link, providing a compressed archive of any + # tree. This can potentially generate high traffic if you have large + # project. + + # Value is a list of formats defined in %known_snapshot_formats that + # you wish to offer. + # To disable system wide have in $GITWEB_CONFIG + # $feature{'snapshot'}{'default'} = []; + # To have project specific config enable override in $GITWEB_CONFIG + # $feature{'snapshot'}{'override'} = 1; + # and in project config, a comma-separated list of formats or "none" + # to disable. Example: gitweb.snapshot = tbz2,zip; + 'snapshot' => { + 'sub' => \&feature_snapshot, + 'override' => 0, + 'default' => ['tgz']}, + + # Enable text search, which will list the commits which match author, + # committer or commit text to a given string. Enabled by default. + # Project specific override is not supported. + # + # Note that this controls all search features, which means that if + # it is disabled, then 'grep' and 'pickaxe' search would also be + # disabled. + 'search' => { + 'override' => 0, + 'default' => [1]}, + + # Enable grep search, which will list the files in currently selected + # tree containing the given string. Enabled by default. This can be + # potentially CPU-intensive, of course. + # Note that you need to have 'search' feature enabled too. + + # To enable system wide have in $GITWEB_CONFIG + # $feature{'grep'}{'default'} = [1]; + # To have project specific config enable override in $GITWEB_CONFIG + # $feature{'grep'}{'override'} = 1; + # and in project config gitweb.grep = 0|1; + 'grep' => { + 'sub' => sub { feature_bool('grep', @_) }, + 'override' => 0, + 'default' => [1]}, + + # Enable the pickaxe search, which will list the commits that modified + # a given string in a file. This can be practical and quite faster + # alternative to 'blame', but still potentially CPU-intensive. + # Note that you need to have 'search' feature enabled too. + + # To enable system wide have in $GITWEB_CONFIG + # $feature{'pickaxe'}{'default'} = [1]; + # To have project specific config enable override in $GITWEB_CONFIG + # $feature{'pickaxe'}{'override'} = 1; + # and in project config gitweb.pickaxe = 0|1; + 'pickaxe' => { + 'sub' => sub { feature_bool('pickaxe', @_) }, + 'override' => 0, + 'default' => [1]}, + + # Enable showing size of blobs in a 'tree' view, in a separate + # column, similar to what 'ls -l' does. This cost a bit of IO. 
+ + # To disable system wide have in $GITWEB_CONFIG + # $feature{'show-sizes'}{'default'} = [0]; + # To have project specific config enable override in $GITWEB_CONFIG + # $feature{'show-sizes'}{'override'} = 1; + # and in project config gitweb.showsizes = 0|1; + 'show-sizes' => { + 'sub' => sub { feature_bool('showsizes', @_) }, + 'override' => 0, + 'default' => [1]}, + + # Make gitweb use an alternative format of the URLs which can be + # more readable and natural-looking: project name is embedded + # directly in the path and the query string contains other + # auxiliary information. All gitweb installations recognize + # URL in either format; this configures in which formats gitweb + # generates links. + + # To enable system wide have in $GITWEB_CONFIG + # $feature{'pathinfo'}{'default'} = [1]; + # Project specific override is not supported. + + # Note that you will need to change the default location of CSS, + # favicon, logo and possibly other files to an absolute URL. Also, + # if gitweb.cgi serves as your indexfile, you will need to force + # $my_uri to contain the script name in your $GITWEB_CONFIG. + 'pathinfo' => { + 'override' => 0, + 'default' => [0]}, + + # Make gitweb consider projects in project root subdirectories + # to be forks of existing projects. Given project $projname.git, + # projects matching $projname/*.git will not be shown in the main + # projects list, instead a '+' mark will be added to $projname + # there and a 'forks' view will be enabled for the project, listing + # all the forks. If project list is taken from a file, forks have + # to be listed after the main project. + + # To enable system wide have in $GITWEB_CONFIG + # $feature{'forks'}{'default'} = [1]; + # Project specific override is not supported. + 'forks' => { + 'override' => 0, + 'default' => [0]}, + + # Insert custom links to the action bar of all project pages. + # This enables you mainly to link to third-party scripts integrating + # into gitweb; e.g. git-browser for graphical history representation + # or custom web-based repository administration interface. + + # The 'default' value consists of a list of triplets in the form + # (label, link, position) where position is the label after which + # to insert the link and link is a format string where %n expands + # to the project name, %f to the project path within the filesystem, + # %h to the current hash (h gitweb parameter) and %b to the current + # hash base (hb gitweb parameter); %% expands to %. + + # To enable system wide have in $GITWEB_CONFIG e.g. + # $feature{'actions'}{'default'} = [('graphiclog', + # '/git-browser/by-commit.html?r=%n', 'summary')]; + # Project specific override is not supported. + 'actions' => { + 'override' => 0, + 'default' => []}, + + # Allow gitweb scan project content tags of project repository, + # and display the popular Web 2.0-ish "tag cloud" near the projects + # list. Note that this is something COMPLETELY different from the + # normal Git tags. + + # gitweb by itself can show existing tags, but it does not handle + # tagging itself; you need to do it externally, outside gitweb. + # The format is described in git_get_project_ctags() subroutine. + # You may want to install the HTML::TagCloud Perl module to get + # a pretty tag cloud instead of just a list of tags. + + # To enable system wide have in $GITWEB_CONFIG + # $feature{'ctags'}{'default'} = [1]; + # Project specific override is not supported. 
+ + # In the future whether ctags editing is enabled might depend + # on the value, but using 1 should always mean no editing of ctags. + 'ctags' => { + 'override' => 0, + 'default' => [0]}, + + # The maximum number of patches in a patchset generated in patch + # view. Set this to 0 or undef to disable patch view, or to a + # negative number to remove any limit. + + # To disable system wide have in $GITWEB_CONFIG + # $feature{'patches'}{'default'} = [0]; + # To have project specific config enable override in $GITWEB_CONFIG + # $feature{'patches'}{'override'} = 1; + # and in project config gitweb.patches = 0|n; + # where n is the maximum number of patches allowed in a patchset. + 'patches' => { + 'sub' => \&feature_patches, + 'override' => 0, + 'default' => [16]}, + + # Avatar support. When this feature is enabled, views such as + # shortlog or commit will display an avatar associated with + # the email of the committer(s) and/or author(s). + + # Currently available providers are gravatar and picon. + # If an unknown provider is specified, the feature is disabled. + + # Picon currently relies on the indiana.edu database. + + # To enable system wide have in $GITWEB_CONFIG + # $feature{'avatar'}{'default'} = ['']; + # where is either gravatar or picon. + # To have project specific config enable override in $GITWEB_CONFIG + # $feature{'avatar'}{'override'} = 1; + # and in project config gitweb.avatar = ; + 'avatar' => { + 'sub' => \&feature_avatar, + 'override' => 0, + 'default' => ['']}, + + # Enable displaying how much time and how many git commands + # it took to generate and display page. Disabled by default. + # Project specific override is not supported. + 'timed' => { + 'override' => 0, + 'default' => [0]}, + + # Enable turning some links into links to actions which require + # JavaScript to run (like 'blame_incremental'). Not enabled by + # default. Project specific override is currently not supported. + 'javascript-actions' => { + 'override' => 0, + 'default' => [0]}, + + # Enable and configure ability to change common timezone for dates + # in gitweb output via JavaScript. Enabled by default. + # Project specific override is not supported. + 'javascript-timezone' => { + 'override' => 0, + 'default' => [ + 'local', # default timezone: 'utc', 'local', or '(-|+)HHMM' format, + # or undef to turn off this feature + 'gitweb_tz', # name of cookie where to store selected timezone + 'datetime', # CSS class used to mark up dates for manipulation + ]}, + + # Syntax highlighting support. This is based on Daniel Svensson's + # and Sham Chukoury's work in gitweb-xmms2.git. + # It requires the 'highlight' program present in $PATH, + # and therefore is disabled by default. 
+ + # To enable system wide have in $GITWEB_CONFIG + # $feature{'highlight'}{'default'} = [1]; + + 'highlight' => { + 'sub' => sub { feature_bool('highlight', @_) }, + 'override' => 0, + 'default' => [0]}, + + # Enable displaying of remote heads in the heads list + + # To enable system wide have in $GITWEB_CONFIG + # $feature{'remote_heads'}{'default'} = [1]; + # To have project specific config enable override in $GITWEB_CONFIG + # $feature{'remote_heads'}{'override'} = 1; + # and in project config gitweb.remoteheads = 0|1; + 'remote_heads' => { + 'sub' => sub { feature_bool('remote_heads', @_) }, + 'override' => 0, + 'default' => [0]}, + + # Enable showing branches under other refs in addition to heads + + # To set system wide extra branch refs have in $GITWEB_CONFIG + # $feature{'extra-branch-refs'}{'default'} = ['dirs', 'of', 'choice']; + # To have project specific config enable override in $GITWEB_CONFIG + # $feature{'extra-branch-refs'}{'override'} = 1; + # and in project config gitweb.extrabranchrefs = dirs of choice + # Every directory is separated with whitespace. + + 'extra-branch-refs' => { + 'sub' => \&feature_extra_branch_refs, + 'override' => 0, + 'default' => []}, + + # Redact e-mail addresses. + + # To enable system wide have in $GITWEB_CONFIG + # $feature{'email-privacy'}{'default'} = [1]; + 'email-privacy' => { + 'sub' => sub { feature_bool('email-privacy', @_) }, + 'override' => 1, + 'default' => [0]}, +); + +sub gitweb_get_feature { + my ($name) = @_; + return unless exists $feature{$name}; + my ($sub, $override, @defaults) = ( + $feature{$name}{'sub'}, + $feature{$name}{'override'}, + @{$feature{$name}{'default'}}); + # project specific override is possible only if we have project + our $git_dir; # global variable, declared later + if (!$override || !defined $git_dir) { + return @defaults; + } + if (!defined $sub) { + warn "feature $name is not overridable"; + return @defaults; + } + return $sub->(@defaults); +} + +# A wrapper to check if a given feature is enabled. +# With this, you can say +# +# my $bool_feat = gitweb_check_feature('bool_feat'); +# gitweb_check_feature('bool_feat') or somecode; +# +# instead of +# +# my ($bool_feat) = gitweb_get_feature('bool_feat'); +# (gitweb_get_feature('bool_feat'))[0] or somecode; +# +sub gitweb_check_feature { + return (gitweb_get_feature(@_))[0]; +} + + +sub feature_bool { + my $key = shift; + my ($val) = git_get_project_config($key, '--bool'); + + if (!defined $val) { + return ($_[0]); + } elsif ($val eq 'true') { + return (1); + } elsif ($val eq 'false') { + return (0); + } +} + +sub feature_snapshot { + my (@fmts) = @_; + + my ($val) = git_get_project_config('snapshot'); + + if ($val) { + @fmts = ($val eq 'none' ? () : split /\s*[,\s]\s*/, $val); + } + + return @fmts; +} + +sub feature_patches { + my @val = (git_get_project_config('patches', '--int')); + + if (@val) { + return @val; + } + + return ($_[0]); +} + +sub feature_avatar { + my @val = (git_get_project_config('avatar')); + + return @val ? @val : @_; +} + +sub feature_extra_branch_refs { + my (@branch_refs) = @_; + my $values = git_get_project_config('extrabranchrefs'); + + if ($values) { + $values = config_to_multi ($values); + @branch_refs = (); + foreach my $value (@{$values}) { + push @branch_refs, split /\s+/, $value; + } + } + + return @branch_refs; +} + +# checking HEAD file with -e is fragile if the repository was +# initialized long time ago (i.e. symlink HEAD) and was pack-ref'ed +# and then pruned. 
+sub check_head_link { + my ($dir) = @_; + my $headfile = "$dir/HEAD"; + return ((-e $headfile) || + (-l $headfile && readlink($headfile) =~ /^refs\/heads\//)); +} + +sub check_export_ok { + my ($dir) = @_; + return (check_head_link($dir) && + (!$export_ok || -e "$dir/$export_ok") && + (!$export_auth_hook || $export_auth_hook->($dir))); +} + +# process alternate names for backward compatibility +# filter out unsupported (unknown) snapshot formats +sub filter_snapshot_fmts { + my @fmts = @_; + + @fmts = map { + exists $known_snapshot_format_aliases{$_} ? + $known_snapshot_format_aliases{$_} : $_} @fmts; + @fmts = grep { + exists $known_snapshot_formats{$_} && + !$known_snapshot_formats{$_}{'disabled'}} @fmts; +} + +sub filter_and_validate_refs { + my @refs = @_; + my %unique_refs = (); + + foreach my $ref (@refs) { + die_error(500, "Invalid ref '$ref' in 'extra-branch-refs' feature") unless (is_valid_ref_format($ref)); + # 'heads' are added implicitly in get_branch_refs(). + $unique_refs{$ref} = 1 if ($ref ne 'heads'); + } + return sort keys %unique_refs; +} + +# If it is set to code reference, it is code that it is to be run once per +# request, allowing updating configurations that change with each request, +# while running other code in config file only once. +# +# Otherwise, if it is false then gitweb would process config file only once; +# if it is true then gitweb config would be run for each request. +our $per_request_config = 1; + +# read and parse gitweb config file given by its parameter. +# returns true on success, false on recoverable error, allowing +# to chain this subroutine, using first file that exists. +# dies on errors during parsing config file, as it is unrecoverable. +sub read_config_file { + my $filename = shift; + return unless defined $filename; + if (-e $filename) { + do $filename; + # die if there is a problem accessing the file + die $! if $!; + # die if there are errors parsing config file + die $@ if $@; + return 1; + } + return; +} + +our ($GITWEB_CONFIG, $GITWEB_CONFIG_SYSTEM, $GITWEB_CONFIG_COMMON); +sub evaluate_gitweb_config { + our $GITWEB_CONFIG = $ENV{'GITWEB_CONFIG'} || "@GITWEB_CONFIG@"; + our $GITWEB_CONFIG_SYSTEM = $ENV{'GITWEB_CONFIG_SYSTEM'} || "@GITWEB_CONFIG_SYSTEM@"; + our $GITWEB_CONFIG_COMMON = $ENV{'GITWEB_CONFIG_COMMON'} || "@GITWEB_CONFIG_COMMON@"; + + # Protect against duplications of file names, to not read config twice. + # Only one of $GITWEB_CONFIG and $GITWEB_CONFIG_SYSTEM is used, so + # there possibility of duplication of filename there doesn't matter. + $GITWEB_CONFIG = "" if ($GITWEB_CONFIG eq $GITWEB_CONFIG_COMMON); + $GITWEB_CONFIG_SYSTEM = "" if ($GITWEB_CONFIG_SYSTEM eq $GITWEB_CONFIG_COMMON); + + # Common system-wide settings for convenience. + # Those settings can be overridden by GITWEB_CONFIG or GITWEB_CONFIG_SYSTEM. + read_config_file($GITWEB_CONFIG_COMMON); + + # Use first config file that exists. This means use the per-instance + # GITWEB_CONFIG if exists, otherwise use GITWEB_SYSTEM_CONFIG. + read_config_file($GITWEB_CONFIG) and return; + read_config_file($GITWEB_CONFIG_SYSTEM); +} + +# Get loadavg of system, to compare against $maxload. +# Currently it requires '/proc/loadavg' present to get loadavg; +# if it is not present it returns 0, which means no load checking. 
+sub get_loadavg { + if( -e '/proc/loadavg' ){ + open my $fd, '<', '/proc/loadavg' + or return 0; + my @load = split(/\s+/, scalar <$fd>); + close $fd; + + # The first three columns measure CPU and IO utilization of the last one, + # five, and 10 minute periods. The fourth column shows the number of + # currently running processes and the total number of processes in the m/n + # format. The last column displays the last process ID used. + return $load[0] || 0; + } + # additional checks for load average should go here for things that don't export + # /proc/loadavg + + return 0; +} + +# version of the core git binary +our $git_version; +sub evaluate_git_version { + our $git_version = qx("$GIT" --version) =~ m/git version (.*)$/ ? $1 : "unknown"; + $number_of_git_cmds++; +} + +sub check_loadavg { + if (defined $maxload && get_loadavg() > $maxload) { + die_error(503, "The load average on the server is too high"); + } +} + +# ====================================================================== +# input validation and dispatch + +# Various hash size-related values. +my $sha1_len = 40; +my $sha256_extra_len = 24; +my $sha256_len = $sha1_len + $sha256_extra_len; + +# A regex matching $len hex characters. $len may be a range (e.g. 7,64). +sub oid_nlen_regex { + my $len = shift; + my $hchr = qr/[0-9a-fA-F]/; + return qr/(?:(?:$hchr){$len})/; +} + +# A regex matching two sets of $nlen hex characters, prefixed by the literal +# string $prefix and with the literal string $infix between them. +sub oid_nlen_prefix_infix_regex { + my $nlen = shift; + my $prefix = shift; + my $infix = shift; + + my $rx = oid_nlen_regex($nlen); + + return qr/^\Q$prefix\E$rx\Q$infix\E$rx$/; +} + +# A regex matching a valid object ID. +our $oid_regex; +{ + my $x = oid_nlen_regex($sha1_len); + my $y = oid_nlen_regex($sha256_extra_len); + $oid_regex = qr/(?:$x(?:$y)?)/; +} + +# input parameters can be collected from a variety of sources (presently, CGI +# and PATH_INFO), so we define an %input_params hash that collects them all +# together during validation: this allows subsequent uses (e.g. href()) to be +# agnostic of the parameter origin + +our %input_params = (); + +# input parameters are stored with the long parameter name as key. This will +# also be used in the href subroutine to convert parameters to their CGI +# equivalent, and since the href() usage is the most frequent one, we store +# the name -> CGI key mapping here, instead of the reverse. +# +# XXX: Warning: If you touch this, check the search form for updating, +# too. 
+ +our @cgi_param_mapping = ( + project => "p", + action => "a", + file_name => "f", + file_parent => "fp", + hash => "h", + hash_parent => "hp", + hash_base => "hb", + hash_parent_base => "hpb", + page => "pg", + order => "o", + searchtext => "s", + searchtype => "st", + snapshot_format => "sf", + extra_options => "opt", + search_use_regexp => "sr", + ctag => "by_tag", + diff_style => "ds", + project_filter => "pf", + # this must be last entry (for manipulation from JavaScript) + javascript => "js" +); +our %cgi_param_mapping = @cgi_param_mapping; + +# we will also need to know the possible actions, for validation +our %actions = ( + "blame" => \&git_blame, + "blame_incremental" => \&git_blame_incremental, + "blame_data" => \&git_blame_data, + "blobdiff" => \&git_blobdiff, + "blobdiff_plain" => \&git_blobdiff_plain, + "blob" => \&git_blob, + "blob_plain" => \&git_blob_plain, + "commitdiff" => \&git_commitdiff, + "commitdiff_plain" => \&git_commitdiff_plain, + "commit" => \&git_commit, + "forks" => \&git_forks, + "heads" => \&git_heads, + "history" => \&git_history, + "log" => \&git_log, + "patch" => \&git_patch, + "patches" => \&git_patches, + "remotes" => \&git_remotes, + "rss" => \&git_rss, + "atom" => \&git_atom, + "search" => \&git_search, + "search_help" => \&git_search_help, + "shortlog" => \&git_shortlog, + "summary" => \&git_summary, + "tag" => \&git_tag, + "tags" => \&git_tags, + "tree" => \&git_tree, + "snapshot" => \&git_snapshot, + "object" => \&git_object, + # those below don't need $project + "opml" => \&git_opml, + "project_list" => \&git_project_list, + "project_index" => \&git_project_index, +); + +# finally, we have the hash of allowed extra_options for the commands that +# allow them +our %allowed_options = ( + "--no-merges" => [ qw(rss atom log shortlog history) ], +); + +# fill %input_params with the CGI parameters. All values except for 'opt' +# should be single values, but opt can be an array. 
We should probably +# build an array of parameters that can be multi-valued, but since for the time +# being it's only this one, we just single it out +sub evaluate_query_params { + our $cgi; + + while (my ($name, $symbol) = each %cgi_param_mapping) { + if ($symbol eq 'opt') { + $input_params{$name} = [ map { decode_utf8($_) } $cgi->multi_param($symbol) ]; + } else { + $input_params{$name} = decode_utf8($cgi->param($symbol)); + } + } +} + +# now read PATH_INFO and update the parameter list for missing parameters +sub evaluate_path_info { + return if defined $input_params{'project'}; + return if !$path_info; + $path_info =~ s,^/+,,; + return if !$path_info; + + # find which part of PATH_INFO is project + my $project = $path_info; + $project =~ s,/+$,,; + while ($project && !check_head_link("$projectroot/$project")) { + $project =~ s,/*[^/]*$,,; + } + return unless $project; + $input_params{'project'} = $project; + + # do not change any parameters if an action is given using the query string + return if $input_params{'action'}; + $path_info =~ s,^\Q$project\E/*,,; + + # next, check if we have an action + my $action = $path_info; + $action =~ s,/.*$,,; + if (exists $actions{$action}) { + $path_info =~ s,^$action/*,,; + $input_params{'action'} = $action; + } + + # list of actions that want hash_base instead of hash, but can have no + # pathname (f) parameter + my @wants_base = ( + 'tree', + 'history', + ); + + # we want to catch, among others + # [$hash_parent_base[:$file_parent]..]$hash_parent[:$file_name] + my ($parentrefname, $parentpathname, $refname, $pathname) = + ($path_info =~ /^(?:(.+?)(?::(.+))?\.\.)?([^:]+?)?(?::(.+))?$/); + + # first, analyze the 'current' part + if (defined $pathname) { + # we got "branch:filename" or "branch:dir/" + # we could use git_get_type(branch:pathname), but: + # - it needs $git_dir + # - it does a git() call + # - the convention of terminating directories with a slash + # makes it superfluous + # - embedding the action in the PATH_INFO would make it even + # more superfluous + $pathname =~ s,^/+,,; + if (!$pathname || substr($pathname, -1) eq "/") { + $input_params{'action'} ||= "tree"; + $pathname =~ s,/$,,; + } else { + # the default action depends on whether we had parent info + # or not + if ($parentrefname) { + $input_params{'action'} ||= "blobdiff_plain"; + } else { + $input_params{'action'} ||= "blob_plain"; + } + } + $input_params{'hash_base'} ||= $refname; + $input_params{'file_name'} ||= $pathname; + } elsif (defined $refname) { + # we got "branch". In this case we have to choose if we have to + # set hash or hash_base. + # + # Most of the actions without a pathname only want hash to be + # set, except for the ones specified in @wants_base that want + # hash_base instead. It should also be noted that hand-crafted + # links having 'history' as an action and no pathname or hash + # set will fail, but that happens regardless of PATH_INFO. 
+ if (defined $parentrefname) { + # if there is parent let the default be 'shortlog' action + # (for http://git.example.com/repo.git/A..B links); if there + # is no parent, dispatch will detect type of object and set + # action appropriately if required (if action is not set) + $input_params{'action'} ||= "shortlog"; + } + if ($input_params{'action'} && + grep { $_ eq $input_params{'action'} } @wants_base) { + $input_params{'hash_base'} ||= $refname; + } else { + $input_params{'hash'} ||= $refname; + } + } + + # next, handle the 'parent' part, if present + if (defined $parentrefname) { + # a missing pathspec defaults to the 'current' filename, allowing e.g. + # someproject/blobdiff/oldrev..newrev:/filename + if ($parentpathname) { + $parentpathname =~ s,^/+,,; + $parentpathname =~ s,/$,,; + $input_params{'file_parent'} ||= $parentpathname; + } else { + $input_params{'file_parent'} ||= $input_params{'file_name'}; + } + # we assume that hash_parent_base is wanted if a path was specified, + # or if the action wants hash_base instead of hash + if (defined $input_params{'file_parent'} || + grep { $_ eq $input_params{'action'} } @wants_base) { + $input_params{'hash_parent_base'} ||= $parentrefname; + } else { + $input_params{'hash_parent'} ||= $parentrefname; + } + } + + # for the snapshot action, we allow URLs in the form + # $project/snapshot/$hash.ext + # where .ext determines the snapshot and gets removed from the + # passed $refname to provide the $hash. + # + # To be able to tell that $refname includes the format extension, we + # require the following two conditions to be satisfied: + # - the hash input parameter MUST have been set from the $refname part + # of the URL (i.e. they must be equal) + # - the snapshot format MUST NOT have been defined already (e.g. from + # CGI parameter sf) + # It's also useless to try any matching unless $refname has a dot, + # so we check for that too + if (defined $input_params{'action'} && + $input_params{'action'} eq 'snapshot' && + defined $refname && index($refname, '.') != -1 && + $refname eq $input_params{'hash'} && + !defined $input_params{'snapshot_format'}) { + # We loop over the known snapshot formats, checking for + # extensions. Allowed extensions are both the defined suffix + # (which includes the initial dot already) and the snapshot + # format key itself, with a prepended dot + while (my ($fmt, $opt) = each %known_snapshot_formats) { + my $hash = $refname; + unless ($hash =~ s/(\Q$opt->{'suffix'}\E|\Q.$fmt\E)$//) { + next; + } + my $sfx = $1; + # a valid suffix was found, so set the snapshot format + # and reset the hash parameter + $input_params{'snapshot_format'} = $fmt; + $input_params{'hash'} = $hash; + # we also set the format suffix to the one requested + # in the URL: this way a request for e.g. 
.tgz returns + # a .tgz instead of a .tar.gz + $known_snapshot_formats{$fmt}{'suffix'} = $sfx; + last; + } + } +} + +our ($action, $project, $file_name, $file_parent, $hash, $hash_parent, $hash_base, + $hash_parent_base, @extra_options, $page, $searchtype, $search_use_regexp, + $searchtext, $search_regexp, $project_filter); +sub evaluate_and_validate_params { + our $action = $input_params{'action'}; + if (defined $action) { + if (!is_valid_action($action)) { + die_error(400, "Invalid action parameter"); + } + } + + # parameters which are pathnames + our $project = $input_params{'project'}; + if (defined $project) { + if (!is_valid_project($project)) { + undef $project; + die_error(404, "No such project"); + } + } + + our $project_filter = $input_params{'project_filter'}; + if (defined $project_filter) { + if (!is_valid_pathname($project_filter)) { + die_error(404, "Invalid project_filter parameter"); + } + } + + our $file_name = $input_params{'file_name'}; + if (defined $file_name) { + if (!is_valid_pathname($file_name)) { + die_error(400, "Invalid file parameter"); + } + } + + our $file_parent = $input_params{'file_parent'}; + if (defined $file_parent) { + if (!is_valid_pathname($file_parent)) { + die_error(400, "Invalid file parent parameter"); + } + } + + # parameters which are refnames + our $hash = $input_params{'hash'}; + if (defined $hash) { + if (!is_valid_refname($hash)) { + die_error(400, "Invalid hash parameter"); + } + } + + our $hash_parent = $input_params{'hash_parent'}; + if (defined $hash_parent) { + if (!is_valid_refname($hash_parent)) { + die_error(400, "Invalid hash parent parameter"); + } + } + + our $hash_base = $input_params{'hash_base'}; + if (defined $hash_base) { + if (!is_valid_refname($hash_base)) { + die_error(400, "Invalid hash base parameter"); + } + } + + our @extra_options = @{$input_params{'extra_options'}}; + # @extra_options is always defined, since it can only be (currently) set from + # CGI, and $cgi->param() returns the empty array in array context if the param + # is not set + foreach my $opt (@extra_options) { + if (not exists $allowed_options{$opt}) { + die_error(400, "Invalid option parameter"); + } + if (not grep(/^$action$/, @{$allowed_options{$opt}})) { + die_error(400, "Invalid option parameter for this action"); + } + } + + our $hash_parent_base = $input_params{'hash_parent_base'}; + if (defined $hash_parent_base) { + if (!is_valid_refname($hash_parent_base)) { + die_error(400, "Invalid hash parent base parameter"); + } + } + + # other parameters + our $page = $input_params{'page'}; + if (defined $page) { + if ($page =~ m/[^0-9]/) { + die_error(400, "Invalid page parameter"); + } + } + + our $searchtype = $input_params{'searchtype'}; + if (defined $searchtype) { + if ($searchtype =~ m/[^a-z]/) { + die_error(400, "Invalid searchtype parameter"); + } + } + + our $search_use_regexp = $input_params{'search_use_regexp'}; + + our $searchtext = $input_params{'searchtext'}; + our $search_regexp = undef; + if (defined $searchtext) { + if (length($searchtext) < 2) { + die_error(403, "At least two characters are required for search parameter"); + } + if ($search_use_regexp) { + $search_regexp = $searchtext; + if (!eval { qr/$search_regexp/; 1; }) { + my $error = $@ =~ s/ at \S+ line \d+.*\n?//r; + die_error(400, "Invalid search regexp '$search_regexp'", + esc_html($error)); + } + } else { + $search_regexp = quotemeta $searchtext; + } + } +} + +# path to the current git repository +our $git_dir; +sub evaluate_git_dir { + our $git_dir = 
"$projectroot/$project" if $project; +} + +our (@snapshot_fmts, $git_avatar, @extra_branch_refs); +sub configure_gitweb_features { + # list of supported snapshot formats + our @snapshot_fmts = gitweb_get_feature('snapshot'); + @snapshot_fmts = filter_snapshot_fmts(@snapshot_fmts); + + our ($git_avatar) = gitweb_get_feature('avatar'); + $git_avatar = '' unless $git_avatar =~ /^(?:gravatar|picon)$/s; + + our @extra_branch_refs = gitweb_get_feature('extra-branch-refs'); + @extra_branch_refs = filter_and_validate_refs (@extra_branch_refs); +} + +sub get_branch_refs { + return ('heads', @extra_branch_refs); +} + +# custom error handler: 'die ' is Internal Server Error +sub handle_errors_html { + my $msg = shift; # it is already HTML escaped + + # to avoid infinite loop where error occurs in die_error, + # change handler to default handler, disabling handle_errors_html + set_message("Error occurred when inside die_error:\n$msg"); + + # you cannot jump out of die_error when called as error handler; + # the subroutine set via CGI::Carp::set_message is called _after_ + # HTTP headers are already written, so it cannot write them itself + die_error(undef, undef, $msg, -error_handler => 1, -no_http_header => 1); +} +set_message(\&handle_errors_html); + +# dispatch +sub dispatch { + if (!defined $action) { + if (defined $hash) { + $action = git_get_type($hash); + $action or die_error(404, "Object does not exist"); + } elsif (defined $hash_base && defined $file_name) { + $action = git_get_type("$hash_base:$file_name"); + $action or die_error(404, "File or directory does not exist"); + } elsif (defined $project) { + $action = 'summary'; + } else { + $action = 'project_list'; + } + } + if (!defined($actions{$action})) { + die_error(400, "Unknown action"); + } + if ($action !~ m/^(?:opml|project_list|project_index)$/ && + !$project) { + die_error(400, "Project needed"); + } + $actions{$action}->(); +} + +sub reset_timer { + our $t0 = [ gettimeofday() ] + if defined $t0; + our $number_of_git_cmds = 0; +} + +our $first_request = 1; +sub run_request { + reset_timer(); + + evaluate_uri(); + if ($first_request) { + evaluate_gitweb_config(); + evaluate_git_version(); + } + if ($per_request_config) { + if (ref($per_request_config) eq 'CODE') { + $per_request_config->(); + } elsif (!$first_request) { + evaluate_gitweb_config(); + } + } + check_loadavg(); + + # $projectroot and $projects_list might be set in gitweb config file + $projects_list ||= $projectroot; + + evaluate_query_params(); + evaluate_path_info(); + evaluate_and_validate_params(); + evaluate_git_dir(); + + configure_gitweb_features(); + + dispatch(); +} + +our $is_last_request = sub { 1 }; +our ($pre_dispatch_hook, $post_dispatch_hook, $pre_listen_hook); +our $CGI = 'CGI'; +our $cgi; +our $FCGI_Stream_PRINT_raw = \&FCGI::Stream::PRINT; +sub configure_as_fcgi { + require CGI::Fast; + our $CGI = 'CGI::Fast'; + # FCGI is not Unicode aware hence the UTF-8 encoding must be done manually. + # However no encoding must be done within git_blob_plain() and git_snapshot() + # which must still output in raw binary mode. 
+ no warnings 'redefine'; + my $enc = Encode::find_encoding('UTF-8'); + *FCGI::Stream::PRINT = sub { + my @OUTPUT = @_; + for (my $i = 1; $i < @_; $i++) { + $OUTPUT[$i] = $enc->encode($_[$i], Encode::FB_CROAK|Encode::LEAVE_SRC); + } + @_ = @OUTPUT; + goto $FCGI_Stream_PRINT_raw; + }; + + my $request_number = 0; + # let each child service 100 requests + our $is_last_request = sub { ++$request_number > 100 }; +} +sub evaluate_argv { + my $script_name = $ENV{'SCRIPT_NAME'} || $ENV{'SCRIPT_FILENAME'} || __FILE__; + configure_as_fcgi() + if $script_name =~ /\.fcgi$/; + + return unless (@ARGV); + + require Getopt::Long; + Getopt::Long::GetOptions( + 'fastcgi|fcgi|f' => \&configure_as_fcgi, + 'nproc|n=i' => sub { + my ($arg, $val) = @_; + return unless eval { require FCGI::ProcManager; 1; }; + my $proc_manager = FCGI::ProcManager->new({ + n_processes => $val, + }); + our $pre_listen_hook = sub { $proc_manager->pm_manage() }; + our $pre_dispatch_hook = sub { $proc_manager->pm_pre_dispatch() }; + our $post_dispatch_hook = sub { $proc_manager->pm_post_dispatch() }; + }, + ); +} + +sub run { + evaluate_argv(); + + $first_request = 1; + $pre_listen_hook->() + if $pre_listen_hook; + + REQUEST: + while ($cgi = $CGI->new()) { + $pre_dispatch_hook->() + if $pre_dispatch_hook; + + run_request(); + + $post_dispatch_hook->() + if $post_dispatch_hook; + $first_request = 0; + + last REQUEST if ($is_last_request->()); + } + + DONE_GITWEB: + 1; +} + +run(); + +if (defined caller) { + # wrapped in a subroutine processing requests, + # e.g. mod_perl with ModPerl::Registry, or PSGI with Plack::App::WrapCGI + return; +} else { + # pure CGI script, serving single request + exit; +} + +## ====================================================================== +## action links + +# possible values of extra options +# -full => 0|1 - use absolute/full URL ($my_uri/$my_url as base) +# -replay => 1 - start from a current view (replay with modifications) +# -path_info => 0|1 - don't use/use path_info URL (if possible) +# -anchor => ANCHOR - add #ANCHOR to end of URL, implies -replay if used alone +sub href { + my %params = @_; + # default is to use -absolute url() i.e. $my_uri + my $href = $params{-full} ? $my_url : $my_uri; + + # implicit -replay, must be first of implicit params + $params{-replay} = 1 if (keys %params == 1 && $params{-anchor}); + + $params{'project'} = $project unless exists $params{'project'}; + + if ($params{-replay}) { + while (my ($name, $symbol) = each %cgi_param_mapping) { + if (!exists $params{$name}) { + $params{$name} = $input_params{$name}; + } + } + } + + my $use_pathinfo = gitweb_check_feature('pathinfo'); + if (defined $params{'project'} && + (exists $params{-path_info} ? 
$params{-path_info} : $use_pathinfo)) { + # try to put as many parameters as possible in PATH_INFO: + # - project name + # - action + # - hash_parent or hash_parent_base:/file_parent + # - hash or hash_base:/filename + # - the snapshot_format as an appropriate suffix + + # When the script is the root DirectoryIndex for the domain, + # $href here would be something like http://gitweb.example.com/ + # Thus, we strip any trailing / from $href, to spare us double + # slashes in the final URL + $href =~ s,/$,,; + + # Then add the project name, if present + $href .= "/".esc_path_info($params{'project'}); + delete $params{'project'}; + + # since we destructively absorb parameters, we keep this + # boolean that remembers if we're handling a snapshot + my $is_snapshot = $params{'action'} eq 'snapshot'; + + # Summary just uses the project path URL, any other action is + # added to the URL + if (defined $params{'action'}) { + $href .= "/".esc_path_info($params{'action'}) + unless $params{'action'} eq 'summary'; + delete $params{'action'}; + } + + # Next, we put hash_parent_base:/file_parent..hash_base:/file_name, + # stripping nonexistent or useless pieces + $href .= "/" if ($params{'hash_base'} || $params{'hash_parent_base'} + || $params{'hash_parent'} || $params{'hash'}); + if (defined $params{'hash_base'}) { + if (defined $params{'hash_parent_base'}) { + $href .= esc_path_info($params{'hash_parent_base'}); + # skip the file_parent if it's the same as the file_name + if (defined $params{'file_parent'}) { + if (defined $params{'file_name'} && $params{'file_parent'} eq $params{'file_name'}) { + delete $params{'file_parent'}; + } elsif ($params{'file_parent'} !~ /\.\./) { + $href .= ":/".esc_path_info($params{'file_parent'}); + delete $params{'file_parent'}; + } + } + $href .= ".."; + delete $params{'hash_parent'}; + delete $params{'hash_parent_base'}; + } elsif (defined $params{'hash_parent'}) { + $href .= esc_path_info($params{'hash_parent'}). ".."; + delete $params{'hash_parent'}; + } + + $href .= esc_path_info($params{'hash_base'}); + if (defined $params{'file_name'} && $params{'file_name'} !~ /\.\./) { + $href .= ":/".esc_path_info($params{'file_name'}); + delete $params{'file_name'}; + } + delete $params{'hash'}; + delete $params{'hash_base'}; + } elsif (defined $params{'hash'}) { + $href .= esc_path_info($params{'hash'}); + delete $params{'hash'}; + } + + # If the action was a snapshot, we can absorb the + # snapshot_format parameter too + if ($is_snapshot) { + my $fmt = $params{'snapshot_format'}; + # snapshot_format should always be defined when href() + # is called, but just in case some code forgets, we + # fall back to the default + $fmt ||= $snapshot_fmts[0]; + $href .= $known_snapshot_formats{$fmt}{'suffix'}; + delete $params{'snapshot_format'}; + } + } + + # now encode the parameters explicitly + my @result = (); + for (my $i = 0; $i < @cgi_param_mapping; $i += 2) { + my ($name, $symbol) = ($cgi_param_mapping[$i], $cgi_param_mapping[$i+1]); + if (defined $params{$name}) { + if (ref($params{$name}) eq "ARRAY") { + foreach my $par (@{$params{$name}}) { + push @result, $symbol . "=" . esc_param($par); + } + } else { + push @result, $symbol . "=" . esc_param($params{$name}); + } + } + } + $href .= "?" . 
join(';', @result) if scalar @result; + + # final transformation: trailing spaces must be escaped (URI-encoded) + $href =~ s/(\s+)$/CGI::escape($1)/e; + + if ($params{-anchor}) { + $href .= "#".esc_param($params{-anchor}); + } + + return $href; +} + + +## ====================================================================== +## validation, quoting/unquoting and escaping + +sub is_valid_action { + my $input = shift; + return undef unless exists $actions{$input}; + return 1; +} + +sub is_valid_project { + my $input = shift; + + return unless defined $input; + if (!is_valid_pathname($input) || + !(-d "$projectroot/$input") || + !check_export_ok("$projectroot/$input") || + ($strict_export && !project_in_list($input))) { + return undef; + } else { + return 1; + } +} + +sub is_valid_pathname { + my $input = shift; + + return undef unless defined $input; + # no '.' or '..' as elements of path, i.e. no '.' or '..' + # at the beginning, at the end, and between slashes. + # also this catches doubled slashes + if ($input =~ m!(^|/)(|\.|\.\.)(/|$)!) { + return undef; + } + # no null characters + if ($input =~ m!\0!) { + return undef; + } + return 1; +} + +sub is_valid_ref_format { + my $input = shift; + + return undef unless defined $input; + # restrictions on ref name according to git-check-ref-format + if ($input =~ m!(/\.|\.\.|[\000-\040\177 ~^:?*\[]|/$)!) { + return undef; + } + return 1; +} + +sub is_valid_refname { + my $input = shift; + + return undef unless defined $input; + # textual hashes are O.K. + if ($input =~ m/^$oid_regex$/) { + return 1; + } + # it must be correct pathname + is_valid_pathname($input) or return undef; + # check git-check-ref-format restrictions + is_valid_ref_format($input) or return undef; + return 1; +} + +# decode sequences of octets in utf8 into Perl's internal form, +# which is utf-8 with utf8 flag set if needed. gitweb writes out +# in utf-8 thanks to "binmode STDOUT, ':utf8'" at beginning +sub to_utf8 { + my $str = shift; + return undef unless defined $str; + + if (utf8::is_utf8($str) || utf8::decode($str)) { + return $str; + } else { + return decode($fallback_encoding, $str, Encode::FB_DEFAULT); + } +} + +# quote unsafe chars, but keep the slash, even when it's not +# correct, but quoted slashes look too horrible in bookmarks +sub esc_param { + my $str = shift; + return undef unless defined $str; + $str =~ s/([^A-Za-z0-9\-_.~()\/:@ ]+)/CGI::escape($1)/eg; + $str =~ s/ /\+/g; + return $str; +} + +# the quoting rules for path_info fragment are slightly different +sub esc_path_info { + my $str = shift; + return undef unless defined $str; + + # path_info doesn't treat '+' as space (specially), but '?' must be escaped + $str =~ s/([^A-Za-z0-9\-_.~();\/;:@&= +]+)/CGI::escape($1)/eg; + + return $str; +} + +# quote unsafe chars in whole URL, so some characters cannot be quoted +sub esc_url { + my $str = shift; + return undef unless defined $str; + $str =~ s/([^A-Za-z0-9\-_.~();\/;?:@&= ]+)/CGI::escape($1)/eg; + $str =~ s/ /\+/g; + return $str; +} + +# quote unsafe characters in HTML attributes +sub esc_attr { + + # for XHTML conformance escaping '"' to '"' is not enough + return esc_html(@_); +} + +# replace invalid utf8 character with SUBSTITUTION sequence +sub esc_html { + my $str = shift; + my %opts = @_; + + return undef unless defined $str; + + $str = to_utf8($str); + $str = $cgi->escapeHTML($str); + if ($opts{'-nbsp'}) { + $str =~ s/ / /g; + } + $str =~ s|([[:cntrl:]])|(($1 ne "\t") ? 
quot_cec($1) : $1)|eg; + return $str; +} + +# quote control characters and escape filename to HTML +sub esc_path { + my $str = shift; + my %opts = @_; + + return undef unless defined $str; + + $str = to_utf8($str); + $str = $cgi->escapeHTML($str); + if ($opts{'-nbsp'}) { + $str =~ s/ / /g; + } + $str =~ s|([[:cntrl:]])|quot_cec($1)|eg; + return $str; +} + +# Sanitize for use in XHTML + application/xml+xhtml (valid XML 1.0) +sub sanitize { + my $str = shift; + + return undef unless defined $str; + + $str = to_utf8($str); + $str =~ s|([[:cntrl:]])|(index("\t\n\r", $1) != -1 ? $1 : quot_cec($1))|eg; + return $str; +} + +# Make control characters "printable", using character escape codes (CEC) +sub quot_cec { + my $cntrl = shift; + my %opts = @_; + my %es = ( # character escape codes, aka escape sequences + "\t" => '\t', # tab (HT) + "\n" => '\n', # line feed (LF) + "\r" => '\r', # carriage return (CR) + "\f" => '\f', # form feed (FF) + "\b" => '\b', # backspace (BS) + "\a" => '\a', # alarm (bell) (BEL) + "\e" => '\e', # escape (ESC) + "\013" => '\v', # vertical tab (VT) + "\000" => '\0', # nul character (NUL) + ); + my $chr = ( (exists $es{$cntrl}) + ? $es{$cntrl} + : sprintf('\%2x', ord($cntrl)) ); + if ($opts{-nohtml}) { + return $chr; + } else { + return "$chr"; + } +} + +# Alternatively use unicode control pictures codepoints, +# Unicode "printable representation" (PR) +sub quot_upr { + my $cntrl = shift; + my %opts = @_; + + my $chr = sprintf('&#%04d;', 0x2400+ord($cntrl)); + if ($opts{-nohtml}) { + return $chr; + } else { + return "$chr"; + } +} + +# git may return quoted and escaped filenames +sub unquote { + my $str = shift; + + sub unq { + my $seq = shift; + my %es = ( # character escape codes, aka escape sequences + 't' => "\t", # tab (HT, TAB) + 'n' => "\n", # newline (NL) + 'r' => "\r", # return (CR) + 'f' => "\f", # form feed (FF) + 'b' => "\b", # backspace (BS) + 'a' => "\a", # alarm (bell) (BEL) + 'e' => "\e", # escape (ESC) + 'v' => "\013", # vertical tab (VT) + ); + + if ($seq =~ m/^[0-7]{1,3}$/) { + # octal char sequence + return chr(oct($seq)); + } elsif (exists $es{$seq}) { + # C escape sequence, aka character escape code + return $es{$seq}; + } + # quoted ordinary character + return $seq; + } + + if ($str =~ m/^"(.*)"$/) { + # needs unquoting + $str = $1; + $str =~ s/\\([^0-7]|[0-7]{1,3})/unq($1)/eg; + } + return $str; +} + +# escape tabs (convert tabs to spaces) +sub untabify { + my $line = shift; + + while ((my $pos = index($line, "\t")) != -1) { + if (my $count = (8 - ($pos % 8))) { + my $spaces = ' ' x $count; + $line =~ s/\t/$spaces/; + } + } + + return $line; +} + +sub project_in_list { + my $project = shift; + my @list = git_get_projects_list(); + return @list && scalar(grep { $_->{'path'} eq $project } @list); +} + +## ---------------------------------------------------------------------- +## HTML aware string manipulation + +# Try to chop given string on a word boundary between position +# $len and $len+$add_len. If there is no word boundary there, +# chop at $len+$add_len. Do not chop if chopped part plus ellipsis +# (marking chopped part) would be longer than given string. +sub chop_str { + my $str = shift; + my $len = shift; + my $add_len = shift || 10; + my $where = shift || 'right'; # 'left' | 'center' | 'right' + + # Make sure perl knows it is utf8 encoded so we don't + # cut in the middle of a utf8 multibyte char. 
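+ # Illustrative example (not in the original code): with the default
+ # $add_len and $where, chop_str("Lorem ipsum dolor sit amet", 10) extends
+ # the cut to the word boundary after "ipsum" and returns roughly
+ # "Lorem ipsum... ".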
+ $str = to_utf8($str); + + # allow only $len chars, but don't cut a word if it would fit in $add_len + # if it doesn't fit, cut it if it's still longer than the dots we would add + # remove chopped character entities entirely + + # when chopping in the middle, distribute $len into left and right part + # return early if chopping wouldn't make string shorter + if ($where eq 'center') { + return $str if ($len + 5 >= length($str)); # filler is length 5 + $len = int($len/2); + } else { + return $str if ($len + 4 >= length($str)); # filler is length 4 + } + + # regexps: ending and beginning with word part up to $add_len + my $endre = qr/.{$len}\w{0,$add_len}/; + my $begre = qr/\w{0,$add_len}.{$len}/; + + if ($where eq 'left') { + $str =~ m/^(.*?)($begre)$/; + my ($lead, $body) = ($1, $2); + if (length($lead) > 4) { + $lead = " ..."; + } + return "$lead$body"; + + } elsif ($where eq 'center') { + $str =~ m/^($endre)(.*)$/; + my ($left, $str) = ($1, $2); + $str =~ m/^(.*?)($begre)$/; + my ($mid, $right) = ($1, $2); + if (length($mid) > 5) { + $mid = " ... "; + } + return "$left$mid$right"; + + } else { + $str =~ m/^($endre)(.*)$/; + my $body = $1; + my $tail = $2; + if (length($tail) > 4) { + $tail = "... "; + } + return "$body$tail"; + } +} + +# takes the same arguments as chop_str, but also wraps a around the +# result with a title attribute if it does get chopped. Additionally, the +# string is HTML-escaped. +sub chop_and_escape_str { + my ($str) = @_; + + my $chopped = chop_str(@_); + $str = to_utf8($str); + if ($chopped eq $str) { + return esc_html($chopped); + } else { + $str =~ s/[[:cntrl:]]/?/g; + return $cgi->span({-title=>$str}, esc_html($chopped)); + } +} + +# Highlight selected fragments of string, using given CSS class, +# and escape HTML. It is assumed that fragments do not overlap. +# Regions are passed as list of pairs (array references). +# +# Example: esc_html_hl_regions("foobar", "mark", [ 0, 3 ]) returns +# 'foobar' +sub esc_html_hl_regions { + my ($str, $css_class, @sel) = @_; + my %opts = grep { ref($_) ne 'ARRAY' } @sel; + @sel = grep { ref($_) eq 'ARRAY' } @sel; + return esc_html($str, %opts) unless @sel; + + my $out = ''; + my $pos = 0; + + for my $s (@sel) { + my ($begin, $end) = @$s; + + # Don't create empty elements. 
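+ # Each region is a half-open [begin, end) character range into the
+ # unescaped string; empty or inverted ranges are skipped here.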
+ next if $end <= $begin; + + my $escaped = esc_html(substr($str, $begin, $end - $begin), + %opts); + + $out .= esc_html(substr($str, $pos, $begin - $pos), %opts) + if ($begin - $pos > 0); + $out .= $cgi->span({-class => $css_class}, $escaped); + + $pos = $end; + } + $out .= esc_html(substr($str, $pos), %opts) + if ($pos < length($str)); + + return $out; +} + +# return positions of beginning and end of each match +sub matchpos_list { + my ($str, $regexp) = @_; + return unless (defined $str && defined $regexp); + + my @matches; + while ($str =~ /$regexp/g) { + push @matches, [$-[0], $+[0]]; + } + return @matches; +} + +# highlight match (if any), and escape HTML +sub esc_html_match_hl { + my ($str, $regexp) = @_; + return esc_html($str) unless defined $regexp; + + my @matches = matchpos_list($str, $regexp); + return esc_html($str) unless @matches; + + return esc_html_hl_regions($str, 'match', @matches); +} + + +# highlight match (if any) of shortened string, and escape HTML +sub esc_html_match_hl_chopped { + my ($str, $chopped, $regexp) = @_; + return esc_html_match_hl($str, $regexp) unless defined $chopped; + + my @matches = matchpos_list($str, $regexp); + return esc_html($chopped) unless @matches; + + # filter matches so that we mark chopped string + my $tail = "... "; # see chop_str + unless ($chopped =~ s/\Q$tail\E$//) { + $tail = ''; + } + my $chop_len = length($chopped); + my $tail_len = length($tail); + my @filtered; + + for my $m (@matches) { + if ($m->[0] > $chop_len) { + push @filtered, [ $chop_len, $chop_len + $tail_len ] if ($tail_len > 0); + last; + } elsif ($m->[1] > $chop_len) { + push @filtered, [ $m->[0], $chop_len + $tail_len ]; + last; + } + push @filtered, $m; + } + + return esc_html_hl_regions($chopped . $tail, 'match', @filtered); +} + +## ---------------------------------------------------------------------- +## functions returning short strings + +# CSS class for given age value (in seconds) +sub age_class { + my $age = shift; + + if (!defined $age) { + return "noage"; + } elsif ($age < 60*60*2) { + return "age0"; + } elsif ($age < 60*60*24*2) { + return "age1"; + } else { + return "age2"; + } +} + +# convert age in seconds to "nn units ago" string +sub age_string { + my $age = shift; + my $age_str; + + if ($age > 60*60*24*365*2) { + $age_str = (int $age/60/60/24/365); + $age_str .= " years ago"; + } elsif ($age > 60*60*24*(365/12)*2) { + $age_str = int $age/60/60/24/(365/12); + $age_str .= " months ago"; + } elsif ($age > 60*60*24*7*2) { + $age_str = int $age/60/60/24/7; + $age_str .= " weeks ago"; + } elsif ($age > 60*60*24*2) { + $age_str = int $age/60/60/24; + $age_str .= " days ago"; + } elsif ($age > 60*60*2) { + $age_str = int $age/60/60; + $age_str .= " hours ago"; + } elsif ($age > 60*2) { + $age_str = int $age/60; + $age_str .= " min ago"; + } elsif ($age > 2) { + $age_str = int $age; + $age_str .= " sec ago"; + } else { + $age_str .= " right now"; + } + return $age_str; +} + +use constant { + S_IFINVALID => 0030000, + S_IFGITLINK => 0160000, +}; + +# submodule/subproject, a commit object reference +sub S_ISGITLINK { + my $mode = shift; + + return (($mode & S_IFMT) == S_IFGITLINK) +} + +# convert file mode in octal to symbolic file mode string +sub mode_str { + my $mode = oct shift; + + if (S_ISGITLINK($mode)) { + return 'm---------'; + } elsif (S_ISDIR($mode & S_IFMT)) { + return 'drwxr-xr-x'; + } elsif (S_ISLNK($mode)) { + return 'lrwxrwxrwx'; + } elsif (S_ISREG($mode)) { + # git cares only about the executable bit + if ($mode & S_IXUSR) { + return 
'-rwxr-xr-x'; + } else { + return '-rw-r--r--'; + }; + } else { + return '----------'; + } +} + +# convert file mode in octal to file type string +sub file_type { + my $mode = shift; + + if ($mode !~ m/^[0-7]+$/) { + return $mode; + } else { + $mode = oct $mode; + } + + if (S_ISGITLINK($mode)) { + return "submodule"; + } elsif (S_ISDIR($mode & S_IFMT)) { + return "directory"; + } elsif (S_ISLNK($mode)) { + return "symlink"; + } elsif (S_ISREG($mode)) { + return "file"; + } else { + return "unknown"; + } +} + +# convert file mode in octal to file type description string +sub file_type_long { + my $mode = shift; + + if ($mode !~ m/^[0-7]+$/) { + return $mode; + } else { + $mode = oct $mode; + } + + if (S_ISGITLINK($mode)) { + return "submodule"; + } elsif (S_ISDIR($mode & S_IFMT)) { + return "directory"; + } elsif (S_ISLNK($mode)) { + return "symlink"; + } elsif (S_ISREG($mode)) { + if ($mode & S_IXUSR) { + return "executable"; + } else { + return "file"; + }; + } else { + return "unknown"; + } +} + + +## ---------------------------------------------------------------------- +## functions returning short HTML fragments, or transforming HTML fragments +## which don't belong to other sections + +# format line of commit message. +sub format_log_line_html { + my $line = shift; + + # Potentially abbreviated OID. + my $regex = oid_nlen_regex("7,64"); + + $line = esc_html($line, -nbsp=>1); + $line =~ s{ + \b + ( + # The output of "git describe", e.g. v2.10.0-297-gf6727b0 + # or hadoop-20160921-113441-20-g094fb7d + (?a({-href => href(action=>"object", hash=>$1), + -class => "text"}, $1); + }egx; + + return $line; +} + +# format marker of refs pointing to given object + +# the destination action is chosen based on object type and current context: +# - for annotated tags, we choose the tag view unless it's the current view +# already, in which case we go to shortlog view +# - for other refs, we keep the current view if we're in history, shortlog or +# log view, and select shortlog otherwise +sub format_ref_marker { + my ($refs, $id) = @_; + my $markers = ''; + + if (defined $refs->{$id}) { + foreach my $ref (@{$refs->{$id}}) { + # this code exploits the fact that non-lightweight tags are the + # only indirect objects, and that they are the only objects for which + # we want to use tag instead of shortlog as action + my ($type, $name) = qw(); + my $indirect = ($ref =~ s/\^\{\}$//); + # e.g. tags/v2.6.11 or heads/next + if ($ref =~ m!^(.*?)s?/(.*)$!) { + $type = $1; + $name = $2; + } else { + $type = "ref"; + $name = $ref; + } + + my $class = $type; + $class .= " indirect" if $indirect; + + my $dest_action = "shortlog"; + + if ($indirect) { + $dest_action = "tag" unless $action eq "tag"; + } elsif ($action =~ /^(history|(short)?log)$/) { + $dest_action = $action; + } + + my $dest = ""; + $dest .= "refs/" unless $ref =~ m!^refs/!; + $dest .= $ref; + + my $link = $cgi->a({ + -href => href( + action=>$dest_action, + hash=>$dest + )}, esc_html($name)); + + $markers .= " " . + $link . ""; + } + } + + if ($markers) { + return ' '. $markers . ''; + } else { + return ""; + } +} + +# format, perhaps shortened and with markers, title line +sub format_subject_html { + my ($long, $short, $href, $extra) = @_; + $extra = '' unless defined($extra); + + if (length($short) < length($long)) { + $long =~ s/[[:cntrl:]]/?/g; + return $cgi->a({-href => $href, -class => "list subject", + -title => to_utf8($long)}, + esc_html($short)) . 
$extra; + } else { + return $cgi->a({-href => $href, -class => "list subject"}, + esc_html($long)) . $extra; + } +} + +# Rather than recomputing the url for an email multiple times, we cache it +# after the first hit. This gives a visible benefit in views where the avatar +# for the same email is used repeatedly (e.g. shortlog). +# The cache is shared by all avatar engines (currently gravatar only), which +# are free to use it as preferred. Since only one avatar engine is used for any +# given page, there's no risk for cache conflicts. +our %avatar_cache = (); + +# Compute the picon url for a given email, by using the picon search service over at +# http://www.cs.indiana.edu/picons/search.html +sub picon_url { + my $email = lc shift; + if (!$avatar_cache{$email}) { + my ($user, $domain) = split('@', $email); + $avatar_cache{$email} = + "//www.cs.indiana.edu/cgi-pub/kinzler/piconsearch.cgi/" . + "$domain/$user/" . + "users+domains+unknown/up/single"; + } + return $avatar_cache{$email}; +} + +# Compute the gravatar url for a given email, if it's not in the cache already. +# Gravatar stores only the part of the URL before the size, since that's the +# one computationally more expensive. This also allows reuse of the cache for +# different sizes (for this particular engine). +sub gravatar_url { + my $email = lc shift; + my $size = shift; + $avatar_cache{$email} ||= + "//www.gravatar.com/avatar/" . + md5_hex($email) . "?s="; + return $avatar_cache{$email} . $size; +} + +# Insert an avatar for the given $email at the given $size if the feature +# is enabled. +sub git_get_avatar { + my ($email, %opts) = @_; + my $pre_white = ($opts{-pad_before} ? " " : ""); + my $post_white = ($opts{-pad_after} ? " " : ""); + $opts{-size} ||= 'default'; + my $size = $avatar_size{$opts{-size}} || $avatar_size{'default'}; + my $url = ""; + if ($git_avatar eq 'gravatar') { + $url = gravatar_url($email, $size); + } elsif ($git_avatar eq 'picon') { + $url = picon_url($email); + } + # Other providers can be added by extending the if chain, defining $url + # as needed. If no variant puts something in $url, we assume avatars + # are completely disabled/unavailable. + if ($url) { + return $pre_white . + "" . $post_white; + } else { + return ""; + } +} + +sub format_search_author { + my ($author, $searchtype, $displaytext) = @_; + my $have_search = gitweb_check_feature('search'); + + if ($have_search) { + my $performed = ""; + if ($searchtype eq 'author') { + $performed = "authored"; + } elsif ($searchtype eq 'committer') { + $performed = "committed"; + } + + return $cgi->a({-href => href(action=>"search", hash=>$hash, + searchtext=>$author, + searchtype=>$searchtype), class=>"list", + title=>"Search for commits $performed by $author"}, + $displaytext); + + } else { + return $displaytext; + } +} + +# format the author name of the given commit with the given tag +# the author name is chopped and escaped according to the other +# optional parameters (see chop_str). +sub format_author_html { + my $tag = shift; + my $co = shift; + my $author = chop_and_escape_str($co->{'author_name'}, @_); + return "<$tag class=\"author\">" . + format_search_author($co->{'author_name'}, "author", + git_get_avatar($co->{'author_email'}, -pad_after => 1) . + $author) . + ""; +} + +# format git diff header line, i.e. "diff --(git|combined|cc) ..." +sub format_git_diff_header_line { + my $line = shift; + my $diffinfo = shift; + my ($from, $to) = @_; + + if ($diffinfo->{'nparents'}) { + # combined diff + $line =~ s!^(diff (.*?) 
)"?.*$!$1!; + if ($to->{'href'}) { + $line .= $cgi->a({-href => $to->{'href'}, -class => "path"}, + esc_path($to->{'file'})); + } else { # file was deleted (no href) + $line .= esc_path($to->{'file'}); + } + } else { + # "ordinary" diff + $line =~ s!^(diff (.*?) )"?a/.*$!$1!; + if ($from->{'href'}) { + $line .= $cgi->a({-href => $from->{'href'}, -class => "path"}, + 'a/' . esc_path($from->{'file'})); + } else { # file was added (no href) + $line .= 'a/' . esc_path($from->{'file'}); + } + $line .= ' '; + if ($to->{'href'}) { + $line .= $cgi->a({-href => $to->{'href'}, -class => "path"}, + 'b/' . esc_path($to->{'file'})); + } else { # file was deleted + $line .= 'b/' . esc_path($to->{'file'}); + } + } + + return "
<div class=\"diff header\">$line</div>
\n"; +} + +# format extended diff header line, before patch itself +sub format_extended_diff_header_line { + my $line = shift; + my $diffinfo = shift; + my ($from, $to) = @_; + + # match + if ($line =~ s!^((copy|rename) from ).*$!$1! && $from->{'href'}) { + $line .= $cgi->a({-href=>$from->{'href'}, -class=>"path"}, + esc_path($from->{'file'})); + } + if ($line =~ s!^((copy|rename) to ).*$!$1! && $to->{'href'}) { + $line .= $cgi->a({-href=>$to->{'href'}, -class=>"path"}, + esc_path($to->{'file'})); + } + # match single + if ($line =~ m/\s(\d{6})$/) { + $line .= ' (' . + file_type_long($1) . + ')'; + } + # match + if ($line =~ oid_nlen_prefix_infix_regex($sha1_len, "index ", ",") | + $line =~ oid_nlen_prefix_infix_regex($sha256_len, "index ", ",")) { + # can match only for combined diff + $line = 'index '; + for (my $i = 0; $i < $diffinfo->{'nparents'}; $i++) { + if ($from->{'href'}[$i]) { + $line .= $cgi->a({-href=>$from->{'href'}[$i], + -class=>"hash"}, + substr($diffinfo->{'from_id'}[$i],0,7)); + } else { + $line .= '0' x 7; + } + # separator + $line .= ',' if ($i < $diffinfo->{'nparents'} - 1); + } + $line .= '..'; + if ($to->{'href'}) { + $line .= $cgi->a({-href=>$to->{'href'}, -class=>"hash"}, + substr($diffinfo->{'to_id'},0,7)); + } else { + $line .= '0' x 7; + } + + } elsif ($line =~ oid_nlen_prefix_infix_regex($sha1_len, "index ", "..") | + $line =~ oid_nlen_prefix_infix_regex($sha256_len, "index ", "..")) { + # can match only for ordinary diff + my ($from_link, $to_link); + if ($from->{'href'}) { + $from_link = $cgi->a({-href=>$from->{'href'}, -class=>"hash"}, + substr($diffinfo->{'from_id'},0,7)); + } else { + $from_link = '0' x 7; + } + if ($to->{'href'}) { + $to_link = $cgi->a({-href=>$to->{'href'}, -class=>"hash"}, + substr($diffinfo->{'to_id'},0,7)); + } else { + $to_link = '0' x 7; + } + my ($from_id, $to_id) = ($diffinfo->{'from_id'}, $diffinfo->{'to_id'}); + $line =~ s!$from_id\.\.$to_id!$from_link..$to_link!; + } + + return $line . "
\n"; +} + +# format from-file/to-file diff header +sub format_diff_from_to_header { + my ($from_line, $to_line, $diffinfo, $from, $to, @parents) = @_; + my $line; + my $result = ''; + + $line = $from_line; + #assert($line =~ m/^---/) if DEBUG; + # no extra formatting for "^--- /dev/null" + if (! $diffinfo->{'nparents'}) { + # ordinary (single parent) diff + if ($line =~ m!^--- "?a/!) { + if ($from->{'href'}) { + $line = '--- a/' . + $cgi->a({-href=>$from->{'href'}, -class=>"path"}, + esc_path($from->{'file'})); + } else { + $line = '--- a/' . + esc_path($from->{'file'}); + } + } + $result .= qq!
<div class="diff from_file">$line</div>
\n!; + + } else { + # combined diff (merge commit) + for (my $i = 0; $i < $diffinfo->{'nparents'}; $i++) { + if ($from->{'href'}[$i]) { + $line = '--- ' . + $cgi->a({-href=>href(action=>"blobdiff", + hash_parent=>$diffinfo->{'from_id'}[$i], + hash_parent_base=>$parents[$i], + file_parent=>$from->{'file'}[$i], + hash=>$diffinfo->{'to_id'}, + hash_base=>$hash, + file_name=>$to->{'file'}), + -class=>"path", + -title=>"diff" . ($i+1)}, + $i+1) . + '/' . + $cgi->a({-href=>$from->{'href'}[$i], -class=>"path"}, + esc_path($from->{'file'}[$i])); + } else { + $line = '--- /dev/null'; + } + $result .= qq!
<div class="diff from_file">$line</div>
\n!; + } + } + + $line = $to_line; + #assert($line =~ m/^\+\+\+/) if DEBUG; + # no extra formatting for "^+++ /dev/null" + if ($line =~ m!^\+\+\+ "?b/!) { + if ($to->{'href'}) { + $line = '+++ b/' . + $cgi->a({-href=>$to->{'href'}, -class=>"path"}, + esc_path($to->{'file'})); + } else { + $line = '+++ b/' . + esc_path($to->{'file'}); + } + } + $result .= qq!
<div class="diff to_file">$line</div>
\n!; + + return $result; +} + +# create note for patch simplified by combined diff +sub format_diff_cc_simplified { + my ($diffinfo, @parents) = @_; + my $result = ''; + + $result .= "
" . + "diff --cc "; + if (!is_deleted($diffinfo)) { + $result .= $cgi->a({-href => href(action=>"blob", + hash_base=>$hash, + hash=>$diffinfo->{'to_id'}, + file_name=>$diffinfo->{'to_file'}), + -class => "path"}, + esc_path($diffinfo->{'to_file'})); + } else { + $result .= esc_path($diffinfo->{'to_file'}); + } + $result .= "
\n" . # class="diff header" + "
" . + "Simple merge" . + "
\n"; # class="diff nodifferences" + + return $result; +} + +sub diff_line_class { + my ($line, $from, $to) = @_; + + # ordinary diff + my $num_sign = 1; + # combined diff + if ($from && $to && ref($from->{'href'}) eq "ARRAY") { + $num_sign = scalar @{$from->{'href'}}; + } + + my @diff_line_classifier = ( + { regexp => qr/^\@\@{$num_sign} /, class => "chunk_header"}, + { regexp => qr/^\\/, class => "incomplete" }, + { regexp => qr/^ {$num_sign}/, class => "ctx" }, + # classifier for context must come before classifier add/rem, + # or we would have to use more complicated regexp, for example + # qr/(?= {0,$m}\+)[+ ]{$num_sign}/, where $m = $num_sign - 1; + { regexp => qr/^[+ ]{$num_sign}/, class => "add" }, + { regexp => qr/^[- ]{$num_sign}/, class => "rem" }, + ); + for my $clsfy (@diff_line_classifier) { + return $clsfy->{'class'} + if ($line =~ $clsfy->{'regexp'}); + } + + # fallback + return ""; +} + +# assumes that $from and $to are defined and correctly filled, +# and that $line holds a line of chunk header for unified diff +sub format_unidiff_chunk_header { + my ($line, $from, $to) = @_; + + my ($from_text, $from_start, $from_lines, $to_text, $to_start, $to_lines, $section) = + $line =~ m/^\@{2} (-(\d+)(?:,(\d+))?) (\+(\d+)(?:,(\d+))?) \@{2}(.*)$/; + + $from_lines = 0 unless defined $from_lines; + $to_lines = 0 unless defined $to_lines; + + if ($from->{'href'}) { + $from_text = $cgi->a({-href=>"$from->{'href'}#l$from_start", + -class=>"list"}, $from_text); + } + if ($to->{'href'}) { + $to_text = $cgi->a({-href=>"$to->{'href'}#l$to_start", + -class=>"list"}, $to_text); + } + $line = "@@ $from_text $to_text @@" . + "" . esc_html($section, -nbsp=>1) . ""; + return $line; +} + +# assumes that $from and $to are defined and correctly filled, +# and that $line holds a line of chunk header for combined diff +sub format_cc_diff_chunk_header { + my ($line, $from, $to) = @_; + + my ($prefix, $ranges, $section) = $line =~ m/^(\@+) (.*?) \@+(.*)$/; + my (@from_text, @from_start, @from_nlines, $to_text, $to_start, $to_nlines); + + @from_text = split(' ', $ranges); + for (my $i = 0; $i < @from_text; ++$i) { + ($from_start[$i], $from_nlines[$i]) = + (split(',', substr($from_text[$i], 1)), 0); + } + + $to_text = pop @from_text; + $to_start = pop @from_start; + $to_nlines = pop @from_nlines; + + $line = "$prefix "; + for (my $i = 0; $i < @from_text; ++$i) { + if ($from->{'href'}[$i]) { + $line .= $cgi->a({-href=>"$from->{'href'}[$i]#l$from_start[$i]", + -class=>"list"}, $from_text[$i]); + } else { + $line .= $from_text[$i]; + } + $line .= " "; + } + if ($to->{'href'}) { + $line .= $cgi->a({-href=>"$to->{'href'}#l$to_start", + -class=>"list"}, $to_text); + } else { + $line .= $to_text; + } + $line .= " $prefix" . + "" . esc_html($section, -nbsp=>1) . ""; + return $line; +} + +# process patch (diff) line (not to be used for diff headers), +# returning HTML-formatted (but not wrapped) line. +# If the line is passed as a reference, it is treated as HTML and not +# esc_html()'ed. +sub format_diff_line { + my ($line, $diff_class, $from, $to) = @_; + + if (ref($line)) { + $line = $$line; + } else { + chomp $line; + $line = untabify($line); + + if ($from && $to && $line =~ m/^\@{2} /) { + $line = format_unidiff_chunk_header($line, $from, $to); + } elsif ($from && $to && $line =~ m/^\@{3}/) { + $line = format_cc_diff_chunk_header($line, $from, $to); + } else { + $line = esc_html($line, -nbsp=>1); + } + } + + my $diff_classes = "diff"; + $diff_classes .= " $diff_class" if ($diff_class); + $line = "
<div class=\"$diff_classes\">$line</div>
\n"; + + return $line; +} + +# Generates undef or something like "_snapshot_" or "snapshot (_tbz2_ _zip_)", +# linked. Pass the hash of the tree/commit to snapshot. +sub format_snapshot_links { + my ($hash) = @_; + my $num_fmts = @snapshot_fmts; + if ($num_fmts > 1) { + # A parenthesized list of links bearing format names. + # e.g. "snapshot (_tar.gz_ _zip_)" + return "snapshot (" . join(' ', map + $cgi->a({ + -href => href( + action=>"snapshot", + hash=>$hash, + snapshot_format=>$_ + ) + }, $known_snapshot_formats{$_}{'display'}) + , @snapshot_fmts) . ")"; + } elsif ($num_fmts == 1) { + # A single "snapshot" link whose tooltip bears the format name. + # i.e. "_snapshot_" + my ($fmt) = @snapshot_fmts; + return + $cgi->a({ + -href => href( + action=>"snapshot", + hash=>$hash, + snapshot_format=>$fmt + ), + -title => "in format: $known_snapshot_formats{$fmt}{'display'}" + }, "snapshot"); + } else { # $num_fmts == 0 + return undef; + } +} + +## ...................................................................... +## functions returning values to be passed, perhaps after some +## transformation, to other functions; e.g. returning arguments to href() + +# returns hash to be passed to href to generate gitweb URL +# in -title key it returns description of link +sub get_feed_info { + my $format = shift || 'Atom'; + my %res = (action => lc($format)); + my $matched_ref = 0; + + # feed links are possible only for project views + return unless (defined $project); + # some views should link to OPML, or to generic project feed, + # or don't have specific feed yet (so they should use generic) + return if (!$action || $action =~ /^(?:tags|heads|forks|tag|search)$/x); + + my $branch = undef; + # branches refs uses 'refs/' + $get_branch_refs()[x] + '/' prefix + # (fullname) to differentiate from tag links; this also makes + # possible to detect branch links + for my $ref (get_branch_refs()) { + if ((defined $hash_base && $hash_base =~ m!^refs/\Q$ref\E/(.*)$!) || + (defined $hash && $hash =~ m!^refs/\Q$ref\E/(.*)$!)) { + $branch = $1; + $matched_ref = $ref; + last; + } + } + # find log type for feed description (title) + my $type = 'log'; + if (defined $file_name) { + $type = "history of $file_name"; + $type .= "/" if ($action eq 'tree'); + $type .= " on '$branch'" if (defined $branch); + } else { + $type = "log of $branch" if (defined $branch); + } + + $res{-title} = $type; + $res{'hash'} = (defined $branch ? "refs/$matched_ref/$branch" : undef); + $res{'file_name'} = $file_name; + + return %res; +} + +## ---------------------------------------------------------------------- +## git utility subroutines, invoking git commands + +# returns path to the core git executable and the --git-dir parameter as list +sub git_cmd { + $number_of_git_cmds++; + return $GIT, '--git-dir='.$git_dir; +} + +# quote the given arguments for passing them to the shell +# quote_command("command", "arg 1", "arg with ' and ! characters") +# => "'command' 'arg 1' 'arg with '\'' and '\!' characters'" +# Try to avoid using this function wherever possible. 
+sub quote_command { + return join(' ', + map { my $a = $_ =~ s/(['!])/'\\$1'/gr; "'$a'" } @_ ); +} + +# get HEAD ref of given project as hash +sub git_get_head_hash { + return git_get_full_hash(shift, 'HEAD'); +} + +sub git_get_full_hash { + return git_get_hash(@_); +} + +sub git_get_short_hash { + return git_get_hash(@_, '--short=7'); +} + +sub git_get_hash { + my ($project, $hash, @options) = @_; + my $o_git_dir = $git_dir; + my $retval = undef; + $git_dir = "$projectroot/$project"; + if (open my $fd, '-|', git_cmd(), 'rev-parse', + '--verify', '-q', @options, $hash) { + $retval = <$fd>; + chomp $retval if defined $retval; + close $fd; + } + if (defined $o_git_dir) { + $git_dir = $o_git_dir; + } + return $retval; +} + +# get type of given object +sub git_get_type { + my $hash = shift; + + open my $fd, "-|", git_cmd(), "cat-file", '-t', $hash or return; + my $type = <$fd>; + close $fd or return; + chomp $type; + return $type; +} + +# repository configuration +our $config_file = ''; +our %config; + +# store multiple values for single key as anonymous array reference +# single values stored directly in the hash, not as [ ] +sub hash_set_multi { + my ($hash, $key, $value) = @_; + + if (!exists $hash->{$key}) { + $hash->{$key} = $value; + } elsif (!ref $hash->{$key}) { + $hash->{$key} = [ $hash->{$key}, $value ]; + } else { + push @{$hash->{$key}}, $value; + } +} + +# return hash of git project configuration +# optionally limited to some section, e.g. 'gitweb' +sub git_parse_project_config { + my $section_regexp = shift; + my %config; + + local $/ = "\0"; + + open my $fh, "-|", git_cmd(), "config", '-z', '-l', + or return; + + while (my $keyval = <$fh>) { + chomp $keyval; + my ($key, $value) = split(/\n/, $keyval, 2); + + hash_set_multi(\%config, $key, $value) + if (!defined $section_regexp || $key =~ /^(?:$section_regexp)\./o); + } + close $fh; + + return %config; +} + +# convert config value to boolean: 'true' or 'false' +# no value, number > 0, 'true' and 'yes' values are true +# rest of values are treated as false (never as error) +sub config_to_bool { + my $val = shift; + + return 1 if !defined $val; # section.key + + # strip leading and trailing whitespace + $val =~ s/^\s+//; + $val =~ s/\s+$//; + + return (($val =~ /^\d+$/ && $val) || # section.key = 1 + ($val =~ /^(?:true|yes)$/i)); # section.key = true +} + +# convert config value to simple decimal number +# an optional value suffix of 'k', 'm', or 'g' will cause the value +# to be multiplied by 1024, 1048576, or 1073741824 +sub config_to_int { + my $val = shift; + + # strip leading and trailing whitespace + $val =~ s/^\s+//; + $val =~ s/\s+$//; + + if (my ($num, $unit) = ($val =~ /^([0-9]*)([kmg])$/i)) { + $unit = lc($unit); + # unknown unit is treated as 1 + return $num * ($unit eq 'g' ? 1073741824 : + $unit eq 'm' ? 1048576 : + $unit eq 'k' ? 1024 : 1); + } + return $val; +} + +# convert config value to array reference, if needed +sub config_to_multi { + my $val = shift; + + return ref($val) ? $val : (defined($val) ? 
[ $val ] : []); +} + +sub git_get_project_config { + my ($key, $type) = @_; + + return unless defined $git_dir; + + # key sanity check + return unless ($key); + # only subsection, if exists, is case sensitive, + # and not lowercased by 'git config -z -l' + if (my ($hi, $mi, $lo) = ($key =~ /^([^.]*)\.(.*)\.([^.]*)$/)) { + $lo =~ s/_//g; + $key = join(".", lc($hi), $mi, lc($lo)); + return if ($lo =~ /\W/ || $hi =~ /\W/); + } else { + $key = lc($key); + $key =~ s/_//g; + return if ($key =~ /\W/); + } + $key =~ s/^gitweb\.//; + + # type sanity check + if (defined $type) { + $type =~ s/^--//; + $type = undef + unless ($type eq 'bool' || $type eq 'int'); + } + + # get config + if (!defined $config_file || + $config_file ne "$git_dir/config") { + %config = git_parse_project_config('gitweb'); + $config_file = "$git_dir/config"; + } + + # check if config variable (key) exists + return unless exists $config{"gitweb.$key"}; + + # ensure given type + if (!defined $type) { + return $config{"gitweb.$key"}; + } elsif ($type eq 'bool') { + # backward compatibility: 'git config --bool' returns true/false + return config_to_bool($config{"gitweb.$key"}) ? 'true' : 'false'; + } elsif ($type eq 'int') { + return config_to_int($config{"gitweb.$key"}); + } + return $config{"gitweb.$key"}; +} + +# get hash of given path at given ref +sub git_get_hash_by_path { + my $base = shift; + my $path = shift || return undef; + my $type = shift; + + $path =~ s,/+$,,; + + open my $fd, "-|", git_cmd(), "ls-tree", $base, "--", $path + or die_error(500, "Open git-ls-tree failed"); + my $line = <$fd>; + close $fd or return undef; + + if (!defined $line) { + # there is no tree or hash given by $path at $base + return undef; + } + + #'100644 blob 0fa3f3a66fb6a137f6ec2c19351ed4d807070ffa panic.c' + $line =~ m/^([0-9]+) (.+) ($oid_regex)\t/; + if (defined $type && $type ne $2) { + # type doesn't match + return undef; + } + return $3; +} + +# get path of entry with given hash at given tree-ish (ref) +# used to get 'from' filename for combined diff (merge commit) for renames +sub git_get_path_by_hash { + my $base = shift || return; + my $hash = shift || return; + + local $/ = "\0"; + + open my $fd, "-|", git_cmd(), "ls-tree", '-r', '-t', '-z', $base + or return undef; + while (my $line = <$fd>) { + chomp $line; + + #'040000 tree 595596a6a9117ddba9fe379b6b012b558bac8423 gitweb' + #'100644 blob e02e90f0429be0d2a69b76571101f20b8f75530f gitweb/README' + if ($line =~ m/(?:[0-9]+) (?:.+) $hash\t(.+)$/) { + close $fd; + return $1; + } + } + close $fd; + return undef; +} + +## ...................................................................... +## git utility functions, directly accessing git repository + +# get the value of config variable either from file named as the variable +# itself in the repository ($GIT_DIR/$name file), or from gitweb.$name +# configuration variable in the repository config file. 
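+# For example (illustration only): git_get_file_or_project_config('foo.git', 'description')
+# returns the contents of $projectroot/foo.git/description if that file exists,
+# and otherwise falls back to the gitweb.description configuration variable.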
+sub git_get_file_or_project_config { + my ($path, $name) = @_; + + $git_dir = "$projectroot/$path"; + open my $fd, '<', "$git_dir/$name" + or return git_get_project_config($name); + my $conf = <$fd>; + close $fd; + if (defined $conf) { + chomp $conf; + } + return $conf; +} + +sub git_get_project_description { + my $path = shift; + return git_get_file_or_project_config($path, 'description'); +} + +sub git_get_project_category { + my $path = shift; + return git_get_file_or_project_config($path, 'category'); +} + + +# supported formats: +# * $GIT_DIR/ctags/ file (in 'ctags' subdirectory) +# - if its contents is a number, use it as tag weight, +# - otherwise add a tag with weight 1 +# * $GIT_DIR/ctags file, each line is a tag (with weight 1) +# the same value multiple times increases tag weight +# * `gitweb.ctag' multi-valued repo config variable +sub git_get_project_ctags { + my $project = shift; + my $ctags = {}; + + $git_dir = "$projectroot/$project"; + if (opendir my $dh, "$git_dir/ctags") { + my @files = grep { -f $_ } map { "$git_dir/ctags/$_" } readdir($dh); + foreach my $tagfile (@files) { + open my $ct, '<', $tagfile + or next; + my $val = <$ct>; + chomp $val if $val; + close $ct; + + (my $ctag = $tagfile) =~ s#.*/##; + if ($val =~ /^\d+$/) { + $ctags->{$ctag} = $val; + } else { + $ctags->{$ctag} = 1; + } + } + closedir $dh; + + } elsif (open my $fh, '<', "$git_dir/ctags") { + while (my $line = <$fh>) { + chomp $line; + $ctags->{$line}++ if $line; + } + close $fh; + + } else { + my $taglist = config_to_multi(git_get_project_config('ctag')); + foreach my $tag (@$taglist) { + $ctags->{$tag}++; + } + } + + return $ctags; +} + +# return hash, where keys are content tags ('ctags'), +# and values are sum of weights of given tag in every project +sub git_gather_all_ctags { + my $projects = shift; + my $ctags = {}; + + foreach my $p (@$projects) { + foreach my $ct (keys %{$p->{'ctags'}}) { + $ctags->{$ct} += $p->{'ctags'}->{$ct}; + } + } + + return $ctags; +} + +sub git_populate_project_tagcloud { + my $ctags = shift; + + # First, merge different-cased tags; tags vote on casing + my %ctags_lc; + foreach (keys %$ctags) { + $ctags_lc{lc $_}->{count} += $ctags->{$_}; + if (not $ctags_lc{lc $_}->{topcount} + or $ctags_lc{lc $_}->{topcount} < $ctags->{$_}) { + $ctags_lc{lc $_}->{topcount} = $ctags->{$_}; + $ctags_lc{lc $_}->{topname} = $_; + } + } + + my $cloud; + my $matched = $input_params{'ctag'}; + if (eval { require HTML::TagCloud; 1; }) { + $cloud = HTML::TagCloud->new; + foreach my $ctag (sort keys %ctags_lc) { + # Pad the title with spaces so that the cloud looks + # less crammed. + my $title = esc_html($ctags_lc{$ctag}->{topname}); + $title =~ s/ / /g; + $title =~ s/^/ /g; + $title =~ s/$/ /g; + if (defined $matched && $matched eq $ctag) { + $title = qq($title); + } + $cloud->add($title, href(project=>undef, ctag=>$ctag), + $ctags_lc{$ctag}->{count}); + } + } else { + $cloud = {}; + foreach my $ctag (keys %ctags_lc) { + my $title = esc_html($ctags_lc{$ctag}->{topname}, -nbsp=>1); + if (defined $matched && $matched eq $ctag) { + $title = qq($title); + } + $cloud->{$ctag}{count} = $ctags_lc{$ctag}->{count}; + $cloud->{$ctag}{ctag} = + $cgi->a({-href=>href(project=>undef, ctag=>$ctag)}, $title); + } + } + return $cloud; +} + +sub git_show_project_tagcloud { + my ($cloud, $count) = @_; + if (ref $cloud eq 'HTML::TagCloud') { + return $cloud->html_and_css($count); + } else { + my @tags = sort { $cloud->{$a}->{'count'} <=> $cloud->{$b}->{'count'} } keys %$cloud; + return + '
<div id="htmltagcloud"'.($project ? '' : ' align="center"').'>' . + join (', ', map { + $cloud->{$_}->{'ctag'} + } splice(@tags, 0, $count)) . + '</div>
'; + } +} + +sub git_get_project_url_list { + my $path = shift; + + $git_dir = "$projectroot/$path"; + open my $fd, '<', "$git_dir/cloneurl" + or return wantarray ? + @{ config_to_multi(git_get_project_config('url')) } : + config_to_multi(git_get_project_config('url')); + my @git_project_url_list = map { chomp; $_ } <$fd>; + close $fd; + + return wantarray ? @git_project_url_list : \@git_project_url_list; +} + +sub git_get_projects_list { + my $filter = shift || ''; + my $paranoid = shift; + my @list; + + if (-d $projects_list) { + # search in directory + my $dir = $projects_list; + # remove the trailing "/" + $dir =~ s!/+$!!; + my $pfxlen = length("$dir"); + my $pfxdepth = ($dir =~ tr!/!!); + # when filtering, search only given subdirectory + if ($filter && !$paranoid) { + $dir .= "/$filter"; + $dir =~ s!/+$!!; + } + + File::Find::find({ + follow_fast => 1, # follow symbolic links + follow_skip => 2, # ignore duplicates + dangling_symlinks => 0, # ignore dangling symlinks, silently + wanted => sub { + # global variables + our $project_maxdepth; + our $projectroot; + # skip project-list toplevel, if we get it. + return if (m!^[/.]$!); + # only directories can be git repositories + return unless (-d $_); + # need search permission + return unless (-x $_); + # don't traverse too deep (Find is super slow on os x) + # $project_maxdepth excludes depth of $projectroot + if (($File::Find::name =~ tr!/!!) - $pfxdepth > $project_maxdepth) { + $File::Find::prune = 1; + return; + } + + my $path = substr($File::Find::name, $pfxlen + 1); + # paranoidly only filter here + if ($paranoid && $filter && $path !~ m!^\Q$filter\E/!) { + next; + } + # we check related file in $projectroot + if (check_export_ok("$projectroot/$path")) { + push @list, { path => $path }; + $File::Find::prune = 1; + } + }, + }, "$dir"); + + } elsif (-f $projects_list) { + # read from file(url-encoded): + # 'git%2Fgit.git Linus+Torvalds' + # 'libs%2Fklibc%2Fklibc.git H.+Peter+Anvin' + # 'linux%2Fhotplug%2Fudev.git Greg+Kroah-Hartman' + open my $fd, '<', $projects_list or return; + PROJECT: + while (my $line = <$fd>) { + chomp $line; + my ($path, $owner) = split ' ', $line; + $path = unescape($path); + $owner = unescape($owner); + if (!defined $path) { + next; + } + # if $filter is rpovided, check if $path begins with $filter + if ($filter && $path !~ m!^\Q$filter\E/!) { + next; + } + if (check_export_ok("$projectroot/$path")) { + my $pr = { + path => $path + }; + if ($owner) { + $pr->{'owner'} = to_utf8($owner); + } + push @list, $pr; + } + } + close $fd; + } + return @list; +} + +# written with help of Tree::Trie module (Perl Artistic License, GPL compatible) +# as side effects it sets 'forks' field to list of forks for forked projects +sub filter_forks_from_projects_list { + my $projects = shift; + + my %trie; # prefix tree of directories (path components) + # generate trie out of those directories that might contain forks + foreach my $pr (@$projects) { + my $path = $pr->{'path'}; + $path =~ s/\.git$//; # forks of 'repo.git' are in 'repo/' directory + next if ($path =~ m!/$!); # skip non-bare repositories, e.g. 
'repo/.git' + next unless ($path); # skip '.git' repository: tests, git-instaweb + next unless (-d "$projectroot/$path"); # containing directory exists + $pr->{'forks'} = []; # there can be 0 or more forks of project + + # add to trie + my @dirs = split('/', $path); + # walk the trie, until either runs out of components or out of trie + my $ref = \%trie; + while (scalar @dirs && + exists($ref->{$dirs[0]})) { + $ref = $ref->{shift @dirs}; + } + # create rest of trie structure from rest of components + foreach my $dir (@dirs) { + $ref = $ref->{$dir} = {}; + } + # create end marker, store $pr as a data + $ref->{''} = $pr if (!exists $ref->{''}); + } + + # filter out forks, by finding shortest prefix match for paths + my @filtered; + PROJECT: + foreach my $pr (@$projects) { + # trie lookup + my $ref = \%trie; + DIR: + foreach my $dir (split('/', $pr->{'path'})) { + if (exists $ref->{''}) { + # found [shortest] prefix, is a fork - skip it + push @{$ref->{''}{'forks'}}, $pr; + next PROJECT; + } + if (!exists $ref->{$dir}) { + # not in trie, cannot have prefix, not a fork + push @filtered, $pr; + next PROJECT; + } + # If the dir is there, we just walk one step down the trie. + $ref = $ref->{$dir}; + } + # we ran out of trie + # (shouldn't happen: it's either no match, or end marker) + push @filtered, $pr; + } + + return @filtered; +} + +# note: fill_project_list_info must be run first, +# for 'descr_long' and 'ctags' to be filled +sub search_projects_list { + my ($projlist, %opts) = @_; + my $tagfilter = $opts{'tagfilter'}; + my $search_re = $opts{'search_regexp'}; + + return @$projlist + unless ($tagfilter || $search_re); + + # searching projects require filling to be run before it; + fill_project_list_info($projlist, + $tagfilter ? 'ctags' : (), + $search_re ? 
('path', 'descr') : ()); + my @projects; + PROJECT: + foreach my $pr (@$projlist) { + + if ($tagfilter) { + next unless ref($pr->{'ctags'}) eq 'HASH'; + next unless + grep { lc($_) eq lc($tagfilter) } keys %{$pr->{'ctags'}}; + } + + if ($search_re) { + next unless + $pr->{'path'} =~ /$search_re/ || + $pr->{'descr_long'} =~ /$search_re/; + } + + push @projects, $pr; + } + + return @projects; +} + +our $gitweb_project_owner = undef; +sub git_get_project_list_from_file { + + return if (defined $gitweb_project_owner); + + $gitweb_project_owner = {}; + # read from file (url-encoded): + # 'git%2Fgit.git Linus+Torvalds' + # 'libs%2Fklibc%2Fklibc.git H.+Peter+Anvin' + # 'linux%2Fhotplug%2Fudev.git Greg+Kroah-Hartman' + if (-f $projects_list) { + open(my $fd, '<', $projects_list); + while (my $line = <$fd>) { + chomp $line; + my ($pr, $ow) = split ' ', $line; + $pr = unescape($pr); + $ow = unescape($ow); + $gitweb_project_owner->{$pr} = to_utf8($ow); + } + close $fd; + } +} + +sub git_get_project_owner { + my $project = shift; + my $owner; + + return undef unless $project; + $git_dir = "$projectroot/$project"; + + if (!defined $gitweb_project_owner) { + git_get_project_list_from_file(); + } + + if (exists $gitweb_project_owner->{$project}) { + $owner = $gitweb_project_owner->{$project}; + } + if (!defined $owner){ + $owner = git_get_project_config('owner'); + } + if (!defined $owner) { + $owner = get_file_owner("$git_dir"); + } + + return $owner; +} + +sub git_get_last_activity { + my ($path) = @_; + my $fd; + + $git_dir = "$projectroot/$path"; + open($fd, "-|", git_cmd(), 'for-each-ref', + '--format=%(committer)', + '--sort=-committerdate', + '--count=1', + map { "refs/$_" } get_branch_refs ()) or return; + my $most_recent = <$fd>; + close $fd or return; + if (defined $most_recent && + $most_recent =~ / (\d+) [-+][01]\d\d\d$/) { + my $timestamp = $1; + my $age = time - $timestamp; + return ($age, age_string($age)); + } + return (undef, undef); +} + +# Implementation note: when a single remote is wanted, we cannot use 'git +# remote show -n' because that command always work (assuming it's a remote URL +# if it's not defined), and we cannot use 'git remote show' because that would +# try to make a network roundtrip. So the only way to find if that particular +# remote is defined is to walk the list provided by 'git remote -v' and stop if +# and when we find what we want. +sub git_get_remotes_list { + my $wanted = shift; + my %remotes = (); + + open my $fd, '-|' , git_cmd(), 'remote', '-v'; + return unless $fd; + while (my $remote = <$fd>) { + chomp $remote; + $remote =~ s!\t(.*?)\s+\((\w+)\)$!!; + next if $wanted and not $remote eq $wanted; + my ($url, $key) = ($1, $2); + + $remotes{$remote} ||= { 'heads' => () }; + $remotes{$remote}{$key} = $url; + } + close $fd or return; + return wantarray ? %remotes : \%remotes; +} + +# Takes a hash of remotes as first parameter and fills it by adding the +# available remote heads for each of the indicated remotes. +sub fill_remote_heads { + my $remotes = shift; + my @heads = map { "remotes/$_" } keys %$remotes; + my @remoteheads = git_get_heads_list(undef, @heads); + foreach my $remote (keys %$remotes) { + $remotes->{$remote}{'heads'} = [ grep { + $_->{'name'} =~ s!^$remote/!! 
+ } @remoteheads ]; + } +} + +sub git_get_references { + my $type = shift || ""; + my %refs; + # 5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11 + # c39ae07f393806ccf406ef966e9a15afc43cc36a refs/tags/v2.6.11^{} + open my $fd, "-|", git_cmd(), "show-ref", "--dereference", + ($type ? ("--", "refs/$type") : ()) # use -- if $type + or return; + + while (my $line = <$fd>) { + chomp $line; + if ($line =~ m!^($oid_regex)\srefs/($type.*)$!) { + if (defined $refs{$1}) { + push @{$refs{$1}}, $2; + } else { + $refs{$1} = [ $2 ]; + } + } + } + close $fd or return; + return \%refs; +} + +sub git_get_rev_name_tags { + my $hash = shift || return undef; + + open my $fd, "-|", git_cmd(), "name-rev", "--tags", $hash + or return; + my $name_rev = <$fd>; + close $fd; + + if ($name_rev =~ m|^$hash tags/(.*)$|) { + return $1; + } else { + # catches also '$hash undefined' output + return undef; + } +} + +## ---------------------------------------------------------------------- +## parse to hash functions + +sub parse_date { + my $epoch = shift; + my $tz = shift || "-0000"; + + my %date; + my @months = ("Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"); + my @days = ("Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"); + my ($sec, $min, $hour, $mday, $mon, $year, $wday, $yday) = gmtime($epoch); + $date{'hour'} = $hour; + $date{'minute'} = $min; + $date{'mday'} = $mday; + $date{'day'} = $days[$wday]; + $date{'month'} = $months[$mon]; + $date{'rfc2822'} = sprintf "%s, %d %s %4d %02d:%02d:%02d +0000", + $days[$wday], $mday, $months[$mon], 1900+$year, $hour ,$min, $sec; + $date{'mday-time'} = sprintf "%d %s %02d:%02d", + $mday, $months[$mon], $hour ,$min; + $date{'iso-8601'} = sprintf "%04d-%02d-%02dT%02d:%02d:%02dZ", + 1900+$year, 1+$mon, $mday, $hour ,$min, $sec; + + my ($tz_sign, $tz_hour, $tz_min) = + ($tz =~ m/^([-+])(\d\d)(\d\d)$/); + $tz_sign = ($tz_sign eq '-' ? 
-1 : +1); + my $local = $epoch + $tz_sign*((($tz_hour*60) + $tz_min)*60); + ($sec, $min, $hour, $mday, $mon, $year, $wday, $yday) = gmtime($local); + $date{'hour_local'} = $hour; + $date{'minute_local'} = $min; + $date{'tz_local'} = $tz; + $date{'iso-tz'} = sprintf("%04d-%02d-%02d %02d:%02d:%02d %s", + 1900+$year, $mon+1, $mday, + $hour, $min, $sec, $tz); + return %date; +} + +sub hide_mailaddrs_if_private { + my $line = shift; + return $line unless gitweb_check_feature('email-privacy'); + $line =~ s/<[^@>]+@[^>]+>//g; + return $line; +} + +sub parse_tag { + my $tag_id = shift; + my %tag; + my @comment; + + open my $fd, "-|", git_cmd(), "cat-file", "tag", $tag_id or return; + $tag{'id'} = $tag_id; + while (my $line = <$fd>) { + chomp $line; + if ($line =~ m/^object ($oid_regex)$/) { + $tag{'object'} = $1; + } elsif ($line =~ m/^type (.+)$/) { + $tag{'type'} = $1; + } elsif ($line =~ m/^tag (.+)$/) { + $tag{'name'} = $1; + } elsif ($line =~ m/^tagger (.*) ([0-9]+) (.*)$/) { + $tag{'author'} = hide_mailaddrs_if_private($1); + $tag{'author_epoch'} = $2; + $tag{'author_tz'} = $3; + if ($tag{'author'} =~ m/^([^<]+) <([^>]*)>/) { + $tag{'author_name'} = $1; + $tag{'author_email'} = $2; + } else { + $tag{'author_name'} = $tag{'author'}; + } + } elsif ($line =~ m/--BEGIN/) { + push @comment, $line; + last; + } elsif ($line eq "") { + last; + } + } + push @comment, <$fd>; + $tag{'comment'} = \@comment; + close $fd or return; + if (!defined $tag{'name'}) { + return + }; + return %tag +} + +sub parse_commit_text { + my ($commit_text, $withparents) = @_; + my @commit_lines = split '\n', $commit_text; + my %co; + + pop @commit_lines; # Remove '\0' + + if (! @commit_lines) { + return; + } + + my $header = shift @commit_lines; + if ($header !~ m/^$oid_regex/) { + return; + } + ($co{'id'}, my @parents) = split ' ', $header; + while (my $line = shift @commit_lines) { + last if $line eq "\n"; + if ($line =~ m/^tree ($oid_regex)$/) { + $co{'tree'} = $1; + } elsif ((!defined $withparents) && ($line =~ m/^parent ($oid_regex)$/)) { + push @parents, $1; + } elsif ($line =~ m/^author (.*) ([0-9]+) (.*)$/) { + $co{'author'} = hide_mailaddrs_if_private(to_utf8($1)); + $co{'author_epoch'} = $2; + $co{'author_tz'} = $3; + if ($co{'author'} =~ m/^([^<]+) <([^>]*)>/) { + $co{'author_name'} = $1; + $co{'author_email'} = $2; + } else { + $co{'author_name'} = $co{'author'}; + } + } elsif ($line =~ m/^committer (.*) ([0-9]+) (.*)$/) { + $co{'committer'} = hide_mailaddrs_if_private(to_utf8($1)); + $co{'committer_epoch'} = $2; + $co{'committer_tz'} = $3; + if ($co{'committer'} =~ m/^([^<]+) <([^>]*)>/) { + $co{'committer_name'} = $1; + $co{'committer_email'} = $2; + } else { + $co{'committer_name'} = $co{'committer'}; + } + } + } + if (!defined $co{'tree'}) { + return; + }; + $co{'parents'} = \@parents; + $co{'parent'} = $parents[0]; + + foreach my $title (@commit_lines) { + $title =~ s/^ //; + if ($title ne "") { + $co{'title'} = chop_str($title, 80, 5); + $co{'title_short'} = chop_str($title, 50, 5); + last; + } + } + if (! defined $co{'title'} || $co{'title'} eq "") { + $co{'title'} = $co{'title_short'} = '(no commit message)'; + } + # remove added spaces, redact e-mail addresses if applicable. 
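+ # e.g. (illustration) "Signed-off-by: A U Thor <author@example.com>" becomes
+ # "Signed-off-by: A U Thor " once the 'email-privacy' feature is enabled.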
+ foreach my $line (@commit_lines) { + $line =~ s/^ //; + $line = hide_mailaddrs_if_private($line); + } + $co{'comment'} = \@commit_lines; + + my $age = time - $co{'committer_epoch'}; + $co{'age'} = $age; + $co{'age_string'} = age_string($age); + my ($sec, $min, $hour, $mday, $mon, $year, $wday, $yday) = gmtime($co{'committer_epoch'}); + if ($age > 60*60*24*7*2) { + $co{'age_string_date'} = sprintf "%4i-%02u-%02i", 1900 + $year, $mon+1, $mday; + $co{'age_string_age'} = $co{'age_string'}; + } else { + $co{'age_string_date'} = $co{'age_string'}; + $co{'age_string_age'} = sprintf "%4i-%02u-%02i", 1900 + $year, $mon+1, $mday; + } + return %co; +} + +sub parse_commit { + my ($commit_id) = @_; + my %co; + + local $/ = "\0"; + + open my $fd, "-|", git_cmd(), "rev-list", + "--parents", + "--header", + "--max-count=1", + $commit_id, + "--", + or die_error(500, "Open git-rev-list failed"); + %co = parse_commit_text(<$fd>, 1); + close $fd; + + return %co; +} + +sub parse_commits { + my ($commit_id, $maxcount, $skip, $filename, @args) = @_; + my @cos; + + $maxcount ||= 1; + $skip ||= 0; + + local $/ = "\0"; + + open my $fd, "-|", git_cmd(), "rev-list", + "--header", + @args, + ("--max-count=" . $maxcount), + ("--skip=" . $skip), + @extra_options, + $commit_id, + "--", + ($filename ? ($filename) : ()) + or die_error(500, "Open git-rev-list failed"); + while (my $line = <$fd>) { + my %co = parse_commit_text($line); + push @cos, \%co; + } + close $fd; + + return wantarray ? @cos : \@cos; +} + +# parse line of git-diff-tree "raw" output +sub parse_difftree_raw_line { + my $line = shift; + my %res; + + # ':100644 100644 03b218260e99b78c6df0ed378e59ed9205ccc96d 3b93d5e7cc7f7dd4ebed13a5cc1a4ad976fc94d8 M ls-files.c' + # ':100644 100644 7f9281985086971d3877aca27704f2aaf9c448ce bc190ebc71bbd923f2b728e505408f5e54bd073a M rev-tree.c' + if ($line =~ m/^:([0-7]{6}) ([0-7]{6}) ($oid_regex) ($oid_regex) (.)([0-9]{0,3})\t(.*)$/) { + $res{'from_mode'} = $1; + $res{'to_mode'} = $2; + $res{'from_id'} = $3; + $res{'to_id'} = $4; + $res{'status'} = $5; + $res{'similarity'} = $6; + if ($res{'status'} eq 'R' || $res{'status'} eq 'C') { # renamed or copied + ($res{'from_file'}, $res{'to_file'}) = map { unquote($_) } split("\t", $7); + } else { + $res{'from_file'} = $res{'to_file'} = $res{'file'} = unquote($7); + } + } + # '::100755 100755 100755 60e79ca1b01bc8b057abe17ddab484699a7f5fdb 94067cc5f73388f33722d52ae02f44692bc07490 94067cc5f73388f33722d52ae02f44692bc07490 MR git-gui/git-gui.sh' + # combined diff (for merge commit) + elsif ($line =~ s/^(::+)((?:[0-7]{6} )+)((?:$oid_regex )+)([a-zA-Z]+)\t(.*)$//) { + $res{'nparents'} = length($1); + $res{'from_mode'} = [ split(' ', $2) ]; + $res{'to_mode'} = pop @{$res{'from_mode'}}; + $res{'from_id'} = [ split(' ', $3) ]; + $res{'to_id'} = pop @{$res{'from_id'}}; + $res{'status'} = [ split('', $4) ]; + $res{'to_file'} = unquote($5); + } + # 'c512b523472485aef4fff9e57b229d9d243c967f' + elsif ($line =~ m/^($oid_regex)$/) { + $res{'commit'} = $1; + } + + return wantarray ? 
%res : \%res; +} + +# wrapper: return parsed line of git-diff-tree "raw" output +# (the argument might be raw line, or parsed info) +sub parsed_difftree_line { + my $line_or_ref = shift; + + if (ref($line_or_ref) eq "HASH") { + # pre-parsed (or generated by hand) + return $line_or_ref; + } else { + return parse_difftree_raw_line($line_or_ref); + } +} + +# parse line of git-ls-tree output +sub parse_ls_tree_line { + my $line = shift; + my %opts = @_; + my %res; + + if ($opts{'-l'}) { + #'100644 blob 0fa3f3a66fb6a137f6ec2c19351ed4d807070ffa 16717 panic.c' + $line =~ m/^([0-9]+) (.+) ($oid_regex) +(-|[0-9]+)\t(.+)$/s; + + $res{'mode'} = $1; + $res{'type'} = $2; + $res{'hash'} = $3; + $res{'size'} = $4; + if ($opts{'-z'}) { + $res{'name'} = $5; + } else { + $res{'name'} = unquote($5); + } + } else { + #'100644 blob 0fa3f3a66fb6a137f6ec2c19351ed4d807070ffa panic.c' + $line =~ m/^([0-9]+) (.+) ($oid_regex)\t(.+)$/s; + + $res{'mode'} = $1; + $res{'type'} = $2; + $res{'hash'} = $3; + if ($opts{'-z'}) { + $res{'name'} = $4; + } else { + $res{'name'} = unquote($4); + } + } + + return wantarray ? %res : \%res; +} + +# generates _two_ hashes, references to which are passed as 2 and 3 argument +sub parse_from_to_diffinfo { + my ($diffinfo, $from, $to, @parents) = @_; + + if ($diffinfo->{'nparents'}) { + # combined diff + $from->{'file'} = []; + $from->{'href'} = []; + fill_from_file_info($diffinfo, @parents) + unless exists $diffinfo->{'from_file'}; + for (my $i = 0; $i < $diffinfo->{'nparents'}; $i++) { + $from->{'file'}[$i] = + defined $diffinfo->{'from_file'}[$i] ? + $diffinfo->{'from_file'}[$i] : + $diffinfo->{'to_file'}; + if ($diffinfo->{'status'}[$i] ne "A") { # not new (added) file + $from->{'href'}[$i] = href(action=>"blob", + hash_base=>$parents[$i], + hash=>$diffinfo->{'from_id'}[$i], + file_name=>$from->{'file'}[$i]); + } else { + $from->{'href'}[$i] = undef; + } + } + } else { + # ordinary (not combined) diff + $from->{'file'} = $diffinfo->{'from_file'}; + if ($diffinfo->{'status'} ne "A") { # not new (added) file + $from->{'href'} = href(action=>"blob", hash_base=>$hash_parent, + hash=>$diffinfo->{'from_id'}, + file_name=>$from->{'file'}); + } else { + delete $from->{'href'}; + } + } + + $to->{'file'} = $diffinfo->{'to_file'}; + if (!is_deleted($diffinfo)) { # file exists in result + $to->{'href'} = href(action=>"blob", hash_base=>$hash, + hash=>$diffinfo->{'to_id'}, + file_name=>$to->{'file'}); + } else { + delete $to->{'href'}; + } +} + +## ...................................................................... +## parse to array of hashes functions + +sub git_get_heads_list { + my ($limit, @classes) = @_; + @classes = get_branch_refs() unless @classes; + my @patterns = map { "refs/$_" } @classes; + my @headslist; + + open my $fd, '-|', git_cmd(), 'for-each-ref', + ($limit ? '--count='.($limit+1) : ()), + '--sort=-HEAD', '--sort=-committerdate', + '--format=%(objectname) %(refname) %(subject)%00%(committer)', + @patterns + or return; + while (my $line = <$fd>) { + my %ref_item; + + chomp $line; + my ($refinfo, $committerinfo) = split(/\0/, $line); + my ($hash, $name, $title) = split(' ', $refinfo, 3); + my ($committer, $epoch, $tz) = + ($committerinfo =~ /^(.*) ([0-9]+) (.*)$/); + $ref_item{'fullname'} = $name; + my $strip_refs = join '|', map { quotemeta } get_branch_refs(); + $name =~ s!^refs/($strip_refs|remotes)/!!; + $ref_item{'name'} = $name; + # for refs neither in 'heads' nor 'remotes' we want to + # show their ref dir + my $ref_dir = (defined $1) ? 
$1 : ''; + if ($ref_dir ne '' and $ref_dir ne 'heads' and $ref_dir ne 'remotes') { + $ref_item{'name'} .= ' (' . $ref_dir . ')'; + } + + $ref_item{'id'} = $hash; + $ref_item{'title'} = $title || '(no commit message)'; + $ref_item{'epoch'} = $epoch; + if ($epoch) { + $ref_item{'age'} = age_string(time - $ref_item{'epoch'}); + } else { + $ref_item{'age'} = "unknown"; + } + + push @headslist, \%ref_item; + } + close $fd; + + return wantarray ? @headslist : \@headslist; +} + +sub git_get_tags_list { + my $limit = shift; + my @tagslist; + + open my $fd, '-|', git_cmd(), 'for-each-ref', + ($limit ? '--count='.($limit+1) : ()), '--sort=-creatordate', + '--format=%(objectname) %(objecttype) %(refname) '. + '%(*objectname) %(*objecttype) %(subject)%00%(creator)', + 'refs/tags' + or return; + while (my $line = <$fd>) { + my %ref_item; + + chomp $line; + my ($refinfo, $creatorinfo) = split(/\0/, $line); + my ($id, $type, $name, $refid, $reftype, $title) = split(' ', $refinfo, 6); + my ($creator, $epoch, $tz) = + ($creatorinfo =~ /^(.*) ([0-9]+) (.*)$/); + $ref_item{'fullname'} = $name; + $name =~ s!^refs/tags/!!; + + $ref_item{'type'} = $type; + $ref_item{'id'} = $id; + $ref_item{'name'} = $name; + if ($type eq "tag") { + $ref_item{'subject'} = $title; + $ref_item{'reftype'} = $reftype; + $ref_item{'refid'} = $refid; + } else { + $ref_item{'reftype'} = $type; + $ref_item{'refid'} = $id; + } + + if ($type eq "tag" || $type eq "commit") { + $ref_item{'epoch'} = $epoch; + if ($epoch) { + $ref_item{'age'} = age_string(time - $ref_item{'epoch'}); + } else { + $ref_item{'age'} = "unknown"; + } + } + + push @tagslist, \%ref_item; + } + close $fd; + + return wantarray ? @tagslist : \@tagslist; +} + +## ---------------------------------------------------------------------- +## filesystem-related functions + +sub get_file_owner { + my $path = shift; + + my ($dev, $ino, $mode, $nlink, $st_uid, $st_gid, $rdev, $size) = stat($path); + my ($name, $passwd, $uid, $gid, $quota, $comment, $gcos, $dir, $shell) = getpwuid($st_uid); + if (!defined $gcos) { + return undef; + } + my $owner = $gcos; + $owner =~ s/[,;].*$//; + return to_utf8($owner); +} + +# assume that file exists +sub insert_file { + my $filename = shift; + + open my $fd, '<', $filename; + print map { to_utf8($_) } <$fd>; + close $fd; +} + +## ...................................................................... +## mimetype related functions + +sub mimetype_guess_file { + my $filename = shift; + my $mimemap = shift; + -r $mimemap or return undef; + + my %mimemap; + open(my $mh, '<', $mimemap) or return undef; + while (<$mh>) { + next if m/^#/; # skip comments + my ($mimetype, @exts) = split(/\s+/); + foreach my $ext (@exts) { + $mimemap{$ext} = $mimetype; + } + } + close($mh); + + $filename =~ /\.([^.]*)$/; + return $mimemap{$1}; +} + +sub mimetype_guess { + my $filename = shift; + my $mime; + $filename =~ /\./ or return undef; + + if ($mimetypes_file) { + my $file = $mimetypes_file; + if ($file !~ m!^/!) { # if it is relative path + # it is relative to project + $file = "$projectroot/$project/$file"; + } + $mime = mimetype_guess_file($filename, $file); + } + $mime ||= mimetype_guess_file($filename, '/etc/mime.types'); + return $mime; +} + +sub blob_mimetype { + my $fd = shift; + my $filename = shift; + + if ($filename) { + my $mime = mimetype_guess($filename); + $mime and return $mime; + } + + # just in case + return $default_blob_plain_mimetype unless $fd; + + if (-T $fd) { + return 'text/plain'; + } elsif (! 
$filename) { + return 'application/octet-stream'; + } elsif ($filename =~ m/\.png$/i) { + return 'image/png'; + } elsif ($filename =~ m/\.gif$/i) { + return 'image/gif'; + } elsif ($filename =~ m/\.jpe?g$/i) { + return 'image/jpeg'; + } else { + return 'application/octet-stream'; + } +} + +sub blob_contenttype { + my ($fd, $file_name, $type) = @_; + + $type ||= blob_mimetype($fd, $file_name); + if ($type eq 'text/plain' && defined $default_text_plain_charset) { + $type .= "; charset=$default_text_plain_charset"; + } + + return $type; +} + +# guess file syntax for syntax highlighting; return undef if no highlighting +# the name of syntax can (in the future) depend on syntax highlighter used +sub guess_file_syntax { + my ($highlight, $file_name) = @_; + return undef unless ($highlight && defined $file_name); + my $basename = basename($file_name, '.in'); + return $highlight_basename{$basename} + if exists $highlight_basename{$basename}; + + $basename =~ /\.([^.]*)$/; + my $ext = $1 or return undef; + return $highlight_ext{$ext} + if exists $highlight_ext{$ext}; + + return undef; +} + +# run highlighter and return FD of its output, +# or return original FD if no highlighting +sub run_highlighter { + my ($fd, $highlight, $syntax) = @_; + return $fd unless ($highlight); + + close $fd; + my $syntax_arg = (defined $syntax) ? "--syntax $syntax" : "--force"; + open $fd, quote_command(git_cmd(), "cat-file", "blob", $hash)." | ". + quote_command($^X, '-CO', '-MEncode=decode,FB_DEFAULT', '-pse', + '$_ = decode($fe, $_, FB_DEFAULT) if !utf8::decode($_);', + '--', "-fe=$fallback_encoding")." | ". + quote_command($highlight_bin). + " --replace-tabs=8 --fragment $syntax_arg |" + or die_error(500, "Couldn't open file or run syntax highlighter"); + return $fd; +} + +## ====================================================================== +## functions printing HTML: header, footer, error page + +sub get_page_title { + my $title = to_utf8($site_name); + + unless (defined $project) { + if (defined $project_filter) { + $title .= " - projects in '" . esc_path($project_filter) . "'"; + } + return $title; + } + $title .= " - " . to_utf8($project); + + return $title unless (defined $action); + $title .= "/$action"; # $action is US-ASCII (7bit ASCII) + + return $title unless (defined $file_name); + $title .= " - " . esc_path($file_name); + if ($action eq "tree" && $file_name !~ m|/$|) { + $title .= "/"; + } + + return $title; +} + +sub get_content_type_html { + # require explicit support from the UA if we are to send the page as + # 'application/xhtml+xml', otherwise send it as plain old 'text/html'. + # we have to do this because MSIE sometimes globs '*/*', pretending to + # support xhtml+xml but choking when it gets what it asked for. 
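+ # e.g. "Accept: application/xhtml+xml,text/html;q=0.9" gets the XHTML content type, while a bare "Accept: */*" falls back to text/html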
+ if (defined $cgi->http('HTTP_ACCEPT') && + $cgi->http('HTTP_ACCEPT') =~ m/(,|;|\s|^)application\/xhtml\+xml(,|;|\s|$)/ && + $cgi->Accept('application/xhtml+xml') != 0) { + return 'application/xhtml+xml'; + } else { + return 'text/html'; + } +} + +sub print_feed_meta { + if (defined $project) { + my %href_params = get_feed_info(); + if (!exists $href_params{'-title'}) { + $href_params{'-title'} = 'log'; + } + + foreach my $format (qw(RSS Atom)) { + my $type = lc($format); + my %link_attr = ( + '-rel' => 'alternate', + '-title' => esc_attr("$project - $href_params{'-title'} - $format feed"), + '-type' => "application/$type+xml" + ); + + $href_params{'extra_options'} = undef; + $href_params{'action'} = $type; + $link_attr{'-href'} = esc_attr(href(%href_params)); + print "\n"; + + $href_params{'extra_options'} = '--no-merges'; + $link_attr{'-href'} = esc_attr(href(%href_params)); + $link_attr{'-title'} .= ' (no merges)'; + print "\n"; + } + + } else { + printf(''."\n", + esc_attr($site_name), + esc_attr(href(project=>undef, action=>"project_index"))); + printf(''."\n", + esc_attr($site_name), + esc_attr(href(project=>undef, action=>"opml"))); + } +} + +sub print_header_links { + my $status = shift; + + # print out each stylesheet that exist, providing backwards capability + # for those people who defined $stylesheet in a config file + if (defined $stylesheet) { + print ''."\n"; + } else { + foreach my $stylesheet (@stylesheets) { + next unless $stylesheet; + print ''."\n"; + } + } + print_feed_meta() + if ($status eq '200 OK'); + if (defined $favicon) { + print qq(\n); + } +} + +sub print_nav_breadcrumbs_path { + my $dirprefix = undef; + while (my $part = shift) { + $dirprefix .= "/" if defined $dirprefix; + $dirprefix .= $part; + print $cgi->a({-href => href(project => undef, + project_filter => $dirprefix, + action => "project_list")}, + esc_html($part)) . " / "; + } +} + +sub print_nav_breadcrumbs { + my %opts = @_; + + for my $crumb (@extra_breadcrumbs, [ $home_link_str => $home_link ]) { + print $cgi->a({-href => esc_url($crumb->[1])}, $crumb->[0]) . " / "; + } + if (defined $project) { + my @dirname = split '/', $project; + my $projectbasename = pop @dirname; + print_nav_breadcrumbs_path(@dirname); + print $cgi->a({-href => href(action=>"summary")}, esc_html($projectbasename)); + if (defined $action) { + my $action_print = $action ; + if (defined $opts{-action_extra}) { + $action_print = $cgi->a({-href => href(action=>$action)}, + $action); + } + print " / $action_print"; + } + if (defined $opts{-action_extra}) { + print " / $opts{-action_extra}"; + } + print "\n"; + } elsif (defined $project_filter) { + print_nav_breadcrumbs_path(split '/', $project_filter); + } +} + +sub print_search_form { + if (!defined $searchtext) { + $searchtext = ""; + } + my $search_hash; + if (defined $hash_base) { + $search_hash = $hash_base; + } elsif (defined $hash) { + $search_hash = $hash; + } else { + $search_hash = "HEAD"; + } + my $action = $my_uri; + my $use_pathinfo = gitweb_check_feature('pathinfo'); + if ($use_pathinfo) { + $action .= "/".esc_url($project); + } + print $cgi->start_form(-method => "get", -action => $action) . + "
\n" . + (!$use_pathinfo && + $cgi->input({-name=>"p", -value=>$project, -type=>"hidden"}) . "\n") . + $cgi->input({-name=>"a", -value=>"search", -type=>"hidden"}) . "\n" . + $cgi->input({-name=>"h", -value=>$search_hash, -type=>"hidden"}) . "\n" . + $cgi->popup_menu(-name => 'st', -default => 'commit', + -values => ['commit', 'grep', 'author', 'committer', 'pickaxe']) . + " " . $cgi->a({-href => href(action=>"search_help"), + -title => "search help" }, "?") . " search:\n", + $cgi->textfield(-name => "s", -value => $searchtext, -override => 1) . "\n" . + "" . + $cgi->checkbox(-name => 'sr', -value => 1, -label => 're', + -checked => $search_use_regexp) . + "" . + "
" . + $cgi->end_form() . "\n"; +} + +sub git_header_html { + my $status = shift || "200 OK"; + my $expires = shift; + my %opts = @_; + + my $title = get_page_title(); + print $cgi->header(-type=>get_content_type_html(), -charset => 'utf-8', + -status=> $status, -expires => $expires) + unless ($opts{'-no_http_header'}); + my $mod_perl_version = $ENV{'MOD_PERL'} ? " $ENV{'MOD_PERL'}" : ''; + print < + + +]> + + + + + + +$title +EOF + # the stylesheet, favicon etc urls won't work correctly with path_info + # unless we set the appropriate base URL + if ($ENV{'PATH_INFO'}) { + print "\n"; + } + print_header_links($status); + + if (defined $site_html_head_string) { + print to_utf8($site_html_head_string); + } + + print "\n" . + "\n"; + + if (defined $site_header && -f $site_header) { + insert_file($site_header); + } + + print "
\n"; + if (defined $logo) { + print $cgi->a({-href => esc_url($logo_url), + -title => $logo_label}, + $cgi->img({-src => esc_url($logo), + -width => 72, -height => 27, + -alt => "git", + -class => "logo"})); + } + print_nav_breadcrumbs(%opts); + print "
\n"; + + my $have_search = gitweb_check_feature('search'); + if (defined $project && $have_search) { + print_search_form(); + } +} + +sub git_footer_html { + my $feed_class = 'rss_logo'; + + print "
\n"; + if (defined $project) { + my $descr = git_get_project_description($project); + if (defined $descr) { + print "\n"; + } + + my %href_params = get_feed_info(); + if (!%href_params) { + $feed_class .= ' generic'; + } + $href_params{'-title'} ||= 'log'; + + foreach my $format (qw(RSS Atom)) { + $href_params{'action'} = lc($format); + print $cgi->a({-href => href(%href_params), + -title => "$href_params{'-title'} $format feed", + -class => $feed_class}, $format)."\n"; + } + + } else { + print $cgi->a({-href => href(project=>undef, action=>"opml", + project_filter => $project_filter), + -class => $feed_class}, "OPML") . " "; + print $cgi->a({-href => href(project=>undef, action=>"project_index", + project_filter => $project_filter), + -class => $feed_class}, "TXT") . "\n"; + } + print "
\n"; # class="page_footer" + + if (defined $t0 && gitweb_check_feature('timed')) { + print "
\n"; + print 'This page took '. + ''. + tv_interval($t0, [ gettimeofday() ]). + ' seconds '. + ' and '. + ''. + $number_of_git_cmds. + ' git commands '. + " to generate.\n"; + print "
\n"; # class="page_footer" + } + + if (defined $site_footer && -f $site_footer) { + insert_file($site_footer); + } + + print qq!\n!; + if (defined $action && + $action eq 'blame_incremental') { + print qq!\n!; + } else { + my ($jstimezone, $tz_cookie, $datetime_class) = + gitweb_get_feature('javascript-timezone'); + + print qq!\n!; + } + + print "\n" . + ""; +} + +# die_error(, [, ]) +# Example: die_error(404, 'Hash not found') +# By convention, use the following status codes (as defined in RFC 2616): +# 400: Invalid or missing CGI parameters, or +# requested object exists but has wrong type. +# 403: Requested feature (like "pickaxe" or "snapshot") not enabled on +# this server or project. +# 404: Requested object/revision/project doesn't exist. +# 500: The server isn't configured properly, or +# an internal error occurred (e.g. failed assertions caused by bugs), or +# an unknown error occurred (e.g. the git binary died unexpectedly). +# 503: The server is currently unavailable (because it is overloaded, +# or down for maintenance). Generally, this is a temporary state. +sub die_error { + my $status = shift || 500; + my $error = esc_html(shift) || "Internal Server Error"; + my $extra = shift; + my %opts = @_; + + my %http_responses = ( + 400 => '400 Bad Request', + 403 => '403 Forbidden', + 404 => '404 Not Found', + 500 => '500 Internal Server Error', + 503 => '503 Service Unavailable', + ); + git_header_html($http_responses{$status}, undef, %opts); + print < +

+$status - $error +
+EOF + if (defined $extra) { + print "
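+ # optional extra information supplied by the caller is printed below the error message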
\n" . + "$extra\n"; + } + print "\n"; + + git_footer_html(); + goto DONE_GITWEB + unless ($opts{'-error_handler'}); +} + +## ---------------------------------------------------------------------- +## functions printing or outputting HTML: navigation + +sub git_print_page_nav { + my ($current, $suppress, $head, $treehead, $treebase, $extra) = @_; + $extra = '' if !defined $extra; # pager or formats + + my @navs = qw(summary shortlog log commit commitdiff tree); + if ($suppress) { + @navs = grep { $_ ne $suppress } @navs; + } + + my %arg = map { $_ => {action=>$_} } @navs; + if (defined $head) { + for (qw(commit commitdiff)) { + $arg{$_}{'hash'} = $head; + } + if ($current =~ m/^(tree | log | shortlog | commit | commitdiff | search)$/x) { + for (qw(shortlog log)) { + $arg{$_}{'hash'} = $head; + } + } + } + + $arg{'tree'}{'hash'} = $treehead if defined $treehead; + $arg{'tree'}{'hash_base'} = $treebase if defined $treebase; + + my @actions = gitweb_get_feature('actions'); + my %repl = ( + '%' => '%', + 'n' => $project, # project name + 'f' => $git_dir, # project path within filesystem + 'h' => $treehead || '', # current hash ('h' parameter) + 'b' => $treebase || '', # hash base ('hb' parameter) + ); + while (@actions) { + my ($label, $link, $pos) = splice(@actions,0,3); + # insert + @navs = map { $_ eq $pos ? ($_, $label) : $_ } @navs; + # munch munch + $link =~ s/%([%nfhb])/$repl{$1}/g; + $arg{$label}{'_href'} = $link; + } + + print "
\n" . + (join " | ", + map { $_ eq $current ? + $_ : $cgi->a({-href => ($arg{$_}{_href} ? $arg{$_}{_href} : href(%{$arg{$_}}))}, "$_") + } @navs); + print "
\n$extra
\n" . + "
\n"; +} + +# returns a submenu for the navigation of the refs views (tags, heads, +# remotes) with the current view disabled and the remotes view only +# available if the feature is enabled +sub format_ref_views { + my ($current) = @_; + my @ref_views = qw{tags heads}; + push @ref_views, 'remotes' if gitweb_check_feature('remote_heads'); + return join " | ", map { + $_ eq $current ? $_ : + $cgi->a({-href => href(action=>$_)}, $_) + } @ref_views +} + +sub format_paging_nav { + my ($action, $page, $has_next_link) = @_; + my $paging_nav; + + + if ($page > 0) { + $paging_nav .= + $cgi->a({-href => href(-replay=>1, page=>undef)}, "first") . + " ⋅ " . + $cgi->a({-href => href(-replay=>1, page=>$page-1), + -accesskey => "p", -title => "Alt-p"}, "prev"); + } else { + $paging_nav .= "first ⋅ prev"; + } + + if ($has_next_link) { + $paging_nav .= " ⋅ " . + $cgi->a({-href => href(-replay=>1, page=>$page+1), + -accesskey => "n", -title => "Alt-n"}, "next"); + } else { + $paging_nav .= " ⋅ next"; + } + + return $paging_nav; +} + +## ...................................................................... +## functions printing or outputting HTML: div + +sub git_print_header_div { + my ($action, $title, $hash, $hash_base) = @_; + my %args = (); + + $args{'action'} = $action; + $args{'hash'} = $hash if $hash; + $args{'hash_base'} = $hash_base if $hash_base; + + print "
\n" . + $cgi->a({-href => href(%args), -class => "title"}, + $title ? $title : $action) . + "\n
\n"; +} + +sub format_repo_url { + my ($name, $url) = @_; + return "$name$url\n"; +} + +# Group output by placing it in a DIV element and adding a header. +# Options for start_div() can be provided by passing a hash reference as the +# first parameter to the function. +# Options to git_print_header_div() can be provided by passing an array +# reference. This must follow the options to start_div if they are present. +# The content can be a scalar, which is output as-is, a scalar reference, which +# is output after html escaping, an IO handle passed either as *handle or +# *handle{IO}, or a function reference. In the latter case all following +# parameters will be taken as argument to the content function call. +sub git_print_section { + my ($div_args, $header_args, $content); + my $arg = shift; + if (ref($arg) eq 'HASH') { + $div_args = $arg; + $arg = shift; + } + if (ref($arg) eq 'ARRAY') { + $header_args = $arg; + $arg = shift; + } + $content = $arg; + + print $cgi->start_div($div_args); + git_print_header_div(@$header_args); + + if (ref($content) eq 'CODE') { + $content->(@_); + } elsif (ref($content) eq 'SCALAR') { + print esc_html($$content); + } elsif (ref($content) eq 'GLOB' or ref($content) eq 'IO::Handle') { + print <$content>; + } elsif (!ref($content) && defined($content)) { + print $content; + } + + print $cgi->end_div; +} + +sub format_timestamp_html { + my $date = shift; + my $strtime = $date->{'rfc2822'}; + + my (undef, undef, $datetime_class) = + gitweb_get_feature('javascript-timezone'); + if ($datetime_class) { + $strtime = qq!$strtime!; + } + + my $localtime_format = '(%02d:%02d %s)'; + if ($date->{'hour_local'} < 6) { + $localtime_format = '(%02d:%02d %s)'; + } + $strtime .= ' ' . + sprintf($localtime_format, + $date->{'hour_local'}, $date->{'minute_local'}, $date->{'tz_local'}); + + return $strtime; +} + +# Outputs the author name and date in long form +sub git_print_authorship { + my $co = shift; + my %opts = @_; + my $tag = $opts{-tag} || 'div'; + my $author = $co->{'author_name'}; + + my %ad = parse_date($co->{'author_epoch'}, $co->{'author_tz'}); + print "<$tag class=\"author_date\">" . + format_search_author($author, "author", esc_html($author)) . + " [".format_timestamp_html(\%ad)."]". + git_get_avatar($co->{'author_email'}, -pad_before => 1) . + "\n"; +} + +# Outputs table rows containing the full author or committer information, +# in the format expected for 'commit' view (& similar). +# Parameters are a commit hash reference, followed by the list of people +# to output information for. If the list is empty it defaults to both +# author and committer. +sub git_print_authorship_rows { + my $co = shift; + # too bad we can't use @people = @_ || ('author', 'committer') + my @people = @_; + @people = ('author', 'committer') unless @people; + foreach my $who (@people) { + my %wd = parse_date($co->{"${who}_epoch"}, $co->{"${who}_tz"}); + print "$who" . + format_search_author($co->{"${who}_name"}, $who, + esc_html($co->{"${who}_name"})) . " " . + format_search_author($co->{"${who}_email"}, $who, + esc_html("<" . $co->{"${who}_email"} . ">")) . + "" . + git_get_avatar($co->{"${who}_email"}, -size => 'double') . + "\n" . + "" . + "" . + format_timestamp_html(\%wd) . + "" . + "\n"; + } +} + +sub git_print_page_path { + my $name = shift; + my $type = shift; + my $hb = shift; + + + print "
"; + print $cgi->a({-href => href(action=>"tree", hash_base=>$hb), + -title => 'tree root'}, to_utf8("[$project]")); + print " / "; + if (defined $name) { + my @dirname = split '/', $name; + my $basename = pop @dirname; + my $fullname = ''; + + foreach my $dir (@dirname) { + $fullname .= ($fullname ? '/' : '') . $dir; + print $cgi->a({-href => href(action=>"tree", file_name=>$fullname, + hash_base=>$hb), + -title => $fullname}, esc_path($dir)); + print " / "; + } + if (defined $type && $type eq 'blob') { + print $cgi->a({-href => href(action=>"blob_plain", file_name=>$file_name, + hash_base=>$hb), + -title => $name}, esc_path($basename)); + } elsif (defined $type && $type eq 'tree') { + print $cgi->a({-href => href(action=>"tree", file_name=>$file_name, + hash_base=>$hb), + -title => $name}, esc_path($basename)); + print " / "; + } else { + print esc_path($basename); + } + } + print "
\n"; +} + +sub git_print_log { + my $log = shift; + my %opts = @_; + + if ($opts{'-remove_title'}) { + # remove title, i.e. first line of log + shift @$log; + } + # remove leading empty lines + while (defined $log->[0] && $log->[0] eq "") { + shift @$log; + } + + # print log + my $skip_blank_line = 0; + foreach my $line (@$log) { + if ($line =~ m/^\s*([A-Z][-A-Za-z]*-([Bb]y|[Tt]o)|C[Cc]|(Clos|Fix)es): /) { + if (! $opts{'-remove_signoff'}) { + print "" . esc_html($line) . "
\n"; + $skip_blank_line = 1; + } + next; + } + + if ($line =~ m,\s*([a-z]*link): (https?://\S+),i) { + if (! $opts{'-remove_signoff'}) { + print "" . esc_html($1) . ": " . + "" . esc_html($2) . "" . + "
\n"; + $skip_blank_line = 1; + } + next; + } + + # print only one empty line + # do not print empty line after signoff + if ($line eq "") { + next if ($skip_blank_line); + $skip_blank_line = 1; + } else { + $skip_blank_line = 0; + } + + print format_log_line_html($line) . "
\n"; + } + + if ($opts{'-final_empty_line'}) { + # end with single empty line + print "
\n" unless $skip_blank_line; + } +} + +# return link target (what link points to) +sub git_get_link_target { + my $hash = shift; + my $link_target; + + # read link + open my $fd, "-|", git_cmd(), "cat-file", "blob", $hash + or return; + { + local $/ = undef; + $link_target = <$fd>; + } + close $fd + or return; + + return $link_target; +} + +# given link target, and the directory (basedir) the link is in, +# return target of link relative to top directory (top tree); +# return undef if it is not possible (including absolute links). +sub normalize_link_target { + my ($link_target, $basedir) = @_; + + # absolute symlinks (beginning with '/') cannot be normalized + return if (substr($link_target, 0, 1) eq '/'); + + # normalize link target to path from top (root) tree (dir) + my $path; + if ($basedir) { + $path = $basedir . '/' . $link_target; + } else { + # we are in top (root) tree (dir) + $path = $link_target; + } + + # remove //, /./, and /../ + my @path_parts; + foreach my $part (split('/', $path)) { + # discard '.' and '' + next if (!$part || $part eq '.'); + # handle '..' + if ($part eq '..') { + if (@path_parts) { + pop @path_parts; + } else { + # link leads outside repository (outside top dir) + return; + } + } else { + push @path_parts, $part; + } + } + $path = join('/', @path_parts); + + return $path; +} + +# print tree entry (row of git_tree), but without encompassing element +sub git_print_tree_entry { + my ($t, $basedir, $hash_base, $have_blame) = @_; + + my %base_key = (); + $base_key{'hash_base'} = $hash_base if defined $hash_base; + + # The format of a table row is: mode list link. Where mode is + # the mode of the entry, list is the name of the entry, an href, + # and link is the action links of the entry. + + print "" . mode_str($t->{'mode'}) . "\n"; + if (exists $t->{'size'}) { + print "$t->{'size'}\n"; + } + if ($t->{'type'} eq "blob") { + print "" . + $cgi->a({-href => href(action=>"blob", hash=>$t->{'hash'}, + file_name=>"$basedir$t->{'name'}", %base_key), + -class => "list"}, esc_path($t->{'name'})); + if (S_ISLNK(oct $t->{'mode'})) { + my $link_target = git_get_link_target($t->{'hash'}); + if ($link_target) { + my $norm_target = normalize_link_target($link_target, $basedir); + if (defined $norm_target) { + print " -> " . + $cgi->a({-href => href(action=>"object", hash_base=>$hash_base, + file_name=>$norm_target), + -title => $norm_target}, esc_path($link_target)); + } else { + print " -> " . esc_path($link_target); + } + } + } + print "\n"; + print ""; + print $cgi->a({-href => href(action=>"blob", hash=>$t->{'hash'}, + file_name=>"$basedir$t->{'name'}", %base_key)}, + "blob"); + if ($have_blame) { + print " | " . + $cgi->a({-href => href(action=>"blame", hash=>$t->{'hash'}, + file_name=>"$basedir$t->{'name'}", %base_key)}, + "blame"); + } + if (defined $hash_base) { + print " | " . + $cgi->a({-href => href(action=>"history", hash_base=>$hash_base, + hash=>$t->{'hash'}, file_name=>"$basedir$t->{'name'}")}, + "history"); + } + print " | " . + $cgi->a({-href => href(action=>"blob_plain", hash_base=>$hash_base, + file_name=>"$basedir$t->{'name'}")}, + "raw"); + print "\n"; + + } elsif ($t->{'type'} eq "tree") { + print ""; + print $cgi->a({-href => href(action=>"tree", hash=>$t->{'hash'}, + file_name=>"$basedir$t->{'name'}", + %base_key)}, + esc_path($t->{'name'})); + print "\n"; + print ""; + print $cgi->a({-href => href(action=>"tree", hash=>$t->{'hash'}, + file_name=>"$basedir$t->{'name'}", + %base_key)}, + "tree"); + if (defined $hash_base) { + print " | " . 
+ $cgi->a({-href => href(action=>"history", hash_base=>$hash_base, + file_name=>"$basedir$t->{'name'}")}, + "history"); + } + print "\n"; + } else { + # unknown object: we can only present history for it + # (this includes 'commit' object, i.e. submodule support) + print "" . + esc_path($t->{'name'}) . + "\n"; + print ""; + if (defined $hash_base) { + print $cgi->a({-href => href(action=>"history", + hash_base=>$hash_base, + file_name=>"$basedir$t->{'name'}")}, + "history"); + } + print "\n"; + } +} + +## ...................................................................... +## functions printing large fragments of HTML + +# get pre-image filenames for merge (combined) diff +sub fill_from_file_info { + my ($diff, @parents) = @_; + + $diff->{'from_file'} = [ ]; + $diff->{'from_file'}[$diff->{'nparents'} - 1] = undef; + for (my $i = 0; $i < $diff->{'nparents'}; $i++) { + if ($diff->{'status'}[$i] eq 'R' || + $diff->{'status'}[$i] eq 'C') { + $diff->{'from_file'}[$i] = + git_get_path_by_hash($parents[$i], $diff->{'from_id'}[$i]); + } + } + + return $diff; +} + +# is current raw difftree line of file deletion +sub is_deleted { + my $diffinfo = shift; + + return $diffinfo->{'to_id'} eq ('0' x 40) || $diffinfo->{'to_id'} eq ('0' x 64); +} + +# does patch correspond to [previous] difftree raw line +# $diffinfo - hashref of parsed raw diff format +# $patchinfo - hashref of parsed patch diff format +# (the same keys as in $diffinfo) +sub is_patch_split { + my ($diffinfo, $patchinfo) = @_; + + return defined $diffinfo && defined $patchinfo + && $diffinfo->{'to_file'} eq $patchinfo->{'to_file'}; +} + + +sub git_difftree_body { + my ($difftree, $hash, @parents) = @_; + my ($parent) = $parents[0]; + my $have_blame = gitweb_check_feature('blame'); + print "
\n"; + if ($#{$difftree} > 10) { + print(($#{$difftree} + 1) . " files changed:\n"); + } + print "
\n"; + + print " 1 ? "combined " : "") . + "diff_tree\">\n"; + + # header only for combined diff in 'commitdiff' view + my $has_header = @$difftree && @parents > 1 && $action eq 'commitdiff'; + if ($has_header) { + # table header + print "\n" . + "\n"; # filename, patchN link + for (my $i = 0; $i < @parents; $i++) { + my $par = $parents[$i]; + print "\n"; + } + print "\n\n"; + } + + my $alternate = 1; + my $patchno = 0; + foreach my $line (@{$difftree}) { + my $diff = parsed_difftree_line($line); + + if ($alternate) { + print "\n"; + } else { + print "\n"; + } + $alternate ^= 1; + + if (exists $diff->{'nparents'}) { # combined diff + + fill_from_file_info($diff, @parents) + unless exists $diff->{'from_file'}; + + if (!is_deleted($diff)) { + # file exists in the result (child) commit + print "\n"; + } else { + print "\n"; + } + + if ($action eq 'commitdiff') { + # link to patch + $patchno++; + print "\n"; + } + + my $has_history = 0; + my $not_deleted = 0; + for (my $i = 0; $i < $diff->{'nparents'}; $i++) { + my $hash_parent = $parents[$i]; + my $from_hash = $diff->{'from_id'}[$i]; + my $from_path = $diff->{'from_file'}[$i]; + my $status = $diff->{'status'}[$i]; + + $has_history ||= ($status ne 'A'); + $not_deleted ||= ($status ne 'D'); + + if ($status eq 'A') { + print "\n"; + } elsif ($status eq 'D') { + print "\n"; + } else { + if ($diff->{'to_id'} eq $from_hash) { + print "\n"; + } + } + + print "\n"; + + print "\n"; + next; # instead of 'else' clause, to avoid extra indent + } + # else ordinary diff + + my ($to_mode_oct, $to_mode_str, $to_file_type); + my ($from_mode_oct, $from_mode_str, $from_file_type); + if ($diff->{'to_mode'} ne ('0' x 6)) { + $to_mode_oct = oct $diff->{'to_mode'}; + if (S_ISREG($to_mode_oct)) { # only for regular file + $to_mode_str = sprintf("%04o", $to_mode_oct & 0777); # permission bits + } + $to_file_type = file_type($diff->{'to_mode'}); + } + if ($diff->{'from_mode'} ne ('0' x 6)) { + $from_mode_oct = oct $diff->{'from_mode'}; + if (S_ISREG($from_mode_oct)) { # only for regular file + $from_mode_str = sprintf("%04o", $from_mode_oct & 0777); # permission bits + } + $from_file_type = file_type($diff->{'from_mode'}); + } + + if ($diff->{'status'} eq "A") { # created + my $mode_chng = "[new $to_file_type"; + $mode_chng .= " with mode: $to_mode_str" if $to_mode_str; + $mode_chng .= "]"; + print "\n"; + print "\n"; + print "\n"; + + } elsif ($diff->{'status'} eq "D") { # deleted + my $mode_chng = "[deleted $from_file_type]"; + print "\n"; + print "\n"; + print "\n"; + + } elsif ($diff->{'status'} eq "M" || $diff->{'status'} eq "T") { # modified, or type changed + my $mode_chnge = ""; + if ($diff->{'from_mode'} != $diff->{'to_mode'}) { + $mode_chnge = "[changed"; + if ($from_file_type ne $to_file_type) { + $mode_chnge .= " from $from_file_type to $to_file_type"; + } + if (($from_mode_oct & 0777) != ($to_mode_oct & 0777)) { + if ($from_mode_str && $to_mode_str) { + $mode_chnge .= " mode: $from_mode_str->$to_mode_str"; + } elsif ($to_mode_str) { + $mode_chnge .= " mode: $to_mode_str"; + } + } + $mode_chnge .= "]\n"; + } + print "\n"; + print "\n"; + print "\n"; + + } elsif ($diff->{'status'} eq "R" || $diff->{'status'} eq "C") { # renamed or copied + my %status_name = ('R' => 'moved', 'C' => 'copied'); + my $nstatus = $status_name{$diff->{'status'}}; + my $mode_chng = ""; + if ($diff->{'from_mode'} != $diff->{'to_mode'}) { + # mode also for directories, so we cannot use $to_mode_str + $mode_chng = sprintf(", mode: %04o", $to_mode_oct & 0777); + } + print "\n" . 
+ "\n" . + "\n"; + + } # we should not encounter Unmerged (U) or Unknown (X) status + print "\n"; + } + print "" if $has_header; + print "
" . + $cgi->a({-href => href(action=>"commitdiff", + hash=>$hash, hash_parent=>$par), + -title => 'commitdiff to parent number ' . + ($i+1) . ': ' . substr($par,0,7)}, + $i+1) . + " 
" . + $cgi->a({-href => href(action=>"blob", hash=>$diff->{'to_id'}, + file_name=>$diff->{'to_file'}, + hash_base=>$hash), + -class => "list"}, esc_path($diff->{'to_file'})) . + "" . + esc_path($diff->{'to_file'}) . + "" . + $cgi->a({-href => href(-anchor=>"patch$patchno")}, + "patch") . + " | " . + " | " . + $cgi->a({-href => href(action=>"blob", + hash_base=>$hash, + hash=>$from_hash, + file_name=>$from_path)}, + "blob" . ($i+1)) . + " | "; + } + print $cgi->a({-href => href(action=>"blobdiff", + hash=>$diff->{'to_id'}, + hash_parent=>$from_hash, + hash_base=>$hash, + hash_parent_base=>$hash_parent, + file_name=>$diff->{'to_file'}, + file_parent=>$from_path)}, + "diff" . ($i+1)) . + " | "; + if ($not_deleted) { + print $cgi->a({-href => href(action=>"blob", + hash=>$diff->{'to_id'}, + file_name=>$diff->{'to_file'}, + hash_base=>$hash)}, + "blob"); + print " | " if ($has_history); + } + if ($has_history) { + print $cgi->a({-href => href(action=>"history", + file_name=>$diff->{'to_file'}, + hash_base=>$hash)}, + "history"); + } + print "
"; + print $cgi->a({-href => href(action=>"blob", hash=>$diff->{'to_id'}, + hash_base=>$hash, file_name=>$diff->{'file'}), + -class => "list"}, esc_path($diff->{'file'})); + print "$mode_chng"; + if ($action eq 'commitdiff') { + # link to patch + $patchno++; + print $cgi->a({-href => href(-anchor=>"patch$patchno")}, + "patch") . + " | "; + } + print $cgi->a({-href => href(action=>"blob", hash=>$diff->{'to_id'}, + hash_base=>$hash, file_name=>$diff->{'file'})}, + "blob"); + print ""; + print $cgi->a({-href => href(action=>"blob", hash=>$diff->{'from_id'}, + hash_base=>$parent, file_name=>$diff->{'file'}), + -class => "list"}, esc_path($diff->{'file'})); + print "$mode_chng"; + if ($action eq 'commitdiff') { + # link to patch + $patchno++; + print $cgi->a({-href => href(-anchor=>"patch$patchno")}, + "patch") . + " | "; + } + print $cgi->a({-href => href(action=>"blob", hash=>$diff->{'from_id'}, + hash_base=>$parent, file_name=>$diff->{'file'})}, + "blob") . " | "; + if ($have_blame) { + print $cgi->a({-href => href(action=>"blame", hash_base=>$parent, + file_name=>$diff->{'file'})}, + "blame") . " | "; + } + print $cgi->a({-href => href(action=>"history", hash_base=>$parent, + file_name=>$diff->{'file'})}, + "history"); + print ""; + print $cgi->a({-href => href(action=>"blob", hash=>$diff->{'to_id'}, + hash_base=>$hash, file_name=>$diff->{'file'}), + -class => "list"}, esc_path($diff->{'file'})); + print "$mode_chnge"; + if ($action eq 'commitdiff') { + # link to patch + $patchno++; + print $cgi->a({-href => href(-anchor=>"patch$patchno")}, + "patch") . + " | "; + } elsif ($diff->{'to_id'} ne $diff->{'from_id'}) { + # "commit" view and modified file (not onlu mode changed) + print $cgi->a({-href => href(action=>"blobdiff", + hash=>$diff->{'to_id'}, hash_parent=>$diff->{'from_id'}, + hash_base=>$hash, hash_parent_base=>$parent, + file_name=>$diff->{'file'})}, + "diff") . + " | "; + } + print $cgi->a({-href => href(action=>"blob", hash=>$diff->{'to_id'}, + hash_base=>$hash, file_name=>$diff->{'file'})}, + "blob") . " | "; + if ($have_blame) { + print $cgi->a({-href => href(action=>"blame", hash_base=>$hash, + file_name=>$diff->{'file'})}, + "blame") . " | "; + } + print $cgi->a({-href => href(action=>"history", hash_base=>$hash, + file_name=>$diff->{'file'})}, + "history"); + print "" . + $cgi->a({-href => href(action=>"blob", hash_base=>$hash, + hash=>$diff->{'to_id'}, file_name=>$diff->{'to_file'}), + -class => "list"}, esc_path($diff->{'to_file'})) . "[$nstatus from " . + $cgi->a({-href => href(action=>"blob", hash_base=>$parent, + hash=>$diff->{'from_id'}, file_name=>$diff->{'from_file'}), + -class => "list"}, esc_path($diff->{'from_file'})) . + " with " . (int $diff->{'similarity'}) . "% similarity$mode_chng]"; + if ($action eq 'commitdiff') { + # link to patch + $patchno++; + print $cgi->a({-href => href(-anchor=>"patch$patchno")}, + "patch") . + " | "; + } elsif ($diff->{'to_id'} ne $diff->{'from_id'}) { + # "commit" view and modified file (not only pure rename or copy) + print $cgi->a({-href => href(action=>"blobdiff", + hash=>$diff->{'to_id'}, hash_parent=>$diff->{'from_id'}, + hash_base=>$hash, hash_parent_base=>$parent, + file_name=>$diff->{'to_file'}, file_parent=>$diff->{'from_file'})}, + "diff") . + " | "; + } + print $cgi->a({-href => href(action=>"blob", hash=>$diff->{'to_id'}, + hash_base=>$parent, file_name=>$diff->{'to_file'})}, + "blob") . 
" | "; + if ($have_blame) { + print $cgi->a({-href => href(action=>"blame", hash_base=>$hash, + file_name=>$diff->{'to_file'})}, + "blame") . " | "; + } + print $cgi->a({-href => href(action=>"history", hash_base=>$hash, + file_name=>$diff->{'to_file'})}, + "history"); + print "
\n"; +} + +# Print context lines and then rem/add lines in a side-by-side manner. +sub print_sidebyside_diff_lines { + my ($ctx, $rem, $add) = @_; + + # print context block before add/rem block + if (@$ctx) { + print join '', + '
<div class="chunk_block ctx">', + '<div class="old">', + @$ctx, + '</div>', + '<div class="new">', + @$ctx, + '</div>', + '</div>'; + } + + if (!@$add) { + # pure removal + print join '', + '<div class="chunk_block rem">', + '<div class="old">', + @$rem, + '</div>', + '</div>'; + } elsif (!@$rem) { + # pure addition + print join '', + '<div class="chunk_block add">', + '<div class="new">', + @$add, + '</div>', + '</div>'; + } else { + print join '', + '<div class="chunk_block chg">', + '<div class="old">', + @$rem, + '</div>', + '<div class="new">', + @$add, + '</div>', + '</div>
'; + } +} + +# Print context lines and then rem/add lines in inline manner. +sub print_inline_diff_lines { + my ($ctx, $rem, $add) = @_; + + print @$ctx, @$rem, @$add; +} + +# Format removed and added line, mark changed part and HTML-format them. +# Implementation is based on contrib/diff-highlight +sub format_rem_add_lines_pair { + my ($rem, $add, $num_parents) = @_; + + # We need to untabify lines before split()'ing them; + # otherwise offsets would be invalid. + chomp $rem; + chomp $add; + $rem = untabify($rem); + $add = untabify($add); + + my @rem = split(//, $rem); + my @add = split(//, $add); + my ($esc_rem, $esc_add); + # Ignore leading +/- characters for each parent. + my ($prefix_len, $suffix_len) = ($num_parents, 0); + my ($prefix_has_nonspace, $suffix_has_nonspace); + + my $shorter = (@rem < @add) ? @rem : @add; + while ($prefix_len < $shorter) { + last if ($rem[$prefix_len] ne $add[$prefix_len]); + + $prefix_has_nonspace = 1 if ($rem[$prefix_len] !~ /\s/); + $prefix_len++; + } + + while ($prefix_len + $suffix_len < $shorter) { + last if ($rem[-1 - $suffix_len] ne $add[-1 - $suffix_len]); + + $suffix_has_nonspace = 1 if ($rem[-1 - $suffix_len] !~ /\s/); + $suffix_len++; + } + + # Mark lines that are different from each other, but have some common + # part that isn't whitespace. If lines are completely different, don't + # mark them because that would make output unreadable, especially if + # diff consists of multiple lines. + if ($prefix_has_nonspace || $suffix_has_nonspace) { + $esc_rem = esc_html_hl_regions($rem, 'marked', + [$prefix_len, @rem - $suffix_len], -nbsp=>1); + $esc_add = esc_html_hl_regions($add, 'marked', + [$prefix_len, @add - $suffix_len], -nbsp=>1); + } else { + $esc_rem = esc_html($rem, -nbsp=>1); + $esc_add = esc_html($add, -nbsp=>1); + } + + return format_diff_line(\$esc_rem, 'rem'), + format_diff_line(\$esc_add, 'add'); +} + +# HTML-format diff context, removed and added lines. +sub format_ctx_rem_add_lines { + my ($ctx, $rem, $add, $num_parents) = @_; + my (@new_ctx, @new_rem, @new_add); + my $can_highlight = 0; + my $is_combined = ($num_parents > 1); + + # Highlight if every removed line has a corresponding added line. + if (@$add > 0 && @$add == @$rem) { + $can_highlight = 1; + + # Highlight lines in combined diff only if the chunk contains + # diff between the same version, e.g. + # + # - a + # - b + # + c + # + d + # + # Otherwise the highlighting would be confusing. + if ($is_combined) { + for (my $i = 0; $i < @$add; $i++) { + my $prefix_rem = substr($rem->[$i], 0, $num_parents); + my $prefix_add = substr($add->[$i], 0, $num_parents); + + $prefix_rem =~ s/-/+/g; + + if ($prefix_rem ne $prefix_add) { + $can_highlight = 0; + last; + } + } + } + } + + if ($can_highlight) { + for (my $i = 0; $i < @$add; $i++) { + my ($line_rem, $line_add) = format_rem_add_lines_pair( + $rem->[$i], $add->[$i], $num_parents); + push @new_rem, $line_rem; + push @new_add, $line_add; + } + } else { + @new_rem = map { format_diff_line($_, 'rem') } @$rem; + @new_add = map { format_diff_line($_, 'add') } @$add; + } + + @new_ctx = map { format_diff_line($_, 'ctx') } @$ctx; + + return (\@new_ctx, \@new_rem, \@new_add); +} + +# Print context lines and then rem/add lines. 
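+# Uses the side-by-side renderer only for the 'sidebyside' style on non-combined diffs; combined diffs and unknown styles fall back to the inline renderer.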
+sub print_diff_lines { + my ($ctx, $rem, $add, $diff_style, $num_parents) = @_; + my $is_combined = $num_parents > 1; + + ($ctx, $rem, $add) = format_ctx_rem_add_lines($ctx, $rem, $add, + $num_parents); + + if ($diff_style eq 'sidebyside' && !$is_combined) { + print_sidebyside_diff_lines($ctx, $rem, $add); + } else { + # default 'inline' style and unknown styles + print_inline_diff_lines($ctx, $rem, $add); + } +} + +sub print_diff_chunk { + my ($diff_style, $num_parents, $from, $to, @chunk) = @_; + my (@ctx, @rem, @add); + + # The class of the previous line. + my $prev_class = ''; + + return unless @chunk; + + # incomplete last line might be among removed or added lines, + # or both, or among context lines: find which + for (my $i = 1; $i < @chunk; $i++) { + if ($chunk[$i][0] eq 'incomplete') { + $chunk[$i][0] = $chunk[$i-1][0]; + } + } + + # guardian + push @chunk, ["", ""]; + + foreach my $line_info (@chunk) { + my ($class, $line) = @$line_info; + + # print chunk headers + if ($class && $class eq 'chunk_header') { + print format_diff_line($line, $class, $from, $to); + next; + } + + ## print from accumulator when have some add/rem lines or end + # of chunk (flush context lines), or when have add and rem + # lines and new block is reached (otherwise add/rem lines could + # be reordered) + if (!$class || ((@rem || @add) && $class eq 'ctx') || + (@rem && @add && $class ne $prev_class)) { + print_diff_lines(\@ctx, \@rem, \@add, + $diff_style, $num_parents); + @ctx = @rem = @add = (); + } + + ## adding lines to accumulator + # guardian value + last unless $line; + # rem, add or change + if ($class eq 'rem') { + push @rem, $line; + } elsif ($class eq 'add') { + push @add, $line; + } + # context line + if ($class eq 'ctx') { + push @ctx, $line; + } + + $prev_class = $class; + } +} + +sub git_patchset_body { + my ($fd, $diff_style, $difftree, $hash, @hash_parents) = @_; + my ($hash_parent) = $hash_parents[0]; + + my $is_combined = (@hash_parents > 1); + my $patch_idx = 0; + my $patch_number = 0; + my $patch_line; + my $diffinfo; + my $to_name; + my (%from, %to); + my @chunk; # for side-by-side diff + + print "
\n"; + + # skip to first patch + while ($patch_line = <$fd>) { + chomp $patch_line; + + last if ($patch_line =~ m/^diff /); + } + + PATCH: + while ($patch_line) { + + # parse "git diff" header line + if ($patch_line =~ m/^diff --git (\"(?:[^\\\"]*(?:\\.[^\\\"]*)*)\"|[^ "]*) (.*)$/) { + # $1 is from_name, which we do not use + $to_name = unquote($2); + $to_name =~ s!^b/!!; + } elsif ($patch_line =~ m/^diff --(cc|combined) ("?.*"?)$/) { + # $1 is 'cc' or 'combined', which we do not use + $to_name = unquote($2); + } else { + $to_name = undef; + } + + # check if current patch belong to current raw line + # and parse raw git-diff line if needed + if (is_patch_split($diffinfo, { 'to_file' => $to_name })) { + # this is continuation of a split patch + print "
\n"; + } else { + # advance raw git-diff output if needed + $patch_idx++ if defined $diffinfo; + + # read and prepare patch information + $diffinfo = parsed_difftree_line($difftree->[$patch_idx]); + + # compact combined diff output can have some patches skipped + # find which patch (using pathname of result) we are at now; + if ($is_combined) { + while ($to_name ne $diffinfo->{'to_file'}) { + print "
\n" . + format_diff_cc_simplified($diffinfo, @hash_parents) . + "
\n"; # class="patch" + + $patch_idx++; + $patch_number++; + + last if $patch_idx > $#$difftree; + $diffinfo = parsed_difftree_line($difftree->[$patch_idx]); + } + } + + # modifies %from, %to hashes + parse_from_to_diffinfo($diffinfo, \%from, \%to, @hash_parents); + + # this is first patch for raw difftree line with $patch_idx index + # we index @$difftree array from 0, but number patches from 1 + print "
\n"; + } + + # git diff header + #assert($patch_line =~ m/^diff /) if DEBUG; + #assert($patch_line !~ m!$/$!) if DEBUG; # is chomp-ed + $patch_number++; + # print "git diff" header + print format_git_diff_header_line($patch_line, $diffinfo, + \%from, \%to); + + # print extended diff header + print "
\n"; + EXTENDED_HEADER: + while ($patch_line = <$fd>) { + chomp $patch_line; + + last EXTENDED_HEADER if ($patch_line =~ m/^--- |^diff /); + + print format_extended_diff_header_line($patch_line, $diffinfo, + \%from, \%to); + } + print "
\n"; # class="diff extended_header" + + # from-file/to-file diff header + if (! $patch_line) { + print "
\n"; # class="patch" + last PATCH; + } + next PATCH if ($patch_line =~ m/^diff /); + #assert($patch_line =~ m/^---/) if DEBUG; + + my $last_patch_line = $patch_line; + $patch_line = <$fd>; + chomp $patch_line; + #assert($patch_line =~ m/^\+\+\+/) if DEBUG; + + print format_diff_from_to_header($last_patch_line, $patch_line, + $diffinfo, \%from, \%to, + @hash_parents); + + # the patch itself + LINE: + while ($patch_line = <$fd>) { + chomp $patch_line; + + next PATCH if ($patch_line =~ m/^diff /); + + my $class = diff_line_class($patch_line, \%from, \%to); + + if ($class eq 'chunk_header') { + print_diff_chunk($diff_style, scalar @hash_parents, \%from, \%to, @chunk); + @chunk = (); + } + + push @chunk, [ $class, $patch_line ]; + } + + } continue { + if (@chunk) { + print_diff_chunk($diff_style, scalar @hash_parents, \%from, \%to, @chunk); + @chunk = (); + } + print "
\n"; # class="patch" + } + + # for compact combined (--cc) format, with chunk and patch simplification + # the patchset might be empty, but there might be unprocessed raw lines + for (++$patch_idx if $patch_number > 0; + $patch_idx < @$difftree; + ++$patch_idx) { + # read and prepare patch information + $diffinfo = parsed_difftree_line($difftree->[$patch_idx]); + + # generate anchor for "patch" links in difftree / whatchanged part + print "
\n" . + format_diff_cc_simplified($diffinfo, @hash_parents) . + "
\n"; # class="patch" + + $patch_number++; + } + + if ($patch_number == 0) { + if (@hash_parents > 1) { + print "
<div class=\"diff nodifferences\">Trivial merge</div>\n"; + } else { + print "<div class=\"diff nodifferences\">No differences found</div>
\n"; + } + } + + print "
\n"; # class="patchset" +} + +# . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . + +sub git_project_search_form { + my ($searchtext, $search_use_regexp) = @_; + + my $limit = ''; + if ($project_filter) { + $limit = " in '$project_filter/'"; + } + + print "
\n"; + print $cgi->start_form(-method => 'get', -action => $my_uri) . + $cgi->hidden(-name => 'a', -value => 'project_list') . "\n"; + print $cgi->hidden(-name => 'pf', -value => $project_filter). "\n" + if (defined $project_filter); + print $cgi->textfield(-name => 's', -value => $searchtext, + -title => "Search project by name and description$limit", + -size => 60) . "\n" . + "" . + $cgi->checkbox(-name => 'sr', -value => 1, -label => 're', + -checked => $search_use_regexp) . + "\n" . + $cgi->submit(-name => 'btnS', -value => 'Search') . + $cgi->end_form() . "\n" . + $cgi->a({-href => href(project => undef, searchtext => undef, + project_filter => $project_filter)}, + esc_html("List all projects$limit")) . "
\n"; + print "
\n"; +} + +# entry for given @keys needs filling if at least one of keys in list +# is not present in %$project_info +sub project_info_needs_filling { + my ($project_info, @keys) = @_; + + # return List::MoreUtils::any { !exists $project_info->{$_} } @keys; + foreach my $key (@keys) { + if (!exists $project_info->{$key}) { + return 1; + } + } + return; +} + +# fills project list info (age, description, owner, category, forks, etc.) +# for each project in the list, removing invalid projects from +# returned list, or fill only specified info. +# +# Invalid projects are removed from the returned list if and only if you +# ask 'age' or 'age_string' to be filled, because they are the only fields +# that run unconditionally git command that requires repository, and +# therefore do always check if project repository is invalid. +# +# USAGE: +# * fill_project_list_info(\@project_list, 'descr_long', 'ctags') +# ensures that 'descr_long' and 'ctags' fields are filled +# * @project_list = fill_project_list_info(\@project_list) +# ensures that all fields are filled (and invalid projects removed) +# +# NOTE: modifies $projlist, but does not remove entries from it +sub fill_project_list_info { + my ($projlist, @wanted_keys) = @_; + my @projects; + my $filter_set = sub { return @_; }; + if (@wanted_keys) { + my %wanted_keys = map { $_ => 1 } @wanted_keys; + $filter_set = sub { return grep { $wanted_keys{$_} } @_; }; + } + + my $show_ctags = gitweb_check_feature('ctags'); + PROJECT: + foreach my $pr (@$projlist) { + if (project_info_needs_filling($pr, $filter_set->('age', 'age_string'))) { + my (@activity) = git_get_last_activity($pr->{'path'}); + unless (@activity) { + next PROJECT; + } + ($pr->{'age'}, $pr->{'age_string'}) = @activity; + } + if (project_info_needs_filling($pr, $filter_set->('descr', 'descr_long'))) { + my $descr = git_get_project_description($pr->{'path'}) || ""; + $descr = to_utf8($descr); + $pr->{'descr_long'} = $descr; + $pr->{'descr'} = chop_str($descr, $projects_list_description_width, 5); + } + if (project_info_needs_filling($pr, $filter_set->('owner'))) { + $pr->{'owner'} = git_get_project_owner("$pr->{'path'}") || ""; + } + if ($show_ctags && + project_info_needs_filling($pr, $filter_set->('ctags'))) { + $pr->{'ctags'} = git_get_project_ctags($pr->{'path'}); + } + if ($projects_list_group_categories && + project_info_needs_filling($pr, $filter_set->('category'))) { + my $cat = git_get_project_category($pr->{'path'}) || + $project_list_default_category; + $pr->{'category'} = to_utf8($cat); + } + + push @projects, $pr; + } + + return @projects; +} + +sub sort_projects_list { + my ($projlist, $order) = @_; + + sub order_str { + my $key = shift; + return sub { $a->{$key} cmp $b->{$key} }; + } + + sub order_num_then_undef { + my $key = shift; + return sub { + defined $a->{$key} ? + (defined $b->{$key} ? $a->{$key} <=> $b->{$key} : -1) : + (defined $b->{$key} ? 1 : 0) + }; + } + + my %orderings = ( + project => order_str('path'), + descr => order_str('descr_long'), + owner => order_str('owner'), + age => order_num_then_undef('age'), + ); + + my $ordering = $orderings{$order}; + return defined $ordering ? 
sort $ordering @$projlist : @$projlist; +} + +# returns a hash of categories, containing the list of project +# belonging to each category +sub build_projlist_by_category { + my ($projlist, $from, $to) = @_; + my %categories; + + $from = 0 unless defined $from; + $to = $#$projlist if (!defined $to || $#$projlist < $to); + + for (my $i = $from; $i <= $to; $i++) { + my $pr = $projlist->[$i]; + push @{$categories{ $pr->{'category'} }}, $pr; + } + + return wantarray ? %categories : \%categories; +} + +# print 'sort by' element, generating 'sort by $name' replay link +# if that order is not selected +sub print_sort_th { + print format_sort_th(@_); +} + +sub format_sort_th { + my ($name, $order, $header) = @_; + my $sort_th = ""; + $header ||= ucfirst($name); + + if ($order eq $name) { + $sort_th .= "$header\n"; + } else { + $sort_th .= "" . + $cgi->a({-href => href(-replay=>1, order=>$name), + -class => "header"}, $header) . + "\n"; + } + + return $sort_th; +} + +sub git_project_list_rows { + my ($projlist, $from, $to, $check_forks) = @_; + + $from = 0 unless defined $from; + $to = $#$projlist if (!defined $to || $#$projlist < $to); + + my $alternate = 1; + for (my $i = $from; $i <= $to; $i++) { + my $pr = $projlist->[$i]; + + if ($alternate) { + print "\n"; + } else { + print "\n"; + } + $alternate ^= 1; + + if ($check_forks) { + print ""; + if ($pr->{'forks'}) { + my $nforks = scalar @{$pr->{'forks'}}; + if ($nforks > 0) { + print $cgi->a({-href => href(project=>$pr->{'path'}, action=>"forks"), + -title => "$nforks forks"}, "+"); + } else { + print $cgi->span({-title => "$nforks forks"}, "+"); + } + } + print "\n"; + } + print "" . $cgi->a({-href => href(project=>$pr->{'path'}, action=>"summary"), + -class => "list"}, + esc_html_match_hl($pr->{'path'}, $search_regexp)) . + "\n" . + "" . $cgi->a({-href => href(project=>$pr->{'path'}, action=>"summary"), + -class => "list", + -title => $pr->{'descr_long'}}, + $search_regexp + ? esc_html_match_hl_chopped($pr->{'descr_long'}, + $pr->{'descr'}, $search_regexp) + : esc_html($pr->{'descr'})) . + "\n"; + unless ($omit_owner) { + print "" . chop_and_escape_str($pr->{'owner'}, 15) . "\n"; + } + unless ($omit_age_column) { + print "{'age'}) . "\">" . + (defined $pr->{'age_string'} ? $pr->{'age_string'} : "No commits") . "\n"; + } + print"" . + $cgi->a({-href => href(project=>$pr->{'path'}, action=>"summary")}, "summary") . " | " . + $cgi->a({-href => href(project=>$pr->{'path'}, action=>"shortlog")}, "shortlog") . " | " . + $cgi->a({-href => href(project=>$pr->{'path'}, action=>"log")}, "log") . " | " . + $cgi->a({-href => href(project=>$pr->{'path'}, action=>"tree")}, "tree") . + ($pr->{'forks'} ? " | " . $cgi->a({-href => href(project=>$pr->{'path'}, action=>"forks")}, "forks") : '') . + "\n" . + "\n"; + } +} + +sub git_project_list_body { + # actually uses global variable $project + my ($projlist, $order, $from, $to, $extra, $no_header) = @_; + my @projects = @$projlist; + + my $check_forks = gitweb_check_feature('forks'); + my $show_ctags = gitweb_check_feature('ctags'); + my $tagfilter = $show_ctags ? 
$input_params{'ctag'} : undef;
+ $check_forks = undef
+ if ($tagfilter || $search_regexp);
+
+ # filtering out forks before filling info lets us do less work
+ @projects = filter_forks_from_projects_list(\@projects)
+ if ($check_forks);
+ # search_projects_list pre-fills required info
+ @projects = search_projects_list(\@projects,
+ 'search_regexp' => $search_regexp,
+ 'tagfilter' => $tagfilter)
+ if ($tagfilter || $search_regexp);
+ # fill the rest
+ my @all_fields = ('descr', 'descr_long', 'ctags', 'category');
+ push @all_fields, ('age', 'age_string') unless($omit_age_column);
+ push @all_fields, 'owner' unless($omit_owner);
+ @projects = fill_project_list_info(\@projects, @all_fields);
+
+ $order ||= $default_projects_order;
+ $from = 0 unless defined $from;
+ $to = $#projects if (!defined $to || $#projects < $to);
+
+ # short circuit
+ if ($from > $to) {
+ print "
\n". + "No such projects found
\n". + "Click ".$cgi->a({-href=>href(project=>undef)},"here")." to view all projects
\n". + "
\n
\n"; + return; + } + + @projects = sort_projects_list(\@projects, $order); + + if ($show_ctags) { + my $ctags = git_gather_all_ctags(\@projects); + my $cloud = git_populate_project_tagcloud($ctags); + print git_show_project_tagcloud($cloud, 64); + } + + print "\n"; + unless ($no_header) { + print "\n"; + if ($check_forks) { + print "\n"; + } + print_sort_th('project', $order, 'Project'); + print_sort_th('descr', $order, 'Description'); + print_sort_th('owner', $order, 'Owner') unless $omit_owner; + print_sort_th('age', $order, 'Last Change') unless $omit_age_column; + print "\n" . # for links + "\n"; + } + + if ($projects_list_group_categories) { + # only display categories with projects in the $from-$to window + @projects = sort {$a->{'category'} cmp $b->{'category'}} @projects[$from..$to]; + my %categories = build_projlist_by_category(\@projects, $from, $to); + foreach my $cat (sort keys %categories) { + unless ($cat eq "") { + print "\n"; + if ($check_forks) { + print "\n"; + } + print "\n"; + print "\n"; + } + + git_project_list_rows($categories{$cat}, undef, undef, $check_forks); + } + } else { + git_project_list_rows(\@projects, $from, $to, $check_forks); + } + + if (defined $extra) { + print "\n"; + if ($check_forks) { + print "\n"; + } + print "\n" . + "\n"; + } + print "
".esc_html($cat)."
$extra
\n"; +} + +sub git_log_body { + # uses global variable $project + my ($commitlist, $from, $to, $refs, $extra) = @_; + + $from = 0 unless defined $from; + $to = $#{$commitlist} if (!defined $to || $#{$commitlist} < $to); + + for (my $i = 0; $i <= $to; $i++) { + my %co = %{$commitlist->[$i]}; + next if !%co; + my $commit = $co{'id'}; + my $ref = format_ref_marker($refs, $commit); + git_print_header_div('commit', + "$co{'age_string'}" . + esc_html($co{'title'}) . $ref, + $commit); + print "
\n" . + "
\n" . + $cgi->a({-href => href(action=>"commit", hash=>$commit)}, "commit") . + " | " . + $cgi->a({-href => href(action=>"commitdiff", hash=>$commit)}, "commitdiff") . + " | " . + $cgi->a({-href => href(action=>"tree", hash=>$commit, hash_base=>$commit)}, "tree") . + "
\n" . + "
\n"; + git_print_authorship(\%co, -tag => 'span'); + print "
\n
\n"; + + print "
\n"; + git_print_log($co{'comment'}, -final_empty_line=> 1); + print "
\n"; + } + if ($extra) { + print "
\n"; + print "$extra\n"; + print "
\n"; + } +} + +sub git_shortlog_body { + # uses global variable $project + my ($commitlist, $from, $to, $refs, $extra) = @_; + + $from = 0 unless defined $from; + $to = $#{$commitlist} if (!defined $to || $#{$commitlist} < $to); + + print "\n"; + my $alternate = 1; + for (my $i = $from; $i <= $to; $i++) { + my %co = %{$commitlist->[$i]}; + my $commit = $co{'id'}; + my $ref = format_ref_marker($refs, $commit); + if ($alternate) { + print "\n"; + } else { + print "\n"; + } + $alternate ^= 1; + # git_summary() used print "\n" . + print "\n" . + format_author_html('td', \%co, 10) . "\n" . + "\n" . + "\n"; + } + if (defined $extra) { + print "\n" . + "\n" . + "\n"; + } + print "
$co{'age_string'}$co{'age_string_date'}"; + print format_subject_html($co{'title'}, $co{'title_short'}, + href(action=>"commit", hash=>$commit), $ref); + print "" . + $cgi->a({-href => href(action=>"commit", hash=>$commit)}, "commit") . " | " . + $cgi->a({-href => href(action=>"commitdiff", hash=>$commit)}, "commitdiff") . " | " . + $cgi->a({-href => href(action=>"tree", hash=>$commit, hash_base=>$commit)}, "tree"); + my $snapshot_links = format_snapshot_links($commit); + if (defined $snapshot_links) { + print " | " . $snapshot_links; + } + print "
$extra
\n"; +} + +sub git_history_body { + # Warning: assumes constant type (blob or tree) during history + my ($commitlist, $from, $to, $refs, $extra, + $file_name, $file_hash, $ftype) = @_; + + $from = 0 unless defined $from; + $to = $#{$commitlist} unless (defined $to && $to <= $#{$commitlist}); + + print "\n"; + my $alternate = 1; + for (my $i = $from; $i <= $to; $i++) { + my %co = %{$commitlist->[$i]}; + if (!%co) { + next; + } + my $commit = $co{'id'}; + + my $ref = format_ref_marker($refs, $commit); + + if ($alternate) { + print "\n"; + } else { + print "\n"; + } + $alternate ^= 1; + print "\n" . + # shortlog: format_author_html('td', \%co, 10) + format_author_html('td', \%co, 15, 3) . "\n" . + "\n" . + "\n"; + } + if (defined $extra) { + print "\n" . + "\n" . + "\n"; + } + print "
$co{'age_string_date'}"; + # originally git_history used chop_str($co{'title'}, 50) + print format_subject_html($co{'title'}, $co{'title_short'}, + href(action=>"commit", hash=>$commit), $ref); + print "" . + $cgi->a({-href => href(action=>$ftype, hash_base=>$commit, file_name=>$file_name)}, $ftype) . " | " . + $cgi->a({-href => href(action=>"commitdiff", hash=>$commit)}, "commitdiff"); + + if ($ftype eq 'blob') { + print " | " . + $cgi->a({-href => href(action=>"blob_plain", hash_base=>$commit, file_name=>$file_name)}, "raw"); + + my $blob_current = $file_hash; + my $blob_parent = git_get_hash_by_path($commit, $file_name); + if (defined $blob_current && defined $blob_parent && + $blob_current ne $blob_parent) { + print " | " . + $cgi->a({-href => href(action=>"blobdiff", + hash=>$blob_current, hash_parent=>$blob_parent, + hash_base=>$hash_base, hash_parent_base=>$commit, + file_name=>$file_name)}, + "diff to current"); + } + } + print "
$extra
\n"; +} + +sub git_tags_body { + # uses global variable $project + my ($taglist, $from, $to, $extra) = @_; + $from = 0 unless defined $from; + $to = $#{$taglist} if (!defined $to || $#{$taglist} < $to); + + print "\n"; + my $alternate = 1; + for (my $i = $from; $i <= $to; $i++) { + my $entry = $taglist->[$i]; + my %tag = %$entry; + my $comment = $tag{'subject'}; + my $comment_short; + if (defined $comment) { + $comment_short = chop_str($comment, 30, 5); + } + if ($alternate) { + print "\n"; + } else { + print "\n"; + } + $alternate ^= 1; + if (defined $tag{'age'}) { + print "\n"; + } else { + print "\n"; + } + print "\n" . + "\n" . + "\n" . + "\n" . + ""; + } + if (defined $extra) { + print "\n" . + "\n" . + "\n"; + } + print "
$tag{'age'}" . + $cgi->a({-href => href(action=>$tag{'reftype'}, hash=>$tag{'refid'}), + -class => "list name"}, esc_html($tag{'name'})) . + ""; + if (defined $comment) { + print format_subject_html($comment, $comment_short, + href(action=>"tag", hash=>$tag{'id'})); + } + print ""; + if ($tag{'type'} eq "tag") { + print $cgi->a({-href => href(action=>"tag", hash=>$tag{'id'})}, "tag"); + } else { + print " "; + } + print "" . " | " . + $cgi->a({-href => href(action=>$tag{'reftype'}, hash=>$tag{'refid'})}, $tag{'reftype'}); + if ($tag{'reftype'} eq "commit") { + print " | " . $cgi->a({-href => href(action=>"shortlog", hash=>$tag{'fullname'})}, "shortlog") . + " | " . $cgi->a({-href => href(action=>"log", hash=>$tag{'fullname'})}, "log"); + } elsif ($tag{'reftype'} eq "blob") { + print " | " . $cgi->a({-href => href(action=>"blob_plain", hash=>$tag{'refid'})}, "raw"); + } + print "
$extra
\n"; +} + +sub git_heads_body { + # uses global variable $project + my ($headlist, $head_at, $from, $to, $extra) = @_; + $from = 0 unless defined $from; + $to = $#{$headlist} if (!defined $to || $#{$headlist} < $to); + + print "\n"; + my $alternate = 1; + for (my $i = $from; $i <= $to; $i++) { + my $entry = $headlist->[$i]; + my %ref = %$entry; + my $curr = defined $head_at && $ref{'id'} eq $head_at; + if ($alternate) { + print "\n"; + } else { + print "\n"; + } + $alternate ^= 1; + print "\n" . + ($curr ? "\n" . + "\n" . + ""; + } + if (defined $extra) { + print "\n" . + "\n" . + "\n"; + } + print "
$ref{'age'}" : "") . + $cgi->a({-href => href(action=>"shortlog", hash=>$ref{'fullname'}), + -class => "list name"},esc_html($ref{'name'})) . + "" . + $cgi->a({-href => href(action=>"shortlog", hash=>$ref{'fullname'})}, "shortlog") . " | " . + $cgi->a({-href => href(action=>"log", hash=>$ref{'fullname'})}, "log") . " | " . + $cgi->a({-href => href(action=>"tree", hash=>$ref{'fullname'}, hash_base=>$ref{'fullname'})}, "tree") . + "
$extra
\n"; +} + +# Display a single remote block +sub git_remote_block { + my ($remote, $rdata, $limit, $head) = @_; + + my $heads = $rdata->{'heads'}; + my $fetch = $rdata->{'fetch'}; + my $push = $rdata->{'push'}; + + my $urls_table = "\n" ; + + if (defined $fetch) { + if ($fetch eq $push) { + $urls_table .= format_repo_url("URL", $fetch); + } else { + $urls_table .= format_repo_url("Fetch URL", $fetch); + $urls_table .= format_repo_url("Push URL", $push) if defined $push; + } + } elsif (defined $push) { + $urls_table .= format_repo_url("Push URL", $push); + } else { + $urls_table .= format_repo_url("", "No remote URL"); + } + + $urls_table .= "
\n"; + + my $dots; + if (defined $limit && $limit < @$heads) { + $dots = $cgi->a({-href => href(action=>"remotes", hash=>$remote)}, "..."); + } + + print $urls_table; + git_heads_body($heads, $head, 0, $limit, $dots); +} + +# Display a list of remote names with the respective fetch and push URLs +sub git_remotes_list { + my ($remotedata, $limit) = @_; + print "\n"; + my $alternate = 1; + my @remotes = sort keys %$remotedata; + + my $limited = $limit && $limit < @remotes; + + $#remotes = $limit - 1 if $limited; + + while (my $remote = shift @remotes) { + my $rdata = $remotedata->{$remote}; + my $fetch = $rdata->{'fetch'}; + my $push = $rdata->{'push'}; + if ($alternate) { + print "\n"; + } else { + print "\n"; + } + $alternate ^= 1; + print ""; + print ""; + + print "\n"; + } + + if ($limited) { + print "\n" . + "\n" . "\n"; + } + + print "
" . + $cgi->a({-href=> href(action=>'remotes', hash=>$remote), + -class=> "list name"},esc_html($remote)) . + "" . + (defined $fetch ? $cgi->a({-href=> $fetch}, "fetch") : "fetch") . + " | " . + (defined $push ? $cgi->a({-href=> $push}, "push") : "push") . + "
" . + $cgi->a({-href => href(action=>"remotes")}, "...") . + "
"; +} + +# Display remote heads grouped by remote, unless there are too many +# remotes, in which case we only display the remote names +sub git_remotes_body { + my ($remotedata, $limit, $head) = @_; + if ($limit and $limit < keys %$remotedata) { + git_remotes_list($remotedata, $limit); + } else { + fill_remote_heads($remotedata); + while (my ($remote, $rdata) = each %$remotedata) { + git_print_section({-class=>"remote", -id=>$remote}, + ["remotes", $remote, $remote], sub { + git_remote_block($remote, $rdata, $limit, $head); + }); + } + } +} + +sub git_search_message { + my %co = @_; + + my $greptype; + if ($searchtype eq 'commit') { + $greptype = "--grep="; + } elsif ($searchtype eq 'author') { + $greptype = "--author="; + } elsif ($searchtype eq 'committer') { + $greptype = "--committer="; + } + $greptype .= $searchtext; + my @commitlist = parse_commits($hash, 101, (100 * $page), undef, + $greptype, '--regexp-ignore-case', + $search_use_regexp ? '--extended-regexp' : '--fixed-strings'); + + my $paging_nav = ''; + if ($page > 0) { + $paging_nav .= + $cgi->a({-href => href(-replay=>1, page=>undef)}, + "first") . + " ⋅ " . + $cgi->a({-href => href(-replay=>1, page=>$page-1), + -accesskey => "p", -title => "Alt-p"}, "prev"); + } else { + $paging_nav .= "first ⋅ prev"; + } + my $next_link = ''; + if ($#commitlist >= 100) { + $next_link = + $cgi->a({-href => href(-replay=>1, page=>$page+1), + -accesskey => "n", -title => "Alt-n"}, "next"); + $paging_nav .= " ⋅ $next_link"; + } else { + $paging_nav .= " ⋅ next"; + } + + git_header_html(); + + git_print_page_nav('','', $hash,$co{'tree'},$hash, $paging_nav); + git_print_header_div('commit', esc_html($co{'title'}), $hash); + if ($page == 0 && !@commitlist) { + print "

No match.

\n"; + } else { + git_search_grep_body(\@commitlist, 0, 99, $next_link); + } + + git_footer_html(); +} + +sub git_search_changes { + my %co = @_; + + local $/ = "\n"; + open my $fd, '-|', git_cmd(), '--no-pager', 'log', @diff_opts, + '--pretty=format:%H', '--no-abbrev', '--raw', "-S$searchtext", + ($search_use_regexp ? '--pickaxe-regex' : ()) + or die_error(500, "Open git-log failed"); + + git_header_html(); + + git_print_page_nav('','', $hash,$co{'tree'},$hash); + git_print_header_div('commit', esc_html($co{'title'}), $hash); + + print "\n"; + my $alternate = 1; + undef %co; + my @files; + while (my $line = <$fd>) { + chomp $line; + next unless $line; + + my %set = parse_difftree_raw_line($line); + if (defined $set{'commit'}) { + # finish previous commit + if (%co) { + print "\n" . + "\n" . + "\n"; + } + + if ($alternate) { + print "\n"; + } else { + print "\n"; + } + $alternate ^= 1; + %co = parse_commit($set{'commit'}); + my $author = chop_and_escape_str($co{'author_name'}, 15, 5); + print "\n" . + "\n" . + "\n" . + "\n" . + "\n"; + } + + print "
" . + $cgi->a({-href => href(action=>"commit", hash=>$co{'id'})}, + "commit") . + " | " . + $cgi->a({-href => href(action=>"tree", hash=>$co{'tree'}, + hash_base=>$co{'id'})}, + "tree") . + "
$co{'age_string_date'}$author" . + $cgi->a({-href => href(action=>"commit", hash=>$co{'id'}), + -class => "list subject"}, + chop_and_escape_str($co{'title'}, 50) . "
"); + } elsif (defined $set{'to_id'}) { + next if is_deleted(\%set); + + print $cgi->a({-href => href(action=>"blob", hash_base=>$co{'id'}, + hash=>$set{'to_id'}, file_name=>$set{'to_file'}), + -class => "list"}, + "" . esc_path($set{'file'}) . "") . + "
\n"; + } + } + close $fd; + + # finish last commit (warning: repetition!) + if (%co) { + print "
" . + $cgi->a({-href => href(action=>"commit", hash=>$co{'id'})}, + "commit") . + " | " . + $cgi->a({-href => href(action=>"tree", hash=>$co{'tree'}, + hash_base=>$co{'id'})}, + "tree") . + "
\n"; + + git_footer_html(); +} + +sub git_search_files { + my %co = @_; + + local $/ = "\n"; + open my $fd, "-|", git_cmd(), 'grep', '-n', '-z', + $search_use_regexp ? ('-E', '-i') : '-F', + $searchtext, $co{'tree'} + or die_error(500, "Open git-grep failed"); + + git_header_html(); + + git_print_page_nav('','', $hash,$co{'tree'},$hash); + git_print_header_div('commit', esc_html($co{'title'}), $hash); + + print "\n"; + my $alternate = 1; + my $matches = 0; + my $lastfile = ''; + my $file_href; + while (my $line = <$fd>) { + chomp $line; + my ($file, $lno, $ltext, $binary); + last if ($matches++ > 1000); + if ($line =~ /^Binary file (.+) matches$/) { + $file = $1; + $binary = 1; + } else { + ($file, $lno, $ltext) = split(/\0/, $line, 3); + $file =~ s/^$co{'tree'}://; + } + if ($file ne $lastfile) { + $lastfile and print "\n"; + if ($alternate++) { + print "\n"; + } else { + print "\n"; + } + $file_href = href(action=>"blob", hash_base=>$co{'id'}, + file_name=>$file); + print "\n"; + if ($matches > 1000) { + print "
Too many matches, listing trimmed
\n"; + } + } else { + print "
No matches found
\n"; + } + close $fd; + + print "
". + $cgi->a({-href => $file_href, -class => "list"}, esc_path($file)); + print "\n"; + $lastfile = $file; + } + if ($binary) { + print "
Binary file
\n"; + } else { + $ltext = untabify($ltext); + if ($ltext =~ m/^(.*)($search_regexp)(.*)$/i) { + $ltext = esc_html($1, -nbsp=>1); + $ltext .= ''; + $ltext .= esc_html($2, -nbsp=>1); + $ltext .= ''; + $ltext .= esc_html($3, -nbsp=>1); + } else { + $ltext = esc_html($ltext, -nbsp=>1); + } + print "
" . + $cgi->a({-href => $file_href.'#l'.$lno, + -class => "linenr"}, sprintf('%4i', $lno)) . + ' ' . $ltext . "
\n"; + } + } + if ($lastfile) { + print "
\n"; + + git_footer_html(); +} + +sub git_search_grep_body { + my ($commitlist, $from, $to, $extra) = @_; + $from = 0 unless defined $from; + $to = $#{$commitlist} if (!defined $to || $#{$commitlist} < $to); + + print "\n"; + my $alternate = 1; + for (my $i = $from; $i <= $to; $i++) { + my %co = %{$commitlist->[$i]}; + if (!%co) { + next; + } + my $commit = $co{'id'}; + if ($alternate) { + print "\n"; + } else { + print "\n"; + } + $alternate ^= 1; + print "\n" . + format_author_html('td', \%co, 15, 5) . + "\n" . + "\n" . + "\n"; + } + if (defined $extra) { + print "\n" . + "\n" . + "\n"; + } + print "
$co{'age_string_date'}" . + $cgi->a({-href => href(action=>"commit", hash=>$co{'id'}), + -class => "list subject"}, + chop_and_escape_str($co{'title'}, 50) . "
"); + my $comment = $co{'comment'}; + foreach my $line (@$comment) { + if ($line =~ m/^(.*?)($search_regexp)(.*)$/i) { + my ($lead, $match, $trail) = ($1, $2, $3); + $match = chop_str($match, 70, 5, 'center'); + my $contextlen = int((80 - length($match))/2); + $contextlen = 30 if ($contextlen > 30); + $lead = chop_str($lead, $contextlen, 10, 'left'); + $trail = chop_str($trail, $contextlen, 10, 'right'); + + $lead = esc_html($lead); + $match = esc_html($match); + $trail = esc_html($trail); + + print "$lead$match$trail
"; + } + } + print "
" . + $cgi->a({-href => href(action=>"commit", hash=>$co{'id'})}, "commit") . + " | " . + $cgi->a({-href => href(action=>"commitdiff", hash=>$co{'id'})}, "commitdiff") . + " | " . + $cgi->a({-href => href(action=>"tree", hash=>$co{'tree'}, hash_base=>$co{'id'})}, "tree"); + print "
$extra
\n"; +} + +## ====================================================================== +## ====================================================================== +## actions + +sub git_project_list { + my $order = $input_params{'order'}; + if (defined $order && $order !~ m/none|project|descr|owner|age/) { + die_error(400, "Unknown order parameter"); + } + + my @list = git_get_projects_list($project_filter, $strict_export); + if (!@list) { + die_error(404, "No projects found"); + } + + git_header_html(); + if (defined $home_text && -f $home_text) { + print "
\n"; + insert_file($home_text); + print "
\n"; + } + + git_project_search_form($searchtext, $search_use_regexp); + git_project_list_body(\@list, $order); + git_footer_html(); +} + +sub git_forks { + my $order = $input_params{'order'}; + if (defined $order && $order !~ m/none|project|descr|owner|age/) { + die_error(400, "Unknown order parameter"); + } + + my $filter = $project; + $filter =~ s/\.git$//; + my @list = git_get_projects_list($filter); + if (!@list) { + die_error(404, "No forks found"); + } + + git_header_html(); + git_print_page_nav('',''); + git_print_header_div('summary', "$project forks"); + git_project_list_body(\@list, $order); + git_footer_html(); +} + +sub git_project_index { + my @projects = git_get_projects_list($project_filter, $strict_export); + if (!@projects) { + die_error(404, "No projects found"); + } + + print $cgi->header( + -type => 'text/plain', + -charset => 'utf-8', + -content_disposition => 'inline; filename="index.aux"'); + + foreach my $pr (@projects) { + if (!exists $pr->{'owner'}) { + $pr->{'owner'} = git_get_project_owner("$pr->{'path'}"); + } + + my ($path, $owner) = ($pr->{'path'}, $pr->{'owner'}); + # quote as in CGI::Util::encode, but keep the slash, and use '+' for ' ' + $path =~ s/([^a-zA-Z0-9_.\-\/ ])/sprintf("%%%02X", ord($1))/eg; + $owner =~ s/([^a-zA-Z0-9_.\-\/ ])/sprintf("%%%02X", ord($1))/eg; + $path =~ s/ /\+/g; + $owner =~ s/ /\+/g; + + print "$path $owner\n"; + } +} + +sub git_summary { + my $descr = git_get_project_description($project) || "none"; + my %co = parse_commit("HEAD"); + my %cd = %co ? parse_date($co{'committer_epoch'}, $co{'committer_tz'}) : (); + my $head = $co{'id'}; + my $remote_heads = gitweb_check_feature('remote_heads'); + + my $owner = git_get_project_owner($project); + + my $refs = git_get_references(); + # These get_*_list functions return one more to allow us to see if + # there are more ... + my @taglist = git_get_tags_list(16); + my @headlist = git_get_heads_list(16); + my %remotedata = $remote_heads ? git_get_remotes_list() : (); + my @forklist; + my $check_forks = gitweb_check_feature('forks'); + + if ($check_forks) { + # find forks of a project + my $filter = $project; + $filter =~ s/\.git$//; + @forklist = git_get_projects_list($filter); + # filter out forks of forks + @forklist = filter_forks_from_projects_list(\@forklist) + if (@forklist); + } + + git_header_html(); + git_print_page_nav('summary','', $head); + + print "
 
\n"; + print "\n" . + "\n"; + if ($owner and not $omit_owner) { + print "\n"; + } + if (defined $cd{'rfc2822'}) { + print "" . + "\n"; + } + + # use per project git URL list in $projectroot/$project/cloneurl + # or make project git URL from git base URL and project name + my $url_tag = "URL"; + my @url_list = git_get_project_url_list($project); + @url_list = map { "$_/$project" } @git_base_url_list unless @url_list; + foreach my $git_url (@url_list) { + next unless $git_url; + print format_repo_url($url_tag, $git_url); + $url_tag = ""; + } + + # Tag cloud + my $show_ctags = gitweb_check_feature('ctags'); + if ($show_ctags) { + my $ctags = git_get_project_ctags($project); + if (%$ctags) { + # without ability to add tags, don't show if there are none + my $cloud = git_populate_project_tagcloud($ctags); + print "" . + "" . + "" . + "\n"; + } + } + + print "
description" . esc_html($descr) . "
owner" . esc_html($owner) . "
last change".format_timestamp_html(\%cd)."
content tags".git_show_project_tagcloud($cloud, 48)."
\n"; + + # If XSS prevention is on, we don't include README.html. + # TODO: Allow a readme in some safe format. + if (!$prevent_xss && -s "$projectroot/$project/README.html") { + print "
readme
\n" . + "
\n"; + insert_file("$projectroot/$project/README.html"); + print "\n
\n"; # class="readme" + } + + # we need to request one more than 16 (0..15) to check if + # those 16 are all + my @commitlist = $head ? parse_commits($head, 17) : (); + if (@commitlist) { + git_print_header_div('shortlog'); + git_shortlog_body(\@commitlist, 0, 15, $refs, + $#commitlist <= 15 ? undef : + $cgi->a({-href => href(action=>"shortlog")}, "...")); + } + + if (@taglist) { + git_print_header_div('tags'); + git_tags_body(\@taglist, 0, 15, + $#taglist <= 15 ? undef : + $cgi->a({-href => href(action=>"tags")}, "...")); + } + + if (@headlist) { + git_print_header_div('heads'); + git_heads_body(\@headlist, $head, 0, 15, + $#headlist <= 15 ? undef : + $cgi->a({-href => href(action=>"heads")}, "...")); + } + + if (%remotedata) { + git_print_header_div('remotes'); + git_remotes_body(\%remotedata, 15, $head); + } + + if (@forklist) { + git_print_header_div('forks'); + git_project_list_body(\@forklist, 'age', 0, 15, + $#forklist <= 15 ? undef : + $cgi->a({-href => href(action=>"forks")}, "..."), + 'no_header'); + } + + git_footer_html(); +} + +sub git_tag { + my %tag = parse_tag($hash); + + if (! %tag) { + die_error(404, "Unknown tag object"); + } + + my $head = git_get_head_hash($project); + git_header_html(); + git_print_page_nav('','', $head,undef,$head); + git_print_header_div('commit', esc_html($tag{'name'}), $hash); + print "
\n" . + "\n" . + "\n" . + "\n" . + "\n" . + "\n" . + "\n"; + if (defined($tag{'author'})) { + git_print_authorship_rows(\%tag, 'author'); + } + print "
object" . $cgi->a({-class => "list", -href => href(action=>$tag{'type'}, hash=>$tag{'object'})}, + $tag{'object'}) . "" . $cgi->a({-href => href(action=>$tag{'type'}, hash=>$tag{'object'})}, + $tag{'type'}) . "
\n\n" . + "
\n"; + print "
"; + my $comment = $tag{'comment'}; + foreach my $line (@$comment) { + chomp $line; + print esc_html($line, -nbsp=>1) . "
\n"; + } + print "
\n"; + git_footer_html(); +} + +sub git_blame_common { + my $format = shift || 'porcelain'; + if ($format eq 'porcelain' && $input_params{'javascript'}) { + $format = 'incremental'; + $action = 'blame_incremental'; # for page title etc + } + + # permissions + gitweb_check_feature('blame') + or die_error(403, "Blame view not allowed"); + + # error checking + die_error(400, "No file name given") unless $file_name; + $hash_base ||= git_get_head_hash($project); + die_error(404, "Couldn't find base commit") unless $hash_base; + my %co = parse_commit($hash_base) + or die_error(404, "Commit not found"); + my $ftype = "blob"; + if (!defined $hash) { + $hash = git_get_hash_by_path($hash_base, $file_name, "blob") + or die_error(404, "Error looking up file"); + } else { + $ftype = git_get_type($hash); + if ($ftype !~ "blob") { + die_error(400, "Object is not a blob"); + } + } + + my $fd; + if ($format eq 'incremental') { + # get file contents (as base) + open $fd, "-|", git_cmd(), 'cat-file', 'blob', $hash + or die_error(500, "Open git-cat-file failed"); + } elsif ($format eq 'data') { + # run git-blame --incremental + open $fd, "-|", git_cmd(), "blame", "--incremental", + $hash_base, "--", $file_name + or die_error(500, "Open git-blame --incremental failed"); + } else { + # run git-blame --porcelain + open $fd, "-|", git_cmd(), "blame", '-p', + $hash_base, '--', $file_name + or die_error(500, "Open git-blame --porcelain failed"); + } + binmode $fd, ':utf8'; + + # incremental blame data returns early + if ($format eq 'data') { + print $cgi->header( + -type=>"text/plain", -charset => "utf-8", + -status=> "200 OK"); + local $| = 1; # output autoflush + while (my $line = <$fd>) { + print to_utf8($line); + } + close $fd + or print "ERROR $!\n"; + + print 'END'; + if (defined $t0 && gitweb_check_feature('timed')) { + print ' '. + tv_interval($t0, [ gettimeofday() ]). + ' '.$number_of_git_cmds; + } + print "\n"; + + return; + } + + # page header + git_header_html(); + my $formats_nav = + $cgi->a({-href => href(action=>"blob", -replay=>1)}, + "blob") . + " | "; + if ($format eq 'incremental') { + $formats_nav .= + $cgi->a({-href => href(action=>"blame", javascript=>0, -replay=>1)}, + "blame") . " (non-incremental)"; + } else { + $formats_nav .= + $cgi->a({-href => href(action=>"blame_incremental", -replay=>1)}, + "blame") . " (incremental)"; + } + $formats_nav .= + " | " . + $cgi->a({-href => href(action=>"history", -replay=>1)}, + "history") . + " | " . + $cgi->a({-href => href(action=>$action, file_name=>$file_name)}, + "HEAD"); + git_print_page_nav('','', $hash_base,$co{'tree'},$hash_base, $formats_nav); + git_print_header_div('commit', esc_html($co{'title'}), $hash_base); + git_print_page_path($file_name, $ftype, $hash_base); + + # page body + if ($format eq 'incremental') { + print "\n"; + + print qq!
\n!; + } + + print qq!
\n!; + print qq!
... / ...
\n! + if ($format eq 'incremental'); + print qq!\n!. + #qq!\n!. + qq!\n!. + qq!\n!. + qq!\n!. + qq!\n!; + + my @rev_color = qw(light dark); + my $num_colors = scalar(@rev_color); + my $current_color = 0; + + if ($format eq 'incremental') { + my $color_class = $rev_color[$current_color]; + + #contents of a file + my $linenr = 0; + LINE: + while (my $line = <$fd>) { + chomp $line; + $linenr++; + + print qq!!. + qq!!. + qq!!; + print qq!\n"; + print qq!\n!; + } + + } else { # porcelain, i.e. ordinary blame + my %metainfo = (); # saves information about commits + + # blame data + LINE: + while (my $line = <$fd>) { + chomp $line; + # the header: [] + # no for subsequent lines in group of lines + my ($full_rev, $orig_lineno, $lineno, $group_size) = + ($line =~ /^($oid_regex) (\d+) (\d+)(?: (\d+))?$/); + if (!exists $metainfo{$full_rev}) { + $metainfo{$full_rev} = { 'nprevious' => 0 }; + } + my $meta = $metainfo{$full_rev}; + my $data; + while ($data = <$fd>) { + chomp $data; + last if ($data =~ s/^\t//); # contents of line + if ($data =~ /^(\S+)(?: (.*))?$/) { + $meta->{$1} = $2 unless exists $meta->{$1}; + } + if ($data =~ /^previous /) { + $meta->{'nprevious'}++; + } + } + my $short_rev = substr($full_rev, 0, 8); + my $author = $meta->{'author'}; + my %date = + parse_date($meta->{'author-time'}, $meta->{'author-tz'}); + my $date = $date{'iso-tz'}; + if ($group_size) { + $current_color = ($current_color + 1) % $num_colors; + } + my $tr_class = $rev_color[$current_color]; + $tr_class .= ' boundary' if (exists $meta->{'boundary'}); + $tr_class .= ' no-previous' if ($meta->{'nprevious'} == 0); + $tr_class .= ' multiple-previous' if ($meta->{'nprevious'} > 1); + print "\n"; + if ($group_size) { + print "\n"; + } + # 'previous' + if (exists $meta->{'previous'} && + $meta->{'previous'} =~ /^($oid_regex) (.*)$/) { + $meta->{'parent'} = $1; + $meta->{'file_parent'} = unquote($2); + } + my $linenr_commit = + exists($meta->{'parent'}) ? + $meta->{'parent'} : $full_rev; + my $linenr_filename = + exists($meta->{'file_parent'}) ? + $meta->{'file_parent'} : unquote($meta->{'filename'}); + my $blamed = href(action => 'blame', + file_name => $linenr_filename, + hash_base => $linenr_commit); + print ""; + print "\n"; + print "\n"; + } # end while + + } + + # footer + print "\n". + "
CommitLineData
!. + qq!$linenr! . esc_html($line) . "
1); + print ">"; + print $cgi->a({-href => href(action=>"commit", + hash=>$full_rev, + file_name=>$file_name)}, + esc_html($short_rev)); + if ($group_size >= 2) { + my @author_initials = ($author =~ /\b([[:upper:]])\B/g); + if (@author_initials) { + print "
" . + esc_html(join('', @author_initials)); + # or join('.', ...) + } + } + print "
"; + print $cgi->a({ -href => "$blamed#l$orig_lineno", + -class => "linenr" }, + esc_html($lineno)); + print "" . esc_html($data) . "
\n"; # class="blame" + print "
\n"; # class="blame_body" + close $fd + or print "Reading blob failed\n"; + + git_footer_html(); +} + +sub git_blame { + git_blame_common(); +} + +sub git_blame_incremental { + git_blame_common('incremental'); +} + +sub git_blame_data { + git_blame_common('data'); +} + +sub git_tags { + my $head = git_get_head_hash($project); + git_header_html(); + git_print_page_nav('','', $head,undef,$head,format_ref_views('tags')); + git_print_header_div('summary', $project); + + my @tagslist = git_get_tags_list(); + if (@tagslist) { + git_tags_body(\@tagslist); + } + git_footer_html(); +} + +sub git_heads { + my $head = git_get_head_hash($project); + git_header_html(); + git_print_page_nav('','', $head,undef,$head,format_ref_views('heads')); + git_print_header_div('summary', $project); + + my @headslist = git_get_heads_list(); + if (@headslist) { + git_heads_body(\@headslist, $head); + } + git_footer_html(); +} + +# used both for single remote view and for list of all the remotes +sub git_remotes { + gitweb_check_feature('remote_heads') + or die_error(403, "Remote heads view is disabled"); + + my $head = git_get_head_hash($project); + my $remote = $input_params{'hash'}; + + my $remotedata = git_get_remotes_list($remote); + die_error(500, "Unable to get remote information") unless defined $remotedata; + + unless (%$remotedata) { + die_error(404, defined $remote ? + "Remote $remote not found" : + "No remotes found"); + } + + git_header_html(undef, undef, -action_extra => $remote); + git_print_page_nav('', '', $head, undef, $head, + format_ref_views($remote ? '' : 'remotes')); + + fill_remote_heads($remotedata); + if (defined $remote) { + git_print_header_div('remotes', "$remote remote for $project"); + git_remote_block($remote, $remotedata->{$remote}, undef, $head); + } else { + git_print_header_div('summary', "$project remotes"); + git_remotes_body($remotedata, undef, $head); + } + + git_footer_html(); +} + +sub git_blob_plain { + my $type = shift; + my $expires; + + if (!defined $hash) { + if (defined $file_name) { + my $base = $hash_base || git_get_head_hash($project); + $hash = git_get_hash_by_path($base, $file_name, "blob") + or die_error(404, "Cannot find file"); + } else { + die_error(400, "No file name defined"); + } + } elsif ($hash =~ m/^$oid_regex$/) { + # blobs defined by non-textual hash id's can be cached + $expires = "+1d"; + } + + open my $fd, "-|", git_cmd(), "cat-file", "blob", $hash + or die_error(500, "Open git-cat-file blob '$hash' failed"); + + # content-type (can include charset) + $type = blob_contenttype($fd, $file_name, $type); + + # "save as" filename, even when no $file_name is given + my $save_as = "$hash"; + if (defined $file_name) { + $save_as = $file_name; + } elsif ($type =~ m/^text\//) { + $save_as .= '.txt'; + } + + # With XSS prevention on, blobs of all types except a few known safe + # ones are served with "Content-Disposition: attachment" to make sure + # they don't run in our security domain. For certain image types, + # blob view writes an tag referring to blob_plain view, and we + # want to be sure not to break that by serving the image as an + # attachment (though Firefox 3 doesn't seem to care). + my $sandbox = $prevent_xss && + $type !~ m!^(?:text/[a-z]+|image/(?:gif|png|jpeg))(?:[ ;]|$)!; + + # serve text/* as text/plain + if ($prevent_xss && + ($type =~ m!^text/[a-z]+\b(.*)$! || + ($type =~ m!^[a-z]+/[a-z]\+xml\b(.*)$! && -T $fd))) { + my $rest = $1; + $rest = defined $rest ? 
$rest : ''; + $type = "text/plain$rest"; + } + + print $cgi->header( + -type => $type, + -expires => $expires, + -content_disposition => + ($sandbox ? 'attachment' : 'inline') + . '; filename="' . $save_as . '"'); + local $/ = undef; + local *FCGI::Stream::PRINT = $FCGI_Stream_PRINT_raw; + binmode STDOUT, ':raw'; + print <$fd>; + binmode STDOUT, ':utf8'; # as set at the beginning of gitweb.cgi + close $fd; +} + +sub git_blob { + my $expires; + + if (!defined $hash) { + if (defined $file_name) { + my $base = $hash_base || git_get_head_hash($project); + $hash = git_get_hash_by_path($base, $file_name, "blob") + or die_error(404, "Cannot find file"); + } else { + die_error(400, "No file name defined"); + } + } elsif ($hash =~ m/^$oid_regex$/) { + # blobs defined by non-textual hash id's can be cached + $expires = "+1d"; + } + + my $have_blame = gitweb_check_feature('blame'); + open my $fd, "-|", git_cmd(), "cat-file", "blob", $hash + or die_error(500, "Couldn't cat $file_name, $hash"); + my $mimetype = blob_mimetype($fd, $file_name); + # use 'blob_plain' (aka 'raw') view for files that cannot be displayed + if ($mimetype !~ m!^(?:text/|image/(?:gif|png|jpeg)$)! && -B $fd) { + close $fd; + return git_blob_plain($mimetype); + } + # we can have blame only for text/* mimetype + $have_blame &&= ($mimetype =~ m!^text/!); + + my $highlight = gitweb_check_feature('highlight'); + my $syntax = guess_file_syntax($highlight, $file_name); + $fd = run_highlighter($fd, $highlight, $syntax); + + git_header_html(undef, $expires); + my $formats_nav = ''; + if (defined $hash_base && (my %co = parse_commit($hash_base))) { + if (defined $file_name) { + if ($have_blame) { + $formats_nav .= + $cgi->a({-href => href(action=>"blame", -replay=>1)}, + "blame") . + " | "; + } + $formats_nav .= + $cgi->a({-href => href(action=>"history", -replay=>1)}, + "history") . + " | " . + $cgi->a({-href => href(action=>"blob_plain", -replay=>1)}, + "raw") . + " | " . + $cgi->a({-href => href(action=>"blob", + hash_base=>"HEAD", file_name=>$file_name)}, + "HEAD"); + } else { + $formats_nav .= + $cgi->a({-href => href(action=>"blob_plain", -replay=>1)}, + "raw"); + } + git_print_page_nav('','', $hash_base,$co{'tree'},$hash_base, $formats_nav); + git_print_header_div('commit', esc_html($co{'title'}), $hash_base); + } else { + print "
\n" . + "

\n" . + "
".esc_html($hash)."
\n"; + } + git_print_page_path($file_name, "blob", $hash_base); + print "
\n"; + if ($mimetype =~ m!^image/!) { + print qq!!.esc_attr($file_name).qq!$hash, + hash_base=>$hash_base, file_name=>$file_name)) . + qq!" />\n!; + } else { + my $nr; + while (my $line = <$fd>) { + chomp $line; + $nr++; + $line = untabify($line); + printf qq!
%4i %s
\n!, + $nr, esc_attr(href(-replay => 1)), $nr, $nr, + $highlight ? sanitize($line) : esc_html($line, -nbsp=>1); + } + } + close $fd + or print "Reading blob failed.\n"; + print "
"; + git_footer_html(); +} + +sub git_tree { + if (!defined $hash_base) { + $hash_base = "HEAD"; + } + if (!defined $hash) { + if (defined $file_name) { + $hash = git_get_hash_by_path($hash_base, $file_name, "tree"); + } else { + $hash = $hash_base; + } + } + die_error(404, "No such tree") unless defined($hash); + + my $show_sizes = gitweb_check_feature('show-sizes'); + my $have_blame = gitweb_check_feature('blame'); + + my @entries = (); + { + local $/ = "\0"; + open my $fd, "-|", git_cmd(), "ls-tree", '-z', + ($show_sizes ? '-l' : ()), @extra_options, $hash + or die_error(500, "Open git-ls-tree failed"); + @entries = map { chomp; $_ } <$fd>; + close $fd + or die_error(404, "Reading tree failed"); + } + + my $refs = git_get_references(); + my $ref = format_ref_marker($refs, $hash_base); + git_header_html(); + my $basedir = ''; + if (defined $hash_base && (my %co = parse_commit($hash_base))) { + my @views_nav = (); + if (defined $file_name) { + push @views_nav, + $cgi->a({-href => href(action=>"history", -replay=>1)}, + "history"), + $cgi->a({-href => href(action=>"tree", + hash_base=>"HEAD", file_name=>$file_name)}, + "HEAD"), + } + my $snapshot_links = format_snapshot_links($hash); + if (defined $snapshot_links) { + # FIXME: Should be available when we have no hash base as well. + push @views_nav, $snapshot_links; + } + git_print_page_nav('tree','', $hash_base, undef, undef, + join(' | ', @views_nav)); + git_print_header_div('commit', esc_html($co{'title'}) . $ref, $hash_base); + } else { + undef $hash_base; + print "
\n"; + print "

\n"; + print "
".esc_html($hash)."
\n"; + } + if (defined $file_name) { + $basedir = $file_name; + if ($basedir ne '' && substr($basedir, -1) ne '/') { + $basedir .= '/'; + } + git_print_page_path($file_name, 'tree', $hash_base); + } + print "
\n"; + print "\n"; + my $alternate = 1; + # '..' (top directory) link if possible + if (defined $hash_base && + defined $file_name && $file_name =~ m![^/]+$!) { + if ($alternate) { + print "\n"; + } else { + print "\n"; + } + $alternate ^= 1; + + my $up = $file_name; + $up =~ s!/?[^/]+$!!; + undef $up unless $up; + # based on git_print_tree_entry + print '\n"; + print ''."\n" if $show_sizes; + print '\n"; + print "\n"; + + print "\n"; + } + foreach my $line (@entries) { + my %t = parse_ls_tree_line($line, -z => 1, -l => $show_sizes); + + if ($alternate) { + print "\n"; + } else { + print "\n"; + } + $alternate ^= 1; + + git_print_tree_entry(\%t, $basedir, $hash_base, $have_blame); + + print "\n"; + } + print "
' . mode_str('040000') . " '; + print $cgi->a({-href => href(action=>"tree", + hash_base=>$hash_base, + file_name=>$up)}, + ".."); + print "
\n" . + "
"; + git_footer_html(); +} + +sub sanitize_for_filename { + my $name = shift; + + $name =~ s!/!-!g; + $name =~ s/[^[:alnum:]_.-]//g; + + return $name; +} + +sub snapshot_name { + my ($project, $hash) = @_; + + # path/to/project.git -> project + # path/to/project/.git -> project + my $name = to_utf8($project); + $name =~ s,([^/])/*\.git$,$1,; + $name = sanitize_for_filename(basename($name)); + + my $ver = $hash; + if ($hash =~ /^[0-9a-fA-F]+$/) { + # shorten SHA-1 hash + my $full_hash = git_get_full_hash($project, $hash); + if ($full_hash =~ /^$hash/ && length($hash) > 7) { + $ver = git_get_short_hash($project, $hash); + } + } elsif ($hash =~ m!^refs/tags/(.*)$!) { + # tags don't need shortened SHA-1 hash + $ver = $1; + } else { + # branches and other need shortened SHA-1 hash + my $strip_refs = join '|', map { quotemeta } get_branch_refs(); + if ($hash =~ m!^refs/($strip_refs|remotes)/(.*)$!) { + my $ref_dir = (defined $1) ? $1 : ''; + $ver = $2; + + $ref_dir = sanitize_for_filename($ref_dir); + # for refs neither in heads nor remotes we want to + # add a ref dir to archive name + if ($ref_dir ne '' and $ref_dir ne 'heads' and $ref_dir ne 'remotes') { + $ver = $ref_dir . '-' . $ver; + } + } + $ver .= '-' . git_get_short_hash($project, $hash); + } + # special case of sanitization for filename - we change + # slashes to dots instead of dashes + # in case of hierarchical branch names + $ver =~ s!/!.!g; + $ver =~ s/[^[:alnum:]_.-]//g; + + # name = project-version_string + $name = "$name-$ver"; + + return wantarray ? ($name, $name) : $name; +} + +sub exit_if_unmodified_since { + my ($latest_epoch) = @_; + our $cgi; + + my $if_modified = $cgi->http('IF_MODIFIED_SINCE'); + if (defined $if_modified) { + my $since; + if (eval { require HTTP::Date; 1; }) { + $since = HTTP::Date::str2time($if_modified); + } elsif (eval { require Time::ParseDate; 1; }) { + $since = Time::ParseDate::parsedate($if_modified, GMT => 1); + } + if (defined $since && $latest_epoch <= $since) { + my %latest_date = parse_date($latest_epoch); + print $cgi->header( + -last_modified => $latest_date{'rfc2822'}, + -status => '304 Not Modified'); + goto DONE_GITWEB; + } + } +} + +sub git_snapshot { + my $format = $input_params{'snapshot_format'}; + if (!@snapshot_fmts) { + die_error(403, "Snapshots not allowed"); + } + # default to first supported snapshot format + $format ||= $snapshot_fmts[0]; + if ($format !~ m/^[a-z0-9]+$/) { + die_error(400, "Invalid snapshot format parameter"); + } elsif (!exists($known_snapshot_formats{$format})) { + die_error(400, "Unknown snapshot format"); + } elsif ($known_snapshot_formats{$format}{'disabled'}) { + die_error(403, "Snapshot format not allowed"); + } elsif (!grep($_ eq $format, @snapshot_fmts)) { + die_error(403, "Unsupported snapshot format"); + } + + my $type = git_get_type("$hash^{}"); + if (!$type) { + die_error(404, 'Object does not exist'); + } elsif ($type eq 'blob') { + die_error(400, 'Object is not a tree-ish'); + } + + my ($name, $prefix) = snapshot_name($project, $hash); + my $filename = "$name$known_snapshot_formats{$format}{'suffix'}"; + + my %co = parse_commit($hash); + exit_if_unmodified_since($co{'committer_epoch'}) if %co; + + my $cmd = quote_command( + git_cmd(), 'archive', + "--format=$known_snapshot_formats{$format}{'format'}", + "--prefix=$prefix/", $hash); + if (exists $known_snapshot_formats{$format}{'compressor'}) { + $cmd .= ' | ' . 
quote_command(@{$known_snapshot_formats{$format}{'compressor'}}); + } + + $filename =~ s/(["\\])/\\$1/g; + my %latest_date; + if (%co) { + %latest_date = parse_date($co{'committer_epoch'}, $co{'committer_tz'}); + } + + print $cgi->header( + -type => $known_snapshot_formats{$format}{'type'}, + -content_disposition => 'inline; filename="' . $filename . '"', + %co ? (-last_modified => $latest_date{'rfc2822'}) : (), + -status => '200 OK'); + + open my $fd, "-|", $cmd + or die_error(500, "Execute git-archive failed"); + local *FCGI::Stream::PRINT = $FCGI_Stream_PRINT_raw; + binmode STDOUT, ':raw'; + print <$fd>; + binmode STDOUT, ':utf8'; # as set at the beginning of gitweb.cgi + close $fd; +} + +sub git_log_generic { + my ($fmt_name, $body_subr, $base, $parent, $file_name, $file_hash) = @_; + + my $head = git_get_head_hash($project); + if (!defined $base) { + $base = $head; + } + if (!defined $page) { + $page = 0; + } + my $refs = git_get_references(); + + my $commit_hash = $base; + if (defined $parent) { + $commit_hash = "$parent..$base"; + } + my @commitlist = + parse_commits($commit_hash, 101, (100 * $page), + defined $file_name ? ($file_name, "--full-history") : ()); + + my $ftype; + if (!defined $file_hash && defined $file_name) { + # some commits could have deleted file in question, + # and not have it in tree, but one of them has to have it + for (my $i = 0; $i < @commitlist; $i++) { + $file_hash = git_get_hash_by_path($commitlist[$i]{'id'}, $file_name); + last if defined $file_hash; + } + } + if (defined $file_hash) { + $ftype = git_get_type($file_hash); + } + if (defined $file_name && !defined $ftype) { + die_error(500, "Unknown type of object"); + } + my %co; + if (defined $file_name) { + %co = parse_commit($base) + or die_error(404, "Unknown commit object"); + } + + + my $paging_nav = format_paging_nav($fmt_name, $page, $#commitlist >= 100); + my $next_link = ''; + if ($#commitlist >= 100) { + $next_link = + $cgi->a({-href => href(-replay=>1, page=>$page+1), + -accesskey => "n", -title => "Alt-n"}, "next"); + } + my $patch_max = gitweb_get_feature('patches'); + if ($patch_max && !defined $file_name && + !gitweb_check_feature('email-privacy')) { + if ($patch_max < 0 || @commitlist <= $patch_max) { + $paging_nav .= " ⋅ " . + $cgi->a({-href => href(action=>"patches", -replay=>1)}, + "patches"); + } + } + + git_header_html(); + git_print_page_nav($fmt_name,'', $hash,$hash,$hash, $paging_nav); + if (defined $file_name) { + git_print_header_div('commit', esc_html($co{'title'}), $base); + } else { + git_print_header_div('summary', $project) + } + git_print_page_path($file_name, $ftype, $hash_base) + if (defined $file_name); + + $body_subr->(\@commitlist, 0, 99, $refs, $next_link, + $file_name, $file_hash, $ftype); + + git_footer_html(); +} + +sub git_log { + git_log_generic('log', \&git_log_body, + $hash, $hash_parent); +} + +sub git_commit { + $hash ||= $hash_base || "HEAD"; + my %co = parse_commit($hash) + or die_error(404, "Unknown commit object"); + + my $parent = $co{'parent'}; + my $parents = $co{'parents'}; # listref + + # we need to prepare $formats_nav before any parameter munging + my $formats_nav; + if (!defined $parent) { + # --root commitdiff + $formats_nav .= '(initial)'; + } elsif (@$parents == 1) { + # single parent commit + $formats_nav .= + '(parent: ' . + $cgi->a({-href => href(action=>"commit", + hash=>$parent)}, + esc_html(substr($parent, 0, 7))) . + ')'; + } else { + # merge commit + $formats_nav .= + '(merge: ' . 
+ join(' ', map { + $cgi->a({-href => href(action=>"commit", + hash=>$_)}, + esc_html(substr($_, 0, 7))); + } @$parents ) . + ')'; + } + if (gitweb_check_feature('patches') && @$parents <= 1 && + !gitweb_check_feature('email-privacy')) { + $formats_nav .= " | " . + $cgi->a({-href => href(action=>"patch", -replay=>1)}, + "patch"); + } + + if (!defined $parent) { + $parent = "--root"; + } + my @difftree; + open my $fd, "-|", git_cmd(), "diff-tree", '-r', "--no-commit-id", + @diff_opts, + (@$parents <= 1 ? $parent : '-c'), + $hash, "--" + or die_error(500, "Open git-diff-tree failed"); + @difftree = map { chomp; $_ } <$fd>; + close $fd or die_error(404, "Reading git-diff-tree failed"); + + # non-textual hash id's can be cached + my $expires; + if ($hash =~ m/^$oid_regex$/) { + $expires = "+1d"; + } + my $refs = git_get_references(); + my $ref = format_ref_marker($refs, $co{'id'}); + + git_header_html(undef, $expires); + git_print_page_nav('commit', '', + $hash, $co{'tree'}, $hash, + $formats_nav); + + if (defined $co{'parent'}) { + git_print_header_div('commitdiff', esc_html($co{'title'}) . $ref, $hash); + } else { + git_print_header_div('tree', esc_html($co{'title'}) . $ref, $co{'tree'}, $hash); + } + print "
\n" . + "\n"; + git_print_authorship_rows(\%co); + print "\n"; + print "" . + "" . + "" . + "" . + "\n"; + + foreach my $par (@$parents) { + print "" . + "" . + "" . + "" . + "\n"; + } + print "
commit$co{'id'}
tree" . + $cgi->a({-href => href(action=>"tree", hash=>$co{'tree'}, hash_base=>$hash), + class => "list"}, $co{'tree'}) . + "" . + $cgi->a({-href => href(action=>"tree", hash=>$co{'tree'}, hash_base=>$hash)}, + "tree"); + my $snapshot_links = format_snapshot_links($hash); + if (defined $snapshot_links) { + print " | " . $snapshot_links; + } + print "
parent" . + $cgi->a({-href => href(action=>"commit", hash=>$par), + class => "list"}, $par) . + "" . + $cgi->a({-href => href(action=>"commit", hash=>$par)}, "commit") . + " | " . + $cgi->a({-href => href(action=>"commitdiff", hash=>$hash, hash_parent=>$par)}, "diff") . + "
". + "
\n"; + + print "
\n"; + git_print_log($co{'comment'}); + print "
\n"; + + git_difftree_body(\@difftree, $hash, @$parents); + + git_footer_html(); +} + +sub git_object { + # object is defined by: + # - hash or hash_base alone + # - hash_base and file_name + my $type; + + # - hash or hash_base alone + if ($hash || ($hash_base && !defined $file_name)) { + my $object_id = $hash || $hash_base; + + open my $fd, "-|", quote_command( + git_cmd(), 'cat-file', '-t', $object_id) . ' 2> /dev/null' + or die_error(404, "Object does not exist"); + $type = <$fd>; + defined $type && chomp $type; + close $fd + or die_error(404, "Object does not exist"); + + # - hash_base and file_name + } elsif ($hash_base && defined $file_name) { + $file_name =~ s,/+$,,; + + system(git_cmd(), "cat-file", '-e', $hash_base) == 0 + or die_error(404, "Base object does not exist"); + + # here errors should not happen + open my $fd, "-|", git_cmd(), "ls-tree", $hash_base, "--", $file_name + or die_error(500, "Open git-ls-tree failed"); + my $line = <$fd>; + close $fd; + + #'100644 blob 0fa3f3a66fb6a137f6ec2c19351ed4d807070ffa panic.c' + unless ($line && $line =~ m/^([0-9]+) (.+) ($oid_regex)\t/) { + die_error(404, "File or directory for given base does not exist"); + } + $type = $2; + $hash = $3; + } else { + die_error(400, "Not enough information to find object"); + } + + print $cgi->redirect(-uri => href(action=>$type, -full=>1, + hash=>$hash, hash_base=>$hash_base, + file_name=>$file_name), + -status => '302 Found'); +} + +sub git_blobdiff { + my $format = shift || 'html'; + my $diff_style = $input_params{'diff_style'} || 'inline'; + + my $fd; + my @difftree; + my %diffinfo; + my $expires; + + # preparing $fd and %diffinfo for git_patchset_body + # new style URI + if (defined $hash_base && defined $hash_parent_base) { + if (defined $file_name) { + # read raw output + open $fd, "-|", git_cmd(), "diff-tree", '-r', @diff_opts, + $hash_parent_base, $hash_base, + "--", (defined $file_parent ? $file_parent : ()), $file_name + or die_error(500, "Open git-diff-tree failed"); + @difftree = map { chomp; $_ } <$fd>; + close $fd + or die_error(404, "Reading git-diff-tree failed"); + @difftree + or die_error(404, "Blob diff not found"); + + } elsif (defined $hash && + $hash =~ $oid_regex) { + # try to find filename from $hash + + # read filtered raw output + open $fd, "-|", git_cmd(), "diff-tree", '-r', @diff_opts, + $hash_parent_base, $hash_base, "--" + or die_error(500, "Open git-diff-tree failed"); + @difftree = + # ':100644 100644 03b21826... 3b93d5e7... M ls-files.c' + # $hash == to_id + grep { /^:[0-7]{6} [0-7]{6} $oid_regex $hash/ } + map { chomp; $_ } <$fd>; + close $fd + or die_error(404, "Reading git-diff-tree failed"); + @difftree + or die_error(404, "Blob diff not found"); + + } else { + die_error(400, "Missing one of the blob diff parameters"); + } + + if (@difftree > 1) { + die_error(400, "Ambiguous blob diff specification"); + } + + %diffinfo = parse_difftree_raw_line($difftree[0]); + $file_parent ||= $diffinfo{'from_file'} || $file_name; + $file_name ||= $diffinfo{'to_file'}; + + $hash_parent ||= $diffinfo{'from_id'}; + $hash ||= $diffinfo{'to_id'}; + + # non-textual hash id's can be cached + if ($hash_base =~ m/^$oid_regex$/ && + $hash_parent_base =~ m/^$oid_regex$/) { + $expires = '+1d'; + } + + # open patch output + open $fd, "-|", git_cmd(), "diff-tree", '-r', @diff_opts, + '-p', ($format eq 'html' ? "--full-index" : ()), + $hash_parent_base, $hash_base, + "--", (defined $file_parent ? 
$file_parent : ()), $file_name + or die_error(500, "Open git-diff-tree failed"); + } + + # old/legacy style URI -- not generated anymore since 1.4.3. + if (!%diffinfo) { + die_error('404 Not Found', "Missing one of the blob diff parameters") + } + + # header + if ($format eq 'html') { + my $formats_nav = + $cgi->a({-href => href(action=>"blobdiff_plain", -replay=>1)}, + "raw"); + $formats_nav .= diff_style_nav($diff_style); + git_header_html(undef, $expires); + if (defined $hash_base && (my %co = parse_commit($hash_base))) { + git_print_page_nav('','', $hash_base,$co{'tree'},$hash_base, $formats_nav); + git_print_header_div('commit', esc_html($co{'title'}), $hash_base); + } else { + print "

$formats_nav
\n"; + print "
".esc_html("$hash vs $hash_parent")."
\n"; + } + if (defined $file_name) { + git_print_page_path($file_name, "blob", $hash_base); + } else { + print "
\n"; + } + + } elsif ($format eq 'plain') { + print $cgi->header( + -type => 'text/plain', + -charset => 'utf-8', + -expires => $expires, + -content_disposition => 'inline; filename="' . "$file_name" . '.patch"'); + + print "X-Git-Url: " . $cgi->self_url() . "\n\n"; + + } else { + die_error(400, "Unknown blobdiff format"); + } + + # patch + if ($format eq 'html') { + print "
\n"; + + git_patchset_body($fd, $diff_style, + [ \%diffinfo ], $hash_base, $hash_parent_base); + close $fd; + + print "
\n"; # class="page_body" + git_footer_html(); + + } else { + while (my $line = <$fd>) { + $line =~ s!a/($hash|$hash_parent)!'a/'.esc_path($diffinfo{'from_file'})!eg; + $line =~ s!b/($hash|$hash_parent)!'b/'.esc_path($diffinfo{'to_file'})!eg; + + print $line; + + last if $line =~ m!^\+\+\+!; + } + local $/ = undef; + print <$fd>; + close $fd; + } +} + +sub git_blobdiff_plain { + git_blobdiff('plain'); +} + +# assumes that it is added as later part of already existing navigation, +# so it returns "| foo | bar" rather than just "foo | bar" +sub diff_style_nav { + my ($diff_style, $is_combined) = @_; + $diff_style ||= 'inline'; + + return "" if ($is_combined); + + my @styles = (inline => 'inline', 'sidebyside' => 'side by side'); + my %styles = @styles; + @styles = + @styles[ map { $_ * 2 } 0..$#styles/2 ]; + + return join '', + map { " | ".$_ } + map { + $_ eq $diff_style ? $styles{$_} : + $cgi->a({-href => href(-replay=>1, diff_style => $_)}, $styles{$_}) + } @styles; +} + +sub git_commitdiff { + my %params = @_; + my $format = $params{-format} || 'html'; + my $diff_style = $input_params{'diff_style'} || 'inline'; + + my ($patch_max) = gitweb_get_feature('patches'); + if ($format eq 'patch') { + die_error(403, "Patch view not allowed") unless $patch_max; + } + + $hash ||= $hash_base || "HEAD"; + my %co = parse_commit($hash) + or die_error(404, "Unknown commit object"); + + # choose format for commitdiff for merge + if (! defined $hash_parent && @{$co{'parents'}} > 1) { + $hash_parent = '--cc'; + } + # we need to prepare $formats_nav before almost any parameter munging + my $formats_nav; + if ($format eq 'html') { + $formats_nav = + $cgi->a({-href => href(action=>"commitdiff_plain", -replay=>1)}, + "raw"); + if ($patch_max && @{$co{'parents'}} <= 1 && + !gitweb_check_feature('email-privacy')) { + $formats_nav .= " | " . + $cgi->a({-href => href(action=>"patch", -replay=>1)}, + "patch"); + } + $formats_nav .= diff_style_nav($diff_style, @{$co{'parents'}} > 1); + + if (defined $hash_parent && + $hash_parent ne '-c' && $hash_parent ne '--cc') { + # commitdiff with two commits given + my $hash_parent_short = $hash_parent; + if ($hash_parent =~ m/^$oid_regex$/) { + $hash_parent_short = substr($hash_parent, 0, 7); + } + $formats_nav .= + ' (from'; + for (my $i = 0; $i < @{$co{'parents'}}; $i++) { + if ($co{'parents'}[$i] eq $hash_parent) { + $formats_nav .= ' parent ' . ($i+1); + last; + } + } + $formats_nav .= ': ' . + $cgi->a({-href => href(-replay=>1, + hash=>$hash_parent, hash_base=>undef)}, + esc_html($hash_parent_short)) . + ')'; + } elsif (!$co{'parent'}) { + # --root commitdiff + $formats_nav .= ' (initial)'; + } elsif (scalar @{$co{'parents'}} == 1) { + # single parent commit + $formats_nav .= + ' (parent: ' . + $cgi->a({-href => href(-replay=>1, + hash=>$co{'parent'}, hash_base=>undef)}, + esc_html(substr($co{'parent'}, 0, 7))) . + ')'; + } else { + # merge commit + if ($hash_parent eq '--cc') { + $formats_nav .= ' | ' . + $cgi->a({-href => href(-replay=>1, + hash=>$hash, hash_parent=>'-c')}, + 'combined'); + } else { # $hash_parent eq '-c' + $formats_nav .= ' | ' . + $cgi->a({-href => href(-replay=>1, + hash=>$hash, hash_parent=>'--cc')}, + 'compact'); + } + $formats_nav .= + ' (merge: ' . + join(' ', map { + $cgi->a({-href => href(-replay=>1, + hash=>$_, hash_base=>undef)}, + esc_html(substr($_, 0, 7))); + } @{$co{'parents'}} ) . 
+ ')'; + } + } + + my $hash_parent_param = $hash_parent; + if (!defined $hash_parent_param) { + # --cc for multiple parents, --root for parentless + $hash_parent_param = + @{$co{'parents'}} > 1 ? '--cc' : $co{'parent'} || '--root'; + } + + # read commitdiff + my $fd; + my @difftree; + if ($format eq 'html') { + open $fd, "-|", git_cmd(), "diff-tree", '-r', @diff_opts, + "--no-commit-id", "--patch-with-raw", "--full-index", + $hash_parent_param, $hash, "--" + or die_error(500, "Open git-diff-tree failed"); + + while (my $line = <$fd>) { + chomp $line; + # empty line ends raw part of diff-tree output + last unless $line; + push @difftree, scalar parse_difftree_raw_line($line); + } + + } elsif ($format eq 'plain') { + open $fd, "-|", git_cmd(), "diff-tree", '-r', @diff_opts, + '-p', $hash_parent_param, $hash, "--" + or die_error(500, "Open git-diff-tree failed"); + } elsif ($format eq 'patch') { + # For commit ranges, we limit the output to the number of + # patches specified in the 'patches' feature. + # For single commits, we limit the output to a single patch, + # diverging from the git-format-patch default. + my @commit_spec = (); + if ($hash_parent) { + if ($patch_max > 0) { + push @commit_spec, "-$patch_max"; + } + push @commit_spec, '-n', "$hash_parent..$hash"; + } else { + if ($params{-single}) { + push @commit_spec, '-1'; + } else { + if ($patch_max > 0) { + push @commit_spec, "-$patch_max"; + } + push @commit_spec, "-n"; + } + push @commit_spec, '--root', $hash; + } + open $fd, "-|", git_cmd(), "format-patch", @diff_opts, + '--encoding=utf8', '--stdout', @commit_spec + or die_error(500, "Open git-format-patch failed"); + } else { + die_error(400, "Unknown commitdiff format"); + } + + # non-textual hash id's can be cached + my $expires; + if ($hash =~ m/^$oid_regex$/) { + $expires = "+1d"; + } + + # write commit message + if ($format eq 'html') { + my $refs = git_get_references(); + my $ref = format_ref_marker($refs, $co{'id'}); + + git_header_html(undef, $expires); + git_print_page_nav('commitdiff','', $hash,$co{'tree'},$hash, $formats_nav); + git_print_header_div('commit', esc_html($co{'title'}) . $ref, $hash); + print "
\n" . + "\n"; + git_print_authorship_rows(\%co); + print "
". + "
\n"; + print "
\n"; + if (@{$co{'comment'}} > 1) { + print "
\n"; + git_print_log($co{'comment'}, -final_empty_line=> 1, -remove_title => 1); + print "
\n"; # class="log" + } + + } elsif ($format eq 'plain') { + my $refs = git_get_references("tags"); + my $tagname = git_get_rev_name_tags($hash); + my $filename = basename($project) . "-$hash.patch"; + + print $cgi->header( + -type => 'text/plain', + -charset => 'utf-8', + -expires => $expires, + -content_disposition => 'inline; filename="' . "$filename" . '"'); + my %ad = parse_date($co{'author_epoch'}, $co{'author_tz'}); + print "From: " . to_utf8($co{'author'}) . "\n"; + print "Date: $ad{'rfc2822'} ($ad{'tz_local'})\n"; + print "Subject: " . to_utf8($co{'title'}) . "\n"; + + print "X-Git-Tag: $tagname\n" if $tagname; + print "X-Git-Url: " . $cgi->self_url() . "\n\n"; + + foreach my $line (@{$co{'comment'}}) { + print to_utf8($line) . "\n"; + } + print "---\n\n"; + } elsif ($format eq 'patch') { + my $filename = basename($project) . "-$hash.patch"; + + print $cgi->header( + -type => 'text/plain', + -charset => 'utf-8', + -expires => $expires, + -content_disposition => 'inline; filename="' . "$filename" . '"'); + } + + # write patch + if ($format eq 'html') { + my $use_parents = !defined $hash_parent || + $hash_parent eq '-c' || $hash_parent eq '--cc'; + git_difftree_body(\@difftree, $hash, + $use_parents ? @{$co{'parents'}} : $hash_parent); + print "
\n"; + + git_patchset_body($fd, $diff_style, + \@difftree, $hash, + $use_parents ? @{$co{'parents'}} : $hash_parent); + close $fd; + print "
\n"; # class="page_body" + git_footer_html(); + + } elsif ($format eq 'plain') { + local $/ = undef; + print <$fd>; + close $fd + or print "Reading git-diff-tree failed\n"; + } elsif ($format eq 'patch') { + local $/ = undef; + print <$fd>; + close $fd + or print "Reading git-format-patch failed\n"; + } +} + +sub git_commitdiff_plain { + git_commitdiff(-format => 'plain'); +} + +# format-patch-style patches +sub git_patch { + git_commitdiff(-format => 'patch', -single => 1); +} + +sub git_patches { + git_commitdiff(-format => 'patch'); +} + +sub git_history { + git_log_generic('history', \&git_history_body, + $hash_base, $hash_parent_base, + $file_name, $hash); +} + +sub git_search { + $searchtype ||= 'commit'; + + # check if appropriate features are enabled + gitweb_check_feature('search') + or die_error(403, "Search is disabled"); + if ($searchtype eq 'pickaxe') { + # pickaxe may take all resources of your box and run for several minutes + # with every query - so decide by yourself how public you make this feature + gitweb_check_feature('pickaxe') + or die_error(403, "Pickaxe search is disabled"); + } + if ($searchtype eq 'grep') { + # grep search might be potentially CPU-intensive, too + gitweb_check_feature('grep') + or die_error(403, "Grep search is disabled"); + } + + if (!defined $searchtext) { + die_error(400, "Text field is empty"); + } + if (!defined $hash) { + $hash = git_get_head_hash($project); + } + my %co = parse_commit($hash); + if (!%co) { + die_error(404, "Unknown commit object"); + } + if (!defined $page) { + $page = 0; + } + + if ($searchtype eq 'commit' || + $searchtype eq 'author' || + $searchtype eq 'committer') { + git_search_message(%co); + } elsif ($searchtype eq 'pickaxe') { + git_search_changes(%co); + } elsif ($searchtype eq 'grep') { + git_search_files(%co); + } else { + die_error(400, "Unknown search type"); + } +} + +sub git_search_help { + git_header_html(); + git_print_page_nav('','', $hash,$hash,$hash); + print <Pattern is by default a normal string that is matched precisely (but without +regard to case, except in the case of pickaxe). However, when you check the re checkbox, +the pattern entered is recognized as the POSIX extended +regular expression (also case +insensitive).

+<dl>
+<dt><b>commit</b></dt>
+<dd>The commit messages and authorship information will be scanned for the given pattern.</dd>
+EOT
+	my $have_grep = gitweb_check_feature('grep');
+	if ($have_grep) {
+		print <<EOT;
+<dt><b>grep</b></dt>
+<dd>All files in the currently selected tree (HEAD unless you are explicitly browsing
+a different one) are searched for the given pattern. On large trees, this search can take
+a while and put some strain on the server, so please use it with consideration. Note that,
+due to a peculiarity of git-grep, matches are currently case-sensitive when regexp mode is
+turned off.</dd>
+EOT
+	}
+	print <<EOT;
+<dt><b>author</b></dt>
+<dd>Name and e-mail of the change author and the author date of the patch will be scanned for the given pattern.</dd>
+<dt><b>committer</b></dt>
+<dd>Name and e-mail of the committer and the date of the commit will be scanned for the given pattern.</dd>
+EOT
+	my $have_pickaxe = gitweb_check_feature('pickaxe');
+	if ($have_pickaxe) {
+		print <<EOT;
+<dt><b>pickaxe</b></dt>
+<dd>All commits that caused the string to appear or disappear from any file (changes that
+added, removed or "modified" the string) will be listed. This search can take a while and
+puts a lot of strain on the server, so please use it wisely. Note that this search is
+case-sensitive, since you may be interested even in changes that only change the case.</dd>
+EOT
+	}
+	print "</dl>
\n"; + git_footer_html(); +} + +sub git_shortlog { + git_log_generic('shortlog', \&git_shortlog_body, + $hash, $hash_parent); +} + +## ...................................................................... +## feeds (RSS, Atom; OPML) + +sub git_feed { + my $format = shift || 'atom'; + my $have_blame = gitweb_check_feature('blame'); + + # Atom: http://www.atomenabled.org/developers/syndication/ + # RSS: https://web.archive.org/web/20030729001534/http://www.notestips.com/80256B3A007F2692/1/NAMO5P9UPQ + if ($format ne 'rss' && $format ne 'atom') { + die_error(400, "Unknown web feed format"); + } + + # log/feed of current (HEAD) branch, log of given branch, history of file/directory + my $head = $hash || 'HEAD'; + my @commitlist = parse_commits($head, 150, 0, $file_name); + + my %latest_commit; + my %latest_date; + my $content_type = "application/$format+xml"; + if (defined $cgi->http('HTTP_ACCEPT') && + $cgi->Accept('text/xml') > $cgi->Accept($content_type)) { + # browser (feed reader) prefers text/xml + $content_type = 'text/xml'; + } + if (defined($commitlist[0])) { + %latest_commit = %{$commitlist[0]}; + my $latest_epoch = $latest_commit{'committer_epoch'}; + exit_if_unmodified_since($latest_epoch); + %latest_date = parse_date($latest_epoch, $latest_commit{'committer_tz'}); + } + print $cgi->header( + -type => $content_type, + -charset => 'utf-8', + %latest_date ? (-last_modified => $latest_date{'rfc2822'}) : (), + -status => '200 OK'); + + # Optimization: skip generating the body if client asks only + # for Last-Modified date. + return if ($cgi->request_method() eq 'HEAD'); + + # header variables + my $title = "$site_name - $project/$action"; + my $feed_type = 'log'; + if (defined $hash) { + $title .= " - '$hash'"; + $feed_type = 'branch log'; + if (defined $file_name) { + $title .= " :: $file_name"; + $feed_type = 'history'; + } + } elsif (defined $file_name) { + $title .= " - $file_name"; + $feed_type = 'history'; + } + $title .= " $feed_type"; + $title = esc_html($title); + my $descr = git_get_project_description($project); + if (defined $descr) { + $descr = esc_html($descr); + } else { + $descr = "$project " . + ($format eq 'rss' ? 'RSS' : 'Atom') . + " feed"; + } + my $owner = git_get_project_owner($project); + $owner = esc_html($owner); + + #header + my $alt_url; + if (defined $file_name) { + $alt_url = href(-full=>1, action=>"history", hash=>$hash, file_name=>$file_name); + } elsif (defined $hash) { + $alt_url = href(-full=>1, action=>"log", hash=>$hash); + } else { + $alt_url = href(-full=>1, action=>"summary"); + } + $alt_url = esc_attr($alt_url); + print qq!\n!; + if ($format eq 'rss') { + print < + +XML + print "$title\n" . + "$alt_url\n" . + "$descr\n" . + "en\n" . + # project owner is responsible for 'editorial' content + "$owner\n"; + if (defined $logo || defined $favicon) { + # prefer the logo to the favicon, since RSS + # doesn't allow both + my $img = esc_url($logo || $favicon); + print "\n" . + "$img\n" . + "$title\n" . + "$alt_url\n" . + "\n"; + } + if (%latest_date) { + print "$latest_date{'rfc2822'}\n"; + print "$latest_date{'rfc2822'}\n"; + } + print "gitweb v.$version/$git_version\n"; + } elsif ($format eq 'atom') { + print < +XML + print "$title\n" . + "$descr\n" . + '' . "\n" . + '' . "\n" . + "" . esc_url(href(-full=>1)) . "\n" . + # use project owner for feed author + "$owner\n"; + if (defined $favicon) { + print "" . esc_url($favicon) . "\n"; + } + if (defined $logo) { + # not twice as wide as tall: 72 x 27 pixels + print "" . esc_url($logo) . "\n"; + } + if (! 
%latest_date) { + # dummy date to keep the feed valid until commits trickle in: + print "1970-01-01T00:00:00Z\n"; + } else { + print "$latest_date{'iso-8601'}\n"; + } + print "gitweb\n"; + } + + # contents + for (my $i = 0; $i <= $#commitlist; $i++) { + my %co = %{$commitlist[$i]}; + my $commit = $co{'id'}; + # we read 150, we always show 30 and the ones more recent than 48 hours + if (($i >= 20) && ((time - $co{'committer_epoch'}) > 48*60*60)) { + last; + } + my %cd = parse_date($co{'committer_epoch'}, $co{'committer_tz'}); + + # get list of changed files + open my $fd, "-|", git_cmd(), "diff-tree", '-r', @diff_opts, + $co{'parent'} || "--root", + $co{'id'}, "--", (defined $file_name ? $file_name : ()) + or next; + my @difftree = map { chomp; $_ } <$fd>; + close $fd + or next; + + # print element (entry, item) + my $co_url = href(-full=>1, action=>"commitdiff", hash=>$commit); + if ($format eq 'rss') { + print "\n" . + "" . esc_html($co{'title'}) . "\n" . + "" . esc_html($co{'author'}) . "\n" . + "$cd{'rfc2822'}\n" . + "$co_url\n" . + "" . esc_html($co_url) . "\n" . + "" . esc_html($co{'title'}) . "\n" . + "" . + "\n" . + "" . esc_html($co{'title'}) . "\n" . + "$cd{'iso-8601'}\n" . + "\n" . + " " . esc_html($co{'author_name'}) . "\n"; + if ($co{'author_email'}) { + print " " . esc_html($co{'author_email'}) . "\n"; + } + print "\n" . + # use committer for contributor + "\n" . + " " . esc_html($co{'committer_name'}) . "\n"; + if ($co{'committer_email'}) { + print " " . esc_html($co{'committer_email'}) . "\n"; + } + print "\n" . + "$cd{'iso-8601'}\n" . + "\n" . + "" . esc_html($co_url) . "\n" . + "\n" . + "
\n"; + } + my $comment = $co{'comment'}; + print "
\n";
+		foreach my $line (@$comment) {
+			$line = esc_html($line);
+			print "$line\n";
+		}
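	# A rough sketch of the per-commit markup that the surrounding prints assemble
	# (the element names are assumed from the RSS/Atom conventions this code follows,
	# not quoted verbatim from the patch):
	#   RSS:  <item> <title> <author> <pubDate> <guid> <link> ... <![CDATA[ ... ]]> </item>
	#   Atom: <entry> <title> <updated> <author><name/></author> <contributor/>
	#         <content type="xhtml"> <div xmlns="http://www.w3.org/1999/xhtml">
	#           a <pre> block with the commit message, then a list of changed files
	#           with [D]iff / [B]lame / [H]istory links
	#         </div> </content> </entry>
	# The feeds themselves are reached through the usual gitweb actions, e.g.
	# ...?p=<project>;a=rss, ;a=atom, or ;a=opml for the project list.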
+		print "
    \n"; + foreach my $difftree_line (@difftree) { + my %difftree = parse_difftree_raw_line($difftree_line); + next if !$difftree{'from_id'}; + + my $file = $difftree{'file'} || $difftree{'to_file'}; + + print "
  • " . + "[" . + $cgi->a({-href => href(-full=>1, action=>"blobdiff", + hash=>$difftree{'to_id'}, hash_parent=>$difftree{'from_id'}, + hash_base=>$co{'id'}, hash_parent_base=>$co{'parent'}, + file_name=>$file, file_parent=>$difftree{'from_file'}), + -title => "diff"}, 'D'); + if ($have_blame) { + print $cgi->a({-href => href(-full=>1, action=>"blame", + file_name=>$file, hash_base=>$commit), + -title => "blame"}, 'B'); + } + # if this is not a feed of a file history + if (!defined $file_name || $file_name ne $file) { + print $cgi->a({-href => href(-full=>1, action=>"history", + file_name=>$file, hash=>$commit), + -title => "history"}, 'H'); + } + $file = esc_path($file); + print "] ". + "$file
  • \n"; + } + if ($format eq 'rss') { + print "
]]>\n" . + "\n" . + "\n"; + } elsif ($format eq 'atom') { + print "\n
\n" . + "
\n" . + "\n"; + } + } + + # end of feed + if ($format eq 'rss') { + print "
\n\n"; + } elsif ($format eq 'atom') { + print "\n"; + } +} + +sub git_rss { + git_feed('rss'); +} + +sub git_atom { + git_feed('atom'); +} + +sub git_opml { + my @list = git_get_projects_list($project_filter, $strict_export); + if (!@list) { + die_error(404, "No projects found"); + } + + print $cgi->header( + -type => 'text/xml', + -charset => 'utf-8', + -content_disposition => 'inline; filename="opml.xml"'); + + my $title = esc_html($site_name); + my $filter = " within subdirectory "; + if (defined $project_filter) { + $filter .= esc_html($project_filter); + } else { + $filter = ""; + } + print < + + + $title OPML Export$filter + + + +XML + + foreach my $pr (@list) { + my %proj = %$pr; + my $head = git_get_head_hash($proj{'path'}); + if (!defined $head) { + next; + } + $git_dir = "$projectroot/$proj{'path'}"; + my %co = parse_commit($head); + if (!%co) { + next; + } + + my $path = esc_html(chop_str($proj{'path'}, 25, 5)); + my $rss = esc_attr(href('project' => $proj{'path'}, 'action' => 'rss', -full => 1)); + my $html = esc_attr(href('project' => $proj{'path'}, 'action' => 'summary', -full => 1)); + print "\n"; + } + print < + + +XML +} diff --git a/gitweb/meson.build b/gitweb/meson.build new file mode 100644 index 0000000..88a54b4 --- /dev/null +++ b/gitweb/meson.build @@ -0,0 +1,89 @@ +gitweb_config = configuration_data() +gitweb_config.set_quoted('PERL_PATH', target_perl.full_path()) +gitweb_config.set_quoted('CSSMIN', '') +gitweb_config.set_quoted('JSMIN', '') +gitweb_config.set_quoted('GIT_BINDIR', get_option('prefix') / get_option('bindir')) +gitweb_config.set_quoted('GITWEB_CONFIG', get_option('gitweb_config')) +gitweb_config.set_quoted('GITWEB_CONFIG_SYSTEM', get_option('gitweb_config_system')) +gitweb_config.set_quoted('GITWEB_CONFIG_COMMON', get_option('gitweb_config_common')) +gitweb_config.set_quoted('GITWEB_HOME_LINK_STR', get_option('gitweb_home_link_str')) +gitweb_config.set_quoted('GITWEB_SITENAME', get_option('gitweb_sitename')) +gitweb_config.set_quoted('GITWEB_PROJECTROOT', get_option('gitweb_projectroot')) +gitweb_config.set_quoted('GITWEB_PROJECT_MAXDEPTH', get_option('gitweb_project_maxdepth')) +gitweb_config.set_quoted('GITWEB_EXPORT_OK', get_option('gitweb_export_ok')) +gitweb_config.set_quoted('GITWEB_STRICT_EXPORT', get_option('gitweb_strict_export')) +gitweb_config.set_quoted('GITWEB_BASE_URL', get_option('gitweb_base_url')) +gitweb_config.set_quoted('GITWEB_LIST', get_option('gitweb_list')) +gitweb_config.set_quoted('GITWEB_HOMETEXT', get_option('gitweb_hometext')) +gitweb_config.set_quoted('GITWEB_CSS', get_option('gitweb_css')) +gitweb_config.set_quoted('GITWEB_LOGO', get_option('gitweb_logo')) +gitweb_config.set_quoted('GITWEB_FAVICON', get_option('gitweb_favicon')) +gitweb_config.set_quoted('GITWEB_JS', get_option('gitweb_js')) +gitweb_config.set_quoted('GITWEB_SITE_HTML_HEAD_STRING', get_option('gitweb_site_html_head_string')) +gitweb_config.set_quoted('GITWEB_SITE_HEADER', get_option('gitweb_site_header')) +gitweb_config.set_quoted('GITWEB_SITE_FOOTER', get_option('gitweb_site_footer')) +gitweb_config.set_quoted('HIGHLIGHT_BIN', get_option('highlight_bin')) + +configure_file( + input: 'GITWEB-BUILD-OPTIONS.in', + output: 'GITWEB-BUILD-OPTIONS', + configuration: gitweb_config, +) + +test_dependencies += custom_target( + input: 'gitweb.perl', + output: 'gitweb.cgi', + command: [ + shell, + meson.current_source_dir() / 'generate-gitweb-cgi.sh', + meson.current_build_dir() / 'GITWEB-BUILD-OPTIONS', + git_version_file.full_path(), + '@INPUT@', + '@OUTPUT@', + 
], + install: true, + install_dir: get_option('datadir') / 'gitweb', + depends: [git_version_file], +) + +javascript_files = [ + meson.current_source_dir() / 'static/js/adjust-timezone.js', + meson.current_source_dir() / 'static/js/blame_incremental.js', + meson.current_source_dir() / 'static/js/javascript-detection.js', + meson.current_source_dir() / 'static/js/lib/common-lib.js', + meson.current_source_dir() / 'static/js/lib/cookies.js', + meson.current_source_dir() / 'static/js/lib/datetime.js', +] + +test_dependencies += custom_target( + input: javascript_files, + output: 'gitweb.js', + command: [ + shell, + meson.current_source_dir() / 'generate-gitweb-js.sh', + '@OUTPUT@', + ] + javascript_files, + install: true, + install_dir: get_option('datadir') / 'gitweb/static', +) + +foreach asset : [ + 'static/git-favicon.png', + 'static/git-logo.png', + 'static/gitweb.css', +] + if meson.version().version_compare('>=1.3.0') + fs.copyfile(asset, + install: true, + install_dir: get_option('datadir') / 'gitweb' / fs.parent(asset), + ) + else + configure_file( + input: asset, + output: fs.stem(asset), + copy: true, + install: true, + install_dir: get_option('datadir') / 'gitweb' / fs.parent(asset), + ) + endif +endforeach diff --git a/gitweb/static/git-favicon.png b/gitweb/static/git-favicon.png new file mode 100644 index 0000000000000000000000000000000000000000..aae35a70e70351fe6dcb3e905e2e388cf0cb0ac3 GIT binary patch literal 115 zcmeAS@N?(olHy`uVBq!ia0y~yU=RRdCT0c(hNQXTpBNYzI0Jk_Tn{iXFf=gy|Np=1 zPYnkH1B0rki(?4K_2dXv7M>d&Y^;209E}3eyNtBE60@>WBw84X1Gh3VOgy3-eKNmn RAp-*ggQu&X%Q~loCIIV}9(@1+ literal 0 HcmV?d00001 diff --git a/gitweb/static/git-logo.png b/gitweb/static/git-logo.png new file mode 100644 index 0000000000000000000000000000000000000000..f4ede2e944868b9a08401dafeb2b944c7166fd0a GIT binary patch literal 207 zcmeAS@N?(olHy`uVBq!ia0y~yVDMmIV36iuW?*1=p=qhZz`!68;1lBd|NsAlgxC%1 zS1~j&oI88`00YB|7tg+b|2CCpxjO>`Ly@P8V@SoVw^J{2H7JNUuP^uh|G(dRZMz9a z$ph!P(-oVUnO(b2F7s5;o!Rihg!f?mMW%~kFE%kZ_=Pwd5?6=vNFW?*1o N@O1TaS?83{1OT9kP}2Ya literal 0 HcmV?d00001 diff --git a/gitweb/static/gitweb.css b/gitweb/static/gitweb.css new file mode 100644 index 0000000..48d2e51 --- /dev/null +++ b/gitweb/static/gitweb.css @@ -0,0 +1,686 @@ +body { + font-family: sans-serif; + font-size: small; + border: solid #d9d8d1; + border-width: 1px; + margin: 10px; + background-color: #ffffff; + color: #000000; +} + +a { + color: #0000cc; +} + +a:hover, a:visited, a:active { + color: #880000; +} + +span.cntrl { + border: dashed #aaaaaa; + border-width: 1px; + padding: 0px 2px 0px 2px; + margin: 0px 2px 0px 2px; +} + +img.logo { + float: right; + border-width: 0px; +} + +img.avatar { + vertical-align: middle; +} + +img.blob { + max-height: 100%; + max-width: 100%; +} + +a.list img.avatar { + border-style: none; +} + +div.page_header { + height: 25px; + padding: 8px; + font-size: 150%; + font-weight: bold; + background-color: #d9d8d1; +} + +div.page_header a:visited, a.header { + color: #0000cc; +} + +div.page_header a:hover { + color: #880000; +} + +div.page_nav { + padding: 8px; +} + +div.page_nav a:visited { + color: #0000cc; +} + +div.page_path { + padding: 8px; + font-weight: bold; + border: solid #d9d8d1; + border-width: 0px 0px 1px; +} + +div.page_footer { + height: 22px; + padding: 4px 8px; + background-color: #d9d8d1; +} + +div.page_footer_text { + line-height: 22px; + float: left; + color: #555555; + font-style: italic; +} + +div#generating_info { + margin: 4px; + font-size: smaller; + text-align: center; + color: 
#505050; +} + +div.page_body { + padding: 8px; + font-family: monospace; +} + +div.title, a.title { + display: block; + padding: 6px 8px; + font-weight: bold; + background-color: #edece6; + text-decoration: none; + color: #000000; +} + +div.readme { + padding: 8px; +} + +a.title:hover { + background-color: #d9d8d1; +} + +div.title_text { + padding: 6px 0px; + border: solid #d9d8d1; + border-width: 0px 0px 1px; + font-family: monospace; +} + +div.log_body { + padding: 8px 8px 8px 150px; +} + +span.age { + position: relative; + float: left; + width: 142px; + font-style: italic; +} + +span.signoff { + color: #888888; +} + +div.log_link { + padding: 0px 8px; + font-size: 70%; + font-family: sans-serif; + font-style: normal; + position: relative; + float: left; + width: 136px; +} + +div.list_head { + padding: 6px 8px 4px; + border: solid #d9d8d1; + border-width: 1px 0px 0px; + font-style: italic; +} + +.author_date, .author { + font-style: italic; +} + +div.author_date { + padding: 8px; + border: solid #d9d8d1; + border-width: 0px 0px 1px 0px; +} + +a.list { + text-decoration: none; + color: #000000; +} + +a.subject, a.name { + font-weight: bold; +} + +table.tags a.subject { + font-weight: normal; +} + +a.list:hover { + text-decoration: underline; + color: #880000; +} + +a.text { + text-decoration: none; + color: #0000cc; +} + +a.text:visited { + text-decoration: none; + color: #880000; +} + +a.text:hover { + text-decoration: underline; + color: #880000; +} + +table { + padding: 8px 4px; + border-spacing: 0; +} + +table.diff_tree { + font-family: monospace; +} + +table.combined.diff_tree th { + text-align: center; +} + +table.combined.diff_tree td { + padding-right: 24px; +} + +table.combined.diff_tree th.link, +table.combined.diff_tree td.link { + padding: 0px 2px; +} + +table.combined.diff_tree td.nochange a { + color: #6666ff; +} + +table.combined.diff_tree td.nochange a:hover, +table.combined.diff_tree td.nochange a:visited { + color: #d06666; +} + +table.blame { + border-collapse: collapse; +} + +table.blame td { + padding: 0px 5px; + font-size: 100%; + vertical-align: top; +} + +th { + padding: 2px 5px; + font-size: 100%; + text-align: left; +} + +/* do not change row style on hover for 'blame' view */ +tr.light, +table.blame .light:hover { + background-color: #ffffff; +} + +tr.dark, +table.blame .dark:hover { + background-color: #f6f6f0; +} + +/* currently both use the same, but it can change */ +tr.light:hover, +tr.dark:hover { + background-color: #edece6; +} + +/* boundary commits in 'blame' view */ +/* and commits without "previous" */ +tr.boundary td.sha1, +tr.no-previous td.linenr { + font-weight: bold; +} + +/* for 'blame_incremental', during processing */ +tr.color1 { background-color: #f6fff6; } +tr.color2 { background-color: #f6f6ff; } +tr.color3 { background-color: #fff6f6; } + +td { + padding: 2px 5px; + font-size: 100%; + vertical-align: top; +} + +td.link, td.selflink { + padding: 2px 5px; + font-family: sans-serif; + font-size: 70%; +} + +td.selflink { + padding-right: 0px; +} + +td.sha1 { + font-family: monospace; +} + +.error { + color: red; + background-color: yellow; +} + +td.current_head { + text-decoration: underline; +} + +td.category { + background-color: #d9d8d1; + border-top: 1px solid #000000; + border-left: 1px solid #000000; + font-weight: bold; +} + +table.diff_tree span.file_status.new { + color: #008000; +} + +table.diff_tree span.file_status.deleted { + color: #c00000; +} + +table.diff_tree span.file_status.moved, +table.diff_tree 
span.file_status.mode_chnge { + color: #777777; +} + +table.diff_tree span.file_status.copied { + color: #70a070; +} + +/* noage: "No commits" */ +table.project_list td.noage { + color: #808080; + font-style: italic; +} + +/* age2: 60*60*24*2 <= age */ +table.project_list td.age2, table.blame td.age2 { + font-style: italic; +} + +/* age1: 60*60*2 <= age < 60*60*24*2 */ +table.project_list td.age1 { + color: #009900; + font-style: italic; +} + +table.blame td.age1 { + color: #009900; + background: transparent; +} + +/* age0: age < 60*60*2 */ +table.project_list td.age0 { + color: #009900; + font-style: italic; + font-weight: bold; +} + +table.blame td.age0 { + color: #009900; + background: transparent; + font-weight: bold; +} + +td.pre, div.pre, div.diff { + font-family: monospace; + font-size: 12px; + white-space: pre; +} + +td.mode { + font-family: monospace; +} + +/* progress of blame_interactive */ +div#progress_bar { + height: 2px; + margin-bottom: -2px; + background-color: #d8d9d0; +} +div#progress_info { + float: right; + text-align: right; +} + +/* format of (optional) objects size in 'tree' view */ +td.size { + font-family: monospace; + text-align: right; +} + +/* styling of diffs (patchsets): commitdiff and blobdiff views */ +div.diff.header, +div.diff.extended_header { + white-space: normal; +} + +div.diff.header { + font-weight: bold; + + background-color: #edece6; + + margin-top: 4px; + padding: 4px 0px 2px 0px; + border: solid #d9d8d1; + border-width: 1px 0px 1px 0px; +} + +div.diff.header a.path { + text-decoration: underline; +} + +div.diff.extended_header, +div.diff.extended_header a.path, +div.diff.extended_header a.hash { + color: #777777; +} + +div.diff.extended_header .info { + color: #b0b0b0; +} + +div.diff.extended_header { + background-color: #f6f5ee; + padding: 2px 0px 2px 0px; +} + +div.diff a.list, +div.diff a.path, +div.diff a.hash { + text-decoration: none; +} + +div.diff a.list:hover, +div.diff a.path:hover, +div.diff a.hash:hover { + text-decoration: underline; +} + +div.diff.to_file a.path, +div.diff.to_file { + color: #007000; +} + +div.diff.add { + color: #008800; +} + +div.diff.add span.marked { + background-color: #aaffaa; +} + +div.diff.from_file a.path, +div.diff.from_file { + color: #aa0000; +} + +div.diff.rem { + color: #cc0000; +} + +div.diff.rem span.marked { + background-color: #ffaaaa; +} + +div.diff.chunk_header a, +div.diff.chunk_header { + color: #990099; +} + +div.diff.chunk_header { + border: dotted #ffe0ff; + border-width: 1px 0px 0px 0px; + margin-top: 2px; +} + +div.diff.chunk_header span.chunk_info { + background-color: #ffeeff; +} + +div.diff.chunk_header span.section { + color: #aa22aa; +} + +div.diff.incomplete { + color: #cccccc; +} + +div.diff.nodifferences { + font-weight: bold; + color: #600000; +} + +/* side-by-side diff */ +div.chunk_block { + overflow: hidden; +} + +div.chunk_block div.old { + float: left; + width: 50%; + overflow: hidden; +} + +div.chunk_block div.new { + margin-left: 50%; + width: 50%; +} + +div.chunk_block.rem div.old div.diff.rem { + background-color: #fff5f5; +} +div.chunk_block.add div.new div.diff.add { + background-color: #f8fff8; +} +div.chunk_block.chg div div.diff { + background-color: #fffff0; +} +div.chunk_block.ctx div div.diff.ctx { + color: #404040; +} + + +div.index_include { + border: solid #d9d8d1; + border-width: 0px 0px 1px; + padding: 12px 8px; +} + +div.search { + font-size: 100%; + font-weight: normal; + margin: 4px 8px; + float: right; + top: 56px; + right: 12px +} + +div.projsearch { + 
text-align: center; + margin: 20px 0px; +} + +div.projsearch form { + margin-bottom: 2px; +} + +td.linenr { + text-align: right; +} + +a.linenr { + color: #999999; + text-decoration: none +} + +a.rss_logo { + float: right; + padding: 3px 5px; + line-height: 10px; + border: 1px solid; + border-color: #fcc7a5 #7d3302 #3e1a01 #ff954e; + color: #ffffff; + background-color: #ff6600; + font-weight: bold; + font-family: sans-serif; + font-size: 70%; + text-align: center; + text-decoration: none; +} + +a.rss_logo:hover { + background-color: #ee5500; +} + +a.rss_logo.generic { + background-color: #ff8800; +} + +a.rss_logo.generic:hover { + background-color: #ee7700; +} + +span.refs span { + padding: 0px 4px; + font-size: 70%; + font-weight: normal; + border: 1px solid; + background-color: #ffaaff; + border-color: #ffccff #ff00ee #ff00ee #ffccff; +} + +span.refs span a { + text-decoration: none; + color: inherit; +} + +span.refs span a:hover { + text-decoration: underline; +} + +span.refs span.indirect { + font-style: italic; +} + +span.refs span.ref { + background-color: #aaaaff; + border-color: #ccccff #0033cc #0033cc #ccccff; +} + +span.refs span.tag { + background-color: #ffffaa; + border-color: #ffffcc #ffee00 #ffee00 #ffffcc; +} + +span.refs span.head { + background-color: #aaffaa; + border-color: #ccffcc #00cc33 #00cc33 #ccffcc; +} + +span.atnight { + color: #cc0000; +} + +span.match { + color: #e00000; +} + +div.binary { + font-style: italic; +} + +div.remote { + margin: .5em; + border: 1px solid #d9d8d1; + display: inline-block; +} + +/* JavaScript-based timezone manipulation */ + +.popup { /* timezone selection UI */ + position: absolute; + /* "top: 0; right: 0;" would be better, if not for bugs in browsers */ + top: 0; left: 0; + border: 1px solid; + padding: 2px; + background-color: #f0f0f0; + font-style: normal; + color: #000000; + cursor: auto; +} + +.close-button { /* close timezone selection UI without selecting */ + /* float doesn't work within absolutely positioned container, + * if width of container is not set explicitly */ + /* float: right; */ + position: absolute; + top: 0px; right: 0px; + border: 1px solid green; + margin: 1px 1px 1px 1px; + padding-bottom: 2px; + width: 12px; + height: 10px; + font-size: 9px; + font-weight: bold; + text-align: center; + background-color: #fff0f0; + cursor: pointer; +} + + +/* Style definition generated by highlight 2.4.5, http://andre-simon.de/doku/highlight/en/highlight.php */ + +/* Highlighting theme definition: */ + +.num { color:#2928ff; } +.esc { color:#ff00ff; } +.str { color:#ff0000; } +.dstr { color:#818100; } +.slc { color:#838183; font-style:italic; } +.com { color:#838183; font-style:italic; } +.dir { color:#008200; } +.sym { color:#000000; } +.line { color:#555555; } +.kwa { color:#000000; font-weight:bold; } +.kwb { color:#830000; } +.kwc { color:#000000; font-weight:bold; } +.kwd { color:#010181; } diff --git a/gitweb/static/js/README b/gitweb/static/js/README new file mode 100644 index 0000000..f8460ed --- /dev/null +++ b/gitweb/static/js/README @@ -0,0 +1,20 @@ +GIT web interface (gitweb) - JavaScript +======================================= + +This directory holds JavaScript code used by gitweb (GIT web interface). +Scripts from there would be concatenated together in the order specified +by gitweb/Makefile into gitweb/static/gitweb.js, during building of +gitweb/gitweb.cgi (during gitweb building). The resulting file (or its +minification) would then be installed / deployed together with gitweb. 
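If an installation serves a minified copy instead (the JSMIN / CSSMIN build settings mirrored in gitweb/meson.build exist for that purpose), gitweb can be pointed at whichever file is actually deployed from its configuration file. A minimal sketch, assuming the minified file is published as static/gitweb.min.js; by default $javascript is filled in from the GITWEB_JS build setting:

    # gitweb.conf (Perl) -- the path below is an example only
    our $javascript = "static/gitweb.min.js";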
+ +Scripts in 'lib/' subdirectory compose generic JavaScript library, +providing features required by gitweb but in no way limited to gitweb +only. In the future those scripts could be replaced by some JavaScript +library / framework, like e.g. jQuery, YUI, Prototype, MooTools, Dojo, +ExtJS, Script.aculo.us or SproutCore. + +All scripts that manipulate gitweb output should be put outside 'lib/', +directly in this directory ('gitweb/static/js/'). Those scripts would +have to be rewritten if gitweb moves to using some JavaScript library. + +See also comments in gitweb/Makefile. diff --git a/gitweb/static/js/adjust-timezone.js b/gitweb/static/js/adjust-timezone.js new file mode 100644 index 0000000..0c67779 --- /dev/null +++ b/gitweb/static/js/adjust-timezone.js @@ -0,0 +1,330 @@ +// Copyright (C) 2011, John 'Warthog9' Hawley +// 2011, Jakub Narebski + +/** + * @fileOverview Manipulate dates in gitweb output, adjusting timezone + * @license GPLv2 or later + */ + +/** + * Get common timezone, add UI for changing timezones, and adjust + * dates to use requested common timezone. + * + * This function is called during onload event (added to window.onload). + * + * @param {String} tzDefault: default timezone, if there is no cookie + * @param {Object} tzCookieInfo: object literal with info about cookie to store timezone + * @param {String} tzCookieInfo.name: name of cookie to store timezone + * @param {String} tzClassName: denotes elements with date to be adjusted + */ +function onloadTZSetup(tzDefault, tzCookieInfo, tzClassName) { + var tzCookieTZ = getCookie(tzCookieInfo.name, tzCookieInfo); + var tz = tzDefault; + + if (tzCookieTZ) { + // set timezone to value saved in a cookie + tz = tzCookieTZ; + // refresh cookie, so its expiration counts from last use of gitweb + setCookie(tzCookieInfo.name, tzCookieTZ, tzCookieInfo); + } + + // add UI for changing timezone + addChangeTZ(tz, tzCookieInfo, tzClassName); + + // server-side of gitweb produces datetime in UTC, + // so if tz is 'utc' there is no need for changes + var nochange = tz === 'utc'; + + // adjust dates to use specified common timezone + fixDatetimeTZ(tz, tzClassName, nochange); +} + + +/* ...................................................................... */ +/* Changing dates to use requested timezone */ + +/** + * Replace RFC-2822 dates contained in SPAN elements with tzClassName + * CSS class with equivalent dates in given timezone. 
+ * + * @param {String} tz: numeric timezone in '(-|+)HHMM' format, or 'utc', or 'local' + * @param {String} tzClassName: specifies elements to be changed + * @param {Boolean} nochange: markup for timezone change, but don't change it + */ +function fixDatetimeTZ(tz, tzClassName, nochange) { + // sanity check, method should be ensured by common-lib.js + if (!document.getElementsByClassName) { + return; + } + + // translate to timezone in '(-|+)HHMM' format + tz = normalizeTimezoneInfo(tz); + + // NOTE: result of getElementsByClassName should probably be cached + var classesFound = document.getElementsByClassName(tzClassName, "span"); + for (var i = 0, len = classesFound.length; i < len; i++) { + var curElement = classesFound[i]; + + curElement.title = 'Click to change timezone'; + if (!nochange) { + // we use *.firstChild.data (W3C DOM) instead of *.innerHTML + // as the latter doesn't always work everywhere in every browser + var epoch = parseRFC2822Date(curElement.firstChild.data); + var adjusted = formatDateRFC2882(epoch, tz); + + curElement.firstChild.data = adjusted; + } + } +} + + +/* ...................................................................... */ +/* Adding triggers, generating timezone menu, displaying and hiding */ + +/** + * Adds triggers for UI to change common timezone used for dates in + * gitweb output: it marks up and/or creates item to click to invoke + * timezone change UI, creates timezone UI fragment to be attached, + * and installs appropriate onclick trigger (via event delegation). + * + * @param {String} tzSelected: pre-selected timezone, + * 'utc' or 'local' or '(-|+)HHMM' + * @param {Object} tzCookieInfo: object literal with info about cookie to store timezone + * @param {String} tzClassName: specifies elements to install trigger + */ +function addChangeTZ(tzSelected, tzCookieInfo, tzClassName) { + // make link to timezone UI discoverable + addCssRule('.'+tzClassName + ':hover', + 'text-decoration: underline; cursor: help;'); + + // create form for selecting timezone (to be saved in a cookie) + var tzSelectFragment = document.createDocumentFragment(); + tzSelectFragment = createChangeTZForm(tzSelectFragment, + tzSelected, tzCookieInfo, tzClassName); + + // event delegation handler for timezone selection UI (clicking on entry) + // see http://www.nczonline.net/blog/2009/06/30/event-delegation-in-javascript/ + // assumes that there is no existing document.onclick handler + document.onclick = function onclickHandler(event) { + //IE doesn't pass in the event object + event = event || window.event; + + //IE uses srcElement as the target + var target = event.target || event.srcElement; + + switch (target.className) { + case tzClassName: + // don't display timezone menu if it is already displayed + if (tzSelectFragment.childNodes.length > 0) { + displayChangeTZForm(target, tzSelectFragment); + } + break; + } // end switch + }; +} + +/** + * Create DocumentFragment with UI for changing common timezone in + * which dates are shown in. + * + * @param {DocumentFragment} documentFragment: where attach UI + * @param {String} tzSelected: default (pre-selected) timezone + * @param {Object} tzCookieInfo: object literal with info about cookie to store timezone + * @returns {DocumentFragment} + */ +function createChangeTZForm(documentFragment, tzSelected, tzCookieInfo, tzClassName) { + var div = document.createElement("div"); + div.className = 'popup'; + + /* '
<div class="close-button">X</div>
' */ + var closeButton = document.createElement('div'); + closeButton.className = 'close-button'; + closeButton.title = '(click on this box to close)'; + closeButton.appendChild(document.createTextNode('X')); + closeButton.onclick = closeTZFormHandler(documentFragment, tzClassName); + div.appendChild(closeButton); + + /* 'Select timezone:
' */ + div.appendChild(document.createTextNode('Select timezone: ')); + var br = document.createElement('br'); + br.clear = 'all'; + div.appendChild(br); + + /* '' */ + var select = document.createElement("select"); + select.name = "tzoffset"; + //select.style.clear = 'all'; + select.appendChild(generateTZOptions(tzSelected)); + select.onchange = selectTZHandler(documentFragment, tzCookieInfo, tzClassName); + div.appendChild(select); + + documentFragment.appendChild(div); + + return documentFragment; +} + + +/** + * Hide (remove from DOM) timezone change UI, ensuring that it is not + * garbage collected and that it can be re-enabled later. + * + * @param {DocumentFragment} documentFragment: contains detached UI + * @param {HTMLSelectElement} target: select element inside of UI + * @param {String} tzClassName: specifies element where UI was installed + * @returns {DocumentFragment} documentFragment + */ +function removeChangeTZForm(documentFragment, target, tzClassName) { + // find containing element, where we appended timezone selection UI + // `target' is somewhere inside timezone menu + var container = target.parentNode, popup = target; + while (container && + container.className !== tzClassName) { + popup = container; + container = container.parentNode; + } + // safety check if we found correct container, + // and if it isn't deleted already + if (!container || !popup || + container.className !== tzClassName || + popup.className !== 'popup') { + return documentFragment; + } + + // timezone selection UI was appended as last child + // see also displayChangeTZForm function + var removed = popup.parentNode.removeChild(popup); + if (documentFragment.firstChild !== removed) { // the only child + // re-append it so it would be available for next time + documentFragment.appendChild(removed); + } + // all of inline style was added by this script + // it is not really needed to remove it, but it is a good practice + container.removeAttribute('style'); + + return documentFragment; +} + + +/** + * Display UI for changing common timezone for dates in gitweb output. + * To be used from 'onclick' event handler. + * + * @param {HTMLElement} target: where to install/display UI + * @param {DocumentFragment} tzSelectFragment: timezone selection UI + */ +function displayChangeTZForm(target, tzSelectFragment) { + // for absolute positioning to be related to target element + target.style.position = 'relative'; + target.style.display = 'inline-block'; + + // show/display UI for changing timezone + target.appendChild(tzSelectFragment); +} + + +/* ...................................................................... */ +/* List of timezones for timezone selection menu */ + +/** + * Generate list of timezones for creating timezone select UI + * + * @returns {Object[]} list of e.g. { value: '+0100', descr: 'GMT+01:00' } + */ +function generateTZList() { + var timezones = [ + { value: "utc", descr: "UTC/GMT"}, + { value: "local", descr: "Local (per browser)"} + ]; + + // generate all full hour timezones (no fractional timezones) + for (var x = -12, idx = timezones.length; x <= +14; x++, idx++) { + var hours = (x >= 0 ? '+' : '-') + padLeft(x >=0 ? 
x : -x, 2); + timezones[idx] = { value: hours + '00', descr: 'UTC' + hours + ':00'}; + if (x === 0) { + timezones[idx].descr = 'UTC\u00B100:00'; // 'UTC±00:00' + } + } + + return timezones; +} + +/** + * Generate elements for timezone select UI + * + * @param {String} tzSelected: default timezone + * @returns {DocumentFragment} list of options elements to appendChild + */ +function generateTZOptions(tzSelected) { + var elems = document.createDocumentFragment(); + var timezones = generateTZList(); + + for (var i = 0, len = timezones.length; i < len; i++) { + var tzone = timezones[i]; + var option = document.createElement("option"); + if (tzone.value === tzSelected) { + option.defaultSelected = true; + } + option.value = tzone.value; + option.appendChild(document.createTextNode(tzone.descr)); + + elems.appendChild(option); + } + + return elems; +} + + +/* ...................................................................... */ +/* Event handlers and/or their generators */ + +/** + * Create event handler that select timezone and closes timezone select UI. + * To be used as $('select[name="tzselect"]').onchange handler. + * + * @param {DocumentFragment} tzSelectFragment: timezone selection UI + * @param {Object} tzCookieInfo: object literal with info about cookie to store timezone + * @param {String} tzCookieInfo.name: name of cookie to save result of selection + * @param {String} tzClassName: specifies element where UI was installed + * @returns {Function} event handler + */ +function selectTZHandler(tzSelectFragment, tzCookieInfo, tzClassName) { + //return function selectTZ(event) { + return function (event) { + event = event || window.event; + var target = event.target || event.srcElement; + + var selected = target.options.item(target.selectedIndex); + removeChangeTZForm(tzSelectFragment, target, tzClassName); + + if (selected) { + selected.defaultSelected = true; + setCookie(tzCookieInfo.name, selected.value, tzCookieInfo); + fixDatetimeTZ(selected.value, tzClassName); + } + }; +} + +/** + * Create event handler that closes timezone select UI. + * To be used e.g. as $('.closebutton').onclick handler. + * + * @param {DocumentFragment} tzSelectFragment: timezone selection UI + * @param {String} tzClassName: specifies element where UI was installed + * @returns {Function} event handler + */ +function closeTZFormHandler(tzSelectFragment, tzClassName) { + //return function closeTZForm(event) { + return function (event) { + event = event || window.event; + var target = event.target || event.srcElement; + + removeChangeTZForm(tzSelectFragment, target, tzClassName); + }; +} + +/* end of adjust-timezone.js */ diff --git a/gitweb/static/js/blame_incremental.js b/gitweb/static/js/blame_incremental.js new file mode 100644 index 0000000..e100d82 --- /dev/null +++ b/gitweb/static/js/blame_incremental.js @@ -0,0 +1,692 @@ +// Copyright (C) 2007, Fredrik Kuivinen +// 2007, Petr Baudis +// 2008-2011, Jakub Narebski + +/** + * @fileOverview JavaScript side of Ajax-y 'blame_incremental' view in gitweb + * @license GPLv2 or later + */ + +/* ============================================================ */ +/* + * This code uses DOM methods instead of (nonstandard) innerHTML + * to modify page. + * + * innerHTML is non-standard IE extension, though supported by most + * browsers; however Firefox up to version 1.5 didn't implement it in + * a strict mode (application/xml+xhtml mimetype). 
+ * + * Also my simple benchmarks show that using elem.firstChild.data = + * 'content' is slightly faster than elem.innerHTML = 'content'. It + * is however more fragile (text element fragment must exists), and + * less feature-rich (we cannot add HTML). + * + * Note that DOM 2 HTML is preferred over generic DOM 2 Core; the + * equivalent using DOM 2 Core is usually shown in comments. + */ + + +/* ............................................................ */ +/* utility/helper functions (and variables) */ + +var projectUrl; // partial query + separator ('?' or ';') + +// 'commits' is an associative map. It maps SHA1s to Commit objects. +var commits = {}; + +/** + * constructor for Commit objects, used in 'blame' + * @class Represents a blamed commit + * @param {String} sha1: SHA-1 identifier of a commit + */ +function Commit(sha1) { + if (this instanceof Commit) { + this.sha1 = sha1; + this.nprevious = 0; /* number of 'previous', effective parents */ + } else { + return new Commit(sha1); + } +} + +/* ............................................................ */ +/* progress info, timing, error reporting */ + +var blamedLines = 0; +var totalLines = '???'; +var div_progress_bar; +var div_progress_info; + +/** + * Detects how many lines does a blamed file have, + * This information is used in progress info + * + * @returns {Number|String} Number of lines in file, or string '...' + */ +function countLines() { + var table = + document.getElementById('blame_table') || + document.getElementsByTagName('table')[0]; + + if (table) { + return table.getElementsByTagName('tr').length - 1; // for header + } else { + return '...'; + } +} + +/** + * update progress info and length (width) of progress bar + * + * @globals div_progress_info, div_progress_bar, blamedLines, totalLines + */ +function updateProgressInfo() { + if (!div_progress_info) { + div_progress_info = document.getElementById('progress_info'); + } + if (!div_progress_bar) { + div_progress_bar = document.getElementById('progress_bar'); + } + if (!div_progress_info && !div_progress_bar) { + return; + } + + var percentage = Math.floor(100.0*blamedLines/totalLines); + + if (div_progress_info) { + div_progress_info.firstChild.data = blamedLines + ' / ' + totalLines + + ' (' + padLeftStr(percentage, 3, '\u00A0') + '%)'; + } + + if (div_progress_bar) { + //div_progress_bar.setAttribute('style', 'width: '+percentage+'%;'); + div_progress_bar.style.width = percentage + '%'; + } +} + + +var t_interval_server = ''; +var cmds_server = ''; +var t0 = new Date(); + +/** + * write how much it took to generate data, and to run script + * + * @globals t0, t_interval_server, cmds_server + */ +function writeTimeInterval() { + var info_time = document.getElementById('generating_time'); + if (!info_time || !t_interval_server) { + return; + } + var t1 = new Date(); + info_time.firstChild.data += ' + (' + + t_interval_server + ' sec server blame_data / ' + + (t1.getTime() - t0.getTime())/1000 + ' sec client JavaScript)'; + + var info_cmds = document.getElementById('generating_cmd'); + if (!info_time || !cmds_server) { + return; + } + info_cmds.firstChild.data += ' + ' + cmds_server; +} + +/** + * show an error message alert to user within page (in progress info area) + * @param {String} str: plain text error message (no HTML) + * + * @globals div_progress_info + */ +function errorInfo(str) { + if (!div_progress_info) { + div_progress_info = document.getElementById('progress_info'); + } + if (div_progress_info) { + div_progress_info.className = 'error'; + 
div_progress_info.firstChild.data = str; + } +} + +/* ............................................................ */ +/* coloring rows during blame_data (git blame --incremental) run */ + +/** + * used to extract N from 'colorN', where N is a number, + * @constant + */ +var colorRe = /\bcolor([0-9]*)\b/; + +/** + * return N if , otherwise return null + * (some browsers require CSS class names to begin with letter) + * + * @param {HTMLElement} tr: table row element to check + * @param {String} tr.className: 'class' attribute of tr element + * @returns {Number|null} N if tr.className == 'colorN', otherwise null + * + * @globals colorRe + */ +function getColorNo(tr) { + if (!tr) { + return null; + } + var className = tr.className; + if (className) { + var match = colorRe.exec(className); + if (match) { + return parseInt(match[1], 10); + } + } + return null; +} + +var colorsFreq = [0, 0, 0]; +/** + * return one of given possible colors (currently least used one) + * example: chooseColorNoFrom(2, 3) returns 2 or 3 + * + * @param {Number[]} arguments: one or more numbers + * assumes that 1 <= arguments[i] <= colorsFreq.length + * @returns {Number} Least used color number from arguments + * @globals colorsFreq + */ +function chooseColorNoFrom() { + // choose the color which is least used + var colorNo = arguments[0]; + for (var i = 1; i < arguments.length; i++) { + if (colorsFreq[arguments[i]-1] < colorsFreq[colorNo-1]) { + colorNo = arguments[i]; + } + } + colorsFreq[colorNo-1]++; + return colorNo; +} + +/** + * given two neighbor elements, find color which would be different + * from color of both of neighbors; used to 3-color blame table + * + * @param {HTMLElement} tr_prev + * @param {HTMLElement} tr_next + * @returns {Number} color number N such that + * colorN != tr_prev.className && colorN != tr_next.className + */ +function findColorNo(tr_prev, tr_next) { + var color_prev = getColorNo(tr_prev); + var color_next = getColorNo(tr_next); + + + // neither of neighbors has color set + // THEN we can use any of 3 possible colors + if (!color_prev && !color_next) { + return chooseColorNoFrom(1,2,3); + } + + // either both neighbors have the same color, + // or only one of neighbors have color set + // THEN we can use any color except given + var color; + if (color_prev === color_next) { + color = color_prev; // = color_next; + } else if (!color_prev) { + color = color_next; + } else if (!color_next) { + color = color_prev; + } + if (color) { + return chooseColorNoFrom((color % 3) + 1, ((color+1) % 3) + 1); + } + + // neighbors have different colors + // THEN there is only one color left + return (3 - ((color_prev + color_next) % 3)); +} + +/* ............................................................ 
*/ +/* coloring rows like 'blame' after 'blame_data' finishes */ + +/** + * returns true if given row element (tr) is first in commit group + * to be used only after 'blame_data' finishes (after processing) + * + * @param {HTMLElement} tr: table row + * @returns {Boolean} true if TR is first in commit group + */ +function isStartOfGroup(tr) { + return tr.firstChild.className === 'sha1'; +} + +/** + * change colors to use zebra coloring (2 colors) instead of 3 colors + * concatenate neighbor commit groups belonging to the same commit + * + * @globals colorRe + */ +function fixColorsAndGroups() { + var colorClasses = ['light', 'dark']; + var linenum = 1; + var tr, prev_group; + var colorClass = 0; + var table = + document.getElementById('blame_table') || + document.getElementsByTagName('table')[0]; + + while ((tr = document.getElementById('l'+linenum))) { + // index origin is 0, which is table header; start from 1 + //while ((tr = table.rows[linenum])) { // <- it is slower + if (isStartOfGroup(tr, linenum, document)) { + if (prev_group && + prev_group.firstChild.firstChild.href === + tr.firstChild.firstChild.href) { + // we have to concatenate groups + var prev_rows = prev_group.firstChild.rowSpan || 1; + var curr_rows = tr.firstChild.rowSpan || 1; + prev_group.firstChild.rowSpan = prev_rows + curr_rows; + //tr.removeChild(tr.firstChild); + tr.deleteCell(0); // DOM2 HTML way + } else { + colorClass = (colorClass + 1) % 2; + prev_group = tr; + } + } + var tr_class = tr.className; + tr.className = tr_class.replace(colorRe, colorClasses[colorClass]); + linenum++; + } +} + + +/* ============================================================ */ +/* main part: parsing response */ + +/** + * Function called for each blame entry, as soon as it finishes. + * It updates page via DOM manipulation, adding sha1 info, etc. + * + * @param {Commit} commit: blamed commit + * @param {Object} group: object representing group of lines, + * which blame the same commit (blame entry) + * + * @globals blamedLines + */ +function handleLine(commit, group) { + /* + This is the structure of the HTML fragment we are working + with: + + + + 123 + # times (my ext3 doesn't). + + */ + + var resline = group.resline; + + // format date and time string only once per commit + if (!commit.info) { + /* e.g. 'Kay Sievers, 2005-08-07 21:49:46 +0200' */ + commit.info = commit.author + ', ' + + formatDateISOLocal(commit.authorTime, commit.authorTimezone); + } + + // color depends on group of lines, not only on blamed commit + var colorNo = findColorNo( + document.getElementById('l'+(resline-1)), + document.getElementById('l'+(resline+group.numlines)) + ); + + // loop over lines in commit group + for (var i = 0; i < group.numlines; i++, resline++) { + var tr = document.getElementById('l'+resline); + if (!tr) { + break; + } + /* + + + 123 + # times (my ext3 doesn't). + + */ + var td_sha1 = tr.firstChild; + var a_sha1 = td_sha1.firstChild; + var a_linenr = td_sha1.nextSibling.firstChild; + + /* */ + var tr_class = ''; + if (colorNo !== null) { + tr_class = 'color'+colorNo; + } + if (commit.boundary) { + tr_class += ' boundary'; + } + if (commit.nprevious === 0) { + tr_class += ' no-previous'; + } else if (commit.nprevious > 1) { + tr_class += ' multiple-previous'; + } + tr.className = tr_class; + + /* ? 
*/ + if (i === 0) { + td_sha1.title = commit.info; + td_sha1.rowSpan = group.numlines; + + a_sha1.href = projectUrl + 'a=commit;h=' + commit.sha1; + if (a_sha1.firstChild) { + a_sha1.firstChild.data = commit.sha1.substr(0, 8); + } else { + a_sha1.appendChild( + document.createTextNode(commit.sha1.substr(0, 8))); + } + if (group.numlines >= 2) { + var fragment = document.createDocumentFragment(); + var br = document.createElement("br"); + var match = commit.author.match(/\b([A-Z])\B/g); + if (match) { + var text = document.createTextNode( + match.join('')); + } + if (br && text) { + var elem = fragment || td_sha1; + elem.appendChild(br); + elem.appendChild(text); + if (fragment) { + td_sha1.appendChild(fragment); + } + } + } + } else { + //tr.removeChild(td_sha1); // DOM2 Core way + tr.deleteCell(0); // DOM2 HTML way + } + + /* 123 */ + var linenr_commit = + ('previous' in commit ? commit.previous : commit.sha1); + var linenr_filename = + ('file_parent' in commit ? commit.file_parent : commit.filename); + a_linenr.href = projectUrl + 'a=blame_incremental' + + ';hb=' + linenr_commit + + ';f=' + encodeURIComponent(linenr_filename) + + '#l' + (group.srcline + i); + + blamedLines++; + + //updateProgressInfo(); + } +} + +// ---------------------------------------------------------------------- + +/**#@+ + * @constant + */ +var sha1Re = /^([0-9a-f]{40}) ([0-9]+) ([0-9]+) ([0-9]+)/; +var infoRe = /^([a-z-]+) ?(.*)/; +var endRe = /^END ?([^ ]*) ?(.*)/; +/**@-*/ + +var curCommit = new Commit(); +var curGroup = {}; + +/** + * Parse output from 'git blame --incremental [...]', received via + * XMLHttpRequest from server (blamedataUrl), and call handleLine + * (which updates page) as soon as blame entry is completed. + * + * @param {String[]} lines: new complete lines from blamedata server + * + * @globals commits, curCommit, curGroup, t_interval_server, cmds_server + * @globals sha1Re, infoRe, endRe + */ +function processBlameLines(lines) { + var match; + + for (var i = 0, len = lines.length; i < len; i++) { + + if ((match = sha1Re.exec(lines[i]))) { + var sha1 = match[1]; + var srcline = parseInt(match[2], 10); + var resline = parseInt(match[3], 10); + var numlines = parseInt(match[4], 10); + + var c = commits[sha1]; + if (!c) { + c = new Commit(sha1); + commits[sha1] = c; + } + curCommit = c; + + curGroup.srcline = srcline; + curGroup.resline = resline; + curGroup.numlines = numlines; + + } else if ((match = infoRe.exec(lines[i]))) { + var info = match[1]; + var data = match[2]; + switch (info) { + case 'filename': + curCommit.filename = unquote(data); + // 'filename' information terminates the entry + handleLine(curCommit, curGroup); + updateProgressInfo(); + break; + case 'author': + curCommit.author = data; + break; + case 'author-time': + curCommit.authorTime = parseInt(data, 10); + break; + case 'author-tz': + curCommit.authorTimezone = data; + break; + case 'previous': + curCommit.nprevious++; + // store only first 'previous' header + if (!('previous' in curCommit)) { + var parts = data.split(' ', 2); + curCommit.previous = parts[0]; + curCommit.file_parent = unquote(parts[1]); + } + break; + case 'boundary': + curCommit.boundary = true; + break; + } // end switch + + } else if ((match = endRe.exec(lines[i]))) { + t_interval_server = match[1]; + cmds_server = match[2]; + + } else if (lines[i] !== '') { + // malformed line + + } // end if (match) + + } // end for (lines) +} + +/** + * Process new data and return pointer to end of processed part + * + * @param {String} unprocessed: new data 
(from nextReadPos) + * @param {Number} nextReadPos: end of last processed data + * @return {Number} end of processed data (new value for nextReadPos) + */ +function processData(unprocessed, nextReadPos) { + var lastLineEnd = unprocessed.lastIndexOf('\n'); + if (lastLineEnd !== -1) { + var lines = unprocessed.substring(0, lastLineEnd).split('\n'); + nextReadPos += lastLineEnd + 1 /* 1 == '\n'.length */; + + processBlameLines(lines); + } // end if + + return nextReadPos; +} + +/** + * Handle XMLHttpRequest errors + * + * @param {XMLHttpRequest} xhr: XMLHttpRequest object + * @param {Number} [xhr.pollTimer] ID of the timeout to clear + * + * @globals commits + */ +function handleError(xhr) { + errorInfo('Server error: ' + + xhr.status + ' - ' + (xhr.statusText || 'Error contacting server')); + + if (typeof xhr.pollTimer === "number") { + clearTimeout(xhr.pollTimer); + delete xhr.pollTimer; + } + commits = {}; // free memory +} + +/** + * Called after XMLHttpRequest finishes (loads) + * + * @param {XMLHttpRequest} xhr: XMLHttpRequest object + * @param {Number} [xhr.pollTimer] ID of the timeout to clear + * + * @globals commits + */ +function responseLoaded(xhr) { + if (typeof xhr.pollTimer === "number") { + clearTimeout(xhr.pollTimer); + delete xhr.pollTimer; + } + + fixColorsAndGroups(); + writeTimeInterval(); + commits = {}; // free memory +} + +/** + * handler for XMLHttpRequest onreadystatechange event + * @see startBlame + * + * @param {XMLHttpRequest} xhr: XMLHttpRequest object + * @param {Number} xhr.prevDataLength: previous value of xhr.responseText.length + * @param {Number} xhr.nextReadPos: start of unread part of xhr.responseText + * @param {Number} [xhr.pollTimer] ID of the timeout (to reset or cancel) + * @param {Boolean} fromTimer: if handler was called from timer + */ +function handleResponse(xhr, fromTimer) { + + /* + * xhr.readyState + * + * Value Constant (W3C) Description + * ------------------------------------------------------------------- + * 0 UNSENT open() has not been called yet. + * 1 OPENED send() has not been called yet. + * 2 HEADERS_RECEIVED send() has been called, and headers + * and status are available. + * 3 LOADING Downloading; responseText holds partial data. + * 4 DONE The operation is complete. + */ + + if (xhr.readyState !== 4 && xhr.readyState !== 3) { + return; + } + + // the server returned error + // try ... catch block is to work around bug in IE8 + try { + if (xhr.readyState === 3 && xhr.status !== 200) { + return; + } + } catch (e) { + return; + } + if (xhr.readyState === 4 && xhr.status !== 200) { + handleError(xhr); + return; + } + + // In konqueror xhr.responseText is sometimes null here... + if (xhr.responseText === null) { + return; + } + + + // extract new whole (complete) lines, and process them + if (xhr.prevDataLength !== xhr.responseText.length) { + xhr.prevDataLength = xhr.responseText.length; + var unprocessed = xhr.responseText.substring(xhr.nextReadPos); + xhr.nextReadPos = processData(unprocessed, xhr.nextReadPos); + } + + // did we finish work? 
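+	// (readyState 4 means the whole response has arrived; every complete
+	//  line has already been handled by the block above, so all that is
+	//  left is the cleanup done in responseLoaded)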
+ if (xhr.readyState === 4) { + responseLoaded(xhr); + return; + } + + // if we get from timer, we have to restart it + // otherwise onreadystatechange gives us partial response, timer not needed + if (fromTimer) { + setTimeout(function () { + handleResponse(xhr, true); + }, 1000); + + } else if (typeof xhr.pollTimer === "number") { + clearTimeout(xhr.pollTimer); + delete xhr.pollTimer; + } +} + +// ============================================================ +// ------------------------------------------------------------ + +/** + * Incrementally update line data in blame_incremental view in gitweb. + * + * @param {String} blamedataUrl: URL to server script generating blame data. + * @param {String} bUrl: partial URL to project, used to generate links. + * + * Called from 'blame_incremental' view after loading table with + * file contents, a base for blame view. + * + * @globals t0, projectUrl, div_progress_bar, totalLines +*/ +function startBlame(blamedataUrl, bUrl) { + + var xhr = createRequestObject(); + if (!xhr) { + errorInfo('ERROR: XMLHttpRequest not supported'); + return; + } + + t0 = new Date(); + projectUrl = bUrl + (bUrl.indexOf('?') === -1 ? '?' : ';'); + if ((div_progress_bar = document.getElementById('progress_bar'))) { + //div_progress_bar.setAttribute('style', 'width: 100%;'); + div_progress_bar.style.cssText = 'width: 100%;'; + } + totalLines = countLines(); + updateProgressInfo(); + + /* add extra properties to xhr object to help processing response */ + xhr.prevDataLength = -1; // used to detect if we have new data + xhr.nextReadPos = 0; // where unread part of response starts + + xhr.onreadystatechange = function () { + handleResponse(xhr, false); + }; + + xhr.open('GET', blamedataUrl); + xhr.setRequestHeader('Accept', 'text/plain'); + xhr.send(null); + + // not all browsers call onreadystatechange event on each server flush + // poll response using timer every second to handle this issue + xhr.pollTimer = setTimeout(function () { + handleResponse(xhr, true); + }, 1000); +} + +/* end of blame_incremental.js */ diff --git a/gitweb/static/js/javascript-detection.js b/gitweb/static/js/javascript-detection.js new file mode 100644 index 0000000..fa2596f --- /dev/null +++ b/gitweb/static/js/javascript-detection.js @@ -0,0 +1,43 @@ +// Copyright (C) 2007, Fredrik Kuivinen +// 2007, Petr Baudis +// 2008-2011, Jakub Narebski + +/** + * @fileOverview Detect if JavaScript is enabled, and pass it to server-side + * @license GPLv2 or later + */ + + +/* ============================================================ */ +/* Manipulating links */ + +/** + * used to check if link has 'js' query parameter already (at end), + * and other reasons to not add 'js=1' param at the end of link + * @constant + */ +var jsExceptionsRe = /[;?]js=[01](#.*)?$/; + +/** + * Add '?js=1' or ';js=1' to the end of every link in the document + * that doesn't have 'js' query parameter set already. + * + * Links with 'js=1' lead to JavaScript version of given action, if it + * exists (currently there is only 'blame_incremental' for 'blame') + * + * To be used as `window.onload` handler + * + * @globals jsExceptionsRe + */ +function fixLinks() { + var allLinks = document.getElementsByTagName("a") || document.links; + for (var i = 0, len = allLinks.length; i < len; i++) { + var link = allLinks[i]; + if (!jsExceptionsRe.test(link)) { + link.href = link.href.replace(/(#|$)/, + (link.href.indexOf('?') === -1 ? '?' 
: ';') + 'js=1$1'); + } + } +} + +/* end of javascript-detection.js */ diff --git a/gitweb/static/js/lib/common-lib.js b/gitweb/static/js/lib/common-lib.js new file mode 100644 index 0000000..99e3eb8 --- /dev/null +++ b/gitweb/static/js/lib/common-lib.js @@ -0,0 +1,224 @@ +// Copyright (C) 2007, Fredrik Kuivinen +// 2007, Petr Baudis +// 2008-2011, Jakub Narebski + +/** + * @fileOverview Generic JavaScript code (helper functions) + * @license GPLv2 or later + */ + + +/* ============================================================ */ +/* ............................................................ */ +/* Padding */ + +/** + * pad INPUT on the left with STR that is assumed to have visible + * width of single character (for example nonbreakable spaces), + * to WIDTH characters + * + * example: padLeftStr(12, 3, '\u00A0') == '\u00A012' + * ('\u00A0' is nonbreakable space) + * + * @param {Number|String} input: number to pad + * @param {Number} width: visible width of output + * @param {String} str: string to prefix to string, defaults to '\u00A0' + * @returns {String} INPUT prefixed with STR x (WIDTH - INPUT.length) + */ +function padLeftStr(input, width, str) { + var prefix = ''; + if (typeof str === 'undefined') { + ch = '\u00A0'; // using ' ' doesn't work in all browsers + } + + width -= input.toString().length; + while (width > 0) { + prefix += str; + width--; + } + return prefix + input; +} + +/** + * Pad INPUT on the left to WIDTH, using given padding character CH, + * for example padLeft('a', 3, '_') is '__a' + * padLeft(4, 2) is '04' (same as padLeft(4, 2, '0')) + * + * @param {String} input: input value converted to string. + * @param {Number} width: desired length of output. + * @param {String} ch: single character to prefix to string, defaults to '0'. + * + * @returns {String} Modified string, at least SIZE length. + */ +function padLeft(input, width, ch) { + var s = input + ""; + if (typeof ch === 'undefined') { + ch = '0'; + } + + while (s.length < width) { + s = ch + s; + } + return s; +} + + +/* ............................................................ */ +/* Handling browser incompatibilities */ + +/** + * Create XMLHttpRequest object in cross-browser way + * @returns XMLHttpRequest object, or null + */ +function createRequestObject() { + try { + return new XMLHttpRequest(); + } catch (e) {} + try { + return window.createRequest(); + } catch (e) {} + try { + return new ActiveXObject("Msxml2.XMLHTTP"); + } catch (e) {} + try { + return new ActiveXObject("Microsoft.XMLHTTP"); + } catch (e) {} + + return null; +} + + +/** + * Insert rule giving specified STYLE to given SELECTOR at the end of + * first CSS stylesheet. + * + * @param {String} selector: CSS selector, e.g. '.class' + * @param {String} style: rule contents, e.g. 'background-color: red;' + */ +function addCssRule(selector, style) { + var stylesheet = document.styleSheets[0]; + + var theRules = []; + if (stylesheet.cssRules) { // W3C way + theRules = stylesheet.cssRules; + } else if (stylesheet.rules) { // IE way + theRules = stylesheet.rules; + } + + if (stylesheet.insertRule) { // W3C way + stylesheet.insertRule(selector + ' { ' + style + ' }', theRules.length); + } else if (stylesheet.addRule) { // IE way + stylesheet.addRule(selector, style); + } +} + + +/* ............................................................ */ +/* Support for legacy browsers */ + +/** + * Provides getElementsByClassName method, if there is no native + * implementation of this method. 
+ * + * NOTE that there are limits and differences compared to native + * getElementsByClassName as defined by e.g.: + * https://developer.mozilla.org/en/DOM/document.getElementsByClassName + * https://www.whatwg.org/specs/web-apps/current-work/multipage/dom.html#dom-getelementsbyclassname + * https://www.whatwg.org/specs/web-apps/current-work/multipage/dom.html#dom-document-getelementsbyclassname + * + * Namely, this implementation supports only single class name as + * argument and not set of space-separated tokens representing classes, + * it returns Array of nodes rather than live NodeList, and has + * additional optional argument where you can limit search to given tags + * (via getElementsByTagName). + * + * Based on + * https://code.google.com/p/getelementsbyclassname/ + * http://www.dustindiaz.com/getelementsbyclass/ + * https://stackoverflow.com/questions/1818865/do-we-have-getelementsbyclassname-in-javascript + * + * See also https://johnresig.com/blog/getelementsbyclassname-speed-comparison/ + * + * @param {String} class: name of _single_ class to find + * @param {String} [taghint] limit search to given tags + * @returns {Node[]} array of matching elements + */ +if (!('getElementsByClassName' in document)) { + document.getElementsByClassName = function (classname, taghint) { + taghint = taghint || "*"; + var elements = (taghint === "*" && document.all) ? + document.all : + document.getElementsByTagName(taghint); + var pattern = new RegExp("(^|\\s)" + classname + "(\\s|$)"); + var matches= []; + for (var i = 0, j = 0, n = elements.length; i < n; i++) { + var el= elements[i]; + if (el.className && pattern.test(el.className)) { + // matches.push(el); + matches[j] = el; + j++; + } + } + return matches; + }; +} // end if + + +/* ............................................................ */ +/* unquoting/unescaping filenames */ + +/**#@+ + * @constant + */ +var escCodeRe = /\\([^0-7]|[0-7]{1,3})/g; +var octEscRe = /^[0-7]{1,3}$/; +var maybeQuotedRe = /^\"(.*)\"$/; +/**#@-*/ + +/** + * unquote maybe C-quoted filename (as used by git, i.e. it is + * in double quotes '"' if there is any escape character used) + * e.g. 'aa' -> 'aa', '"a\ta"' -> 'a a' + * + * @param {String} str: git-quoted string + * @returns {String} Unquoted and unescaped string + * + * @globals escCodeRe, octEscRe, maybeQuotedRe + */ +function unquote(str) { + function unq(seq) { + var es = { + // character escape codes, aka escape sequences (from C) + // replacements are to some extent JavaScript specific + t: "\t", // tab (HT, TAB) + n: "\n", // newline (NL) + r: "\r", // return (CR) + f: "\f", // form feed (FF) + b: "\b", // backspace (BS) + a: "\x07", // alarm (bell) (BEL) + e: "\x1B", // escape (ESC) + v: "\v" // vertical tab (VT) + }; + + if (seq.search(octEscRe) !== -1) { + // octal char sequence + return String.fromCharCode(parseInt(seq, 8)); + } else if (seq in es) { + // C escape sequence, aka character escape code + return es[seq]; + } + // quoted ordinary character + return seq; + } + + var match = str.match(maybeQuotedRe); + if (match) { + str = match[1]; + // perhaps str = eval('"'+str+'"'); would be enough? 
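+		// (the explicit replace below keeps the recognized escapes limited
+		//  to the table in unq() and avoids feeding arbitrary quoted
+		//  content to eval())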
+ str = str.replace(escCodeRe, + function (substr, p1, offset, s) { return unq(p1); }); + } + return str; +} + +/* end of common-lib.js */ diff --git a/gitweb/static/js/lib/cookies.js b/gitweb/static/js/lib/cookies.js new file mode 100644 index 0000000..66b9a07 --- /dev/null +++ b/gitweb/static/js/lib/cookies.js @@ -0,0 +1,114 @@ +/** + * @fileOverview Accessing cookies from JavaScript + * @license GPLv2 or later + */ + +/* + * Based on subsection "Cookies in JavaScript" of "Professional + * JavaScript for Web Developers" by Nicholas C. Zakas and cookie + * plugin from jQuery (dual licensed under the MIT and GPL licenses) + */ + + +/** + * Create a cookie with the given name and value, + * and other optional parameters. + * + * @example + * setCookie('foo', 'bar'); // will be deleted when browser exits + * setCookie('foo', 'bar', { expires: new Date(Date.parse('Jan 1, 2012')) }); + * setCookie('foo', 'bar', { expires: 7 }); // 7 days = 1 week + * setCookie('foo', 'bar', { expires: 14, path: '/' }); + * + * @param {String} sName: Unique name of a cookie (letters, numbers, underscores). + * @param {String} sValue: The string value stored in a cookie. + * @param {Object} [options] An object literal containing key/value pairs + * to provide optional cookie attributes. + * @param {String|Number|Date} [options.expires] Either literal string to be used as cookie expires, + * or an integer specifying the expiration date from now on in days, + * or a Date object to be used as cookie expiration date. + * If a negative value is specified or a date in the past), + * the cookie will be deleted. + * If set to null or omitted, the cookie will be a session cookie + * and will not be retained when the browser exits. + * @param {String} [options.path] Restrict access of a cookie to particular directory + * (default: path of page that created the cookie). + * @param {String} [options.domain] Override what web sites are allowed to access cookie + * (default: domain of page that created the cookie). + * @param {Boolean} [options.secure] If true, the secure attribute of the cookie will be set + * and the cookie would be accessible only from secure sites + * (cookie transmission will require secure protocol like HTTPS). + */ +function setCookie(sName, sValue, options) { + options = options || {}; + if (sValue === null) { + sValue = ''; + option.expires = 'delete'; + } + + var sCookie = sName + '=' + encodeURIComponent(sValue); + + if (options.expires) { + var oExpires = options.expires, sDate; + if (oExpires === 'delete') { + sDate = 'Thu, 01 Jan 1970 00:00:00 GMT'; + } else if (typeof oExpires === 'string') { + sDate = oExpires; + } else { + var oDate; + if (typeof oExpires === 'number') { + oDate = new Date(); + oDate.setTime(oDate.getTime() + (oExpires * 24 * 60 * 60 * 1000)); // days to ms + } else { + oDate = oExpires; + } + sDate = oDate.toGMTString(); + } + sCookie += '; expires=' + sDate; + } + + if (options.path) { + sCookie += '; path=' + (options.path); + } + if (options.domain) { + sCookie += '; domain=' + (options.domain); + } + if (options.secure) { + sCookie += '; secure'; + } + document.cookie = sCookie; +} + +/** + * Get the value of a cookie with the given name. + * + * @param {String} sName: Unique name of a cookie (letters, numbers, underscores) + * @returns {String|null} The string value stored in a cookie + */ +function getCookie(sName) { + var sRE = '(?:; )?' 
+ sName + '=([^;]*);?'; + var oRE = new RegExp(sRE); + if (oRE.test(document.cookie)) { + return decodeURIComponent(RegExp['$1']); + } else { + return null; + } +} + +/** + * Delete cookie with given name + * + * @param {String} sName: Unique name of a cookie (letters, numbers, underscores) + * @param {Object} [options] An object literal containing key/value pairs + * to provide optional cookie attributes. + * @param {String} [options.path] Must be the same as when setting a cookie + * @param {String} [options.domain] Must be the same as when setting a cookie + */ +function deleteCookie(sName, options) { + options = options || {}; + options.expires = 'delete'; + + setCookie(sName, '', options); +} + +/* end of cookies.js */ diff --git a/gitweb/static/js/lib/datetime.js b/gitweb/static/js/lib/datetime.js new file mode 100644 index 0000000..f78c60a --- /dev/null +++ b/gitweb/static/js/lib/datetime.js @@ -0,0 +1,176 @@ +// Copyright (C) 2007, Fredrik Kuivinen +// 2007, Petr Baudis +// 2008-2011, Jakub Narebski + +/** + * @fileOverview Datetime manipulation: parsing and formatting + * @license GPLv2 or later + */ + + +/* ............................................................ */ +/* parsing and retrieving datetime related information */ + +/** + * used to extract hours and minutes from timezone info, e.g '-0900' + * @constant + */ +var tzRe = /^([+\-])([0-9][0-9])([0-9][0-9])$/; + +/** + * convert numeric timezone +/-ZZZZ to offset from UTC in seconds + * + * @param {String} timezoneInfo: numeric timezone '(+|-)HHMM' + * @returns {Number} offset from UTC in seconds for timezone + * + * @globals tzRe + */ +function timezoneOffset(timezoneInfo) { + var match = tzRe.exec(timezoneInfo); + var tz_sign = (match[1] === '-' ? -1 : +1); + var tz_hour = parseInt(match[2],10); + var tz_min = parseInt(match[3],10); + + return tz_sign*(((tz_hour*60) + tz_min)*60); +} + +/** + * return local (browser) timezone as offset from UTC in seconds + * + * @returns {Number} offset from UTC in seconds for local timezone + */ +function localTimezoneOffset() { + // getTimezoneOffset returns the time-zone offset from UTC, + // in _minutes_, for the current locale + return ((new Date()).getTimezoneOffset() * -60); +} + +/** + * return local (browser) timezone as numeric timezone '(+|-)HHMM' + * + * @returns {String} locat timezone as -/+ZZZZ + */ +function localTimezoneInfo() { + var tzOffsetMinutes = (new Date()).getTimezoneOffset() * -1; + + return formatTimezoneInfo(0, tzOffsetMinutes); +} + + +/** + * Parse RFC-2822 date into a Unix timestamp (into epoch) + * + * @param {String} date: date in RFC-2822 format, e.g. 'Thu, 21 Dec 2000 16:01:07 +0200' + * @returns {Number} epoch i.e. seconds since '00:00:00 1970-01-01 UTC' + */ +function parseRFC2822Date(date) { + // Date.parse accepts the IETF standard (RFC 1123 Section 5.2.14 and elsewhere) + // date syntax, which is defined in RFC 2822 (obsoletes RFC 822) + // and returns number of _milli_seconds since January 1, 1970, 00:00:00 UTC + return Date.parse(date) / 1000; +} + + +/* ............................................................ */ +/* formatting date */ + +/** + * format timezone offset as numerical timezone '(+|-)HHMM' or '(+|-)HH:MM' + * + * @param {Number} hours: offset in hours, e.g. 2 for '+0200' + * @param {Number} [minutes] offset in minutes, e.g. 
30 for '-4030'; + * it is split into hours if not 0 <= minutes < 60, + * for example 1200 would give '+0100'; + * defaults to 0 + * @param {String} [sep] separator between hours and minutes part, + * default is '', might be ':' for W3CDTF (rfc-3339) + * @returns {String} timezone in '(+|-)HHMM' or '(+|-)HH:MM' format + */ +function formatTimezoneInfo(hours, minutes, sep) { + minutes = minutes || 0; // to be able to use formatTimezoneInfo(hh) + sep = sep || ''; // default format is +/-ZZZZ + + if (minutes < 0 || minutes > 59) { + hours = minutes > 0 ? Math.floor(minutes / 60) : Math.ceil(minutes / 60); + minutes = Math.abs(minutes - 60*hours); // sign of minutes is sign of hours + // NOTE: this works correctly because there is no UTC-00:30 timezone + } + + var tzSign = hours >= 0 ? '+' : '-'; + if (hours < 0) { + hours = -hours; // sign is stored in tzSign + } + + return tzSign + padLeft(hours, 2, '0') + sep + padLeft(minutes, 2, '0'); +} + +/** + * translate 'utc' and 'local' to numerical timezone + * @param {String} timezoneInfo: might be 'utc' or 'local' (browser) + */ +function normalizeTimezoneInfo(timezoneInfo) { + switch (timezoneInfo) { + case 'utc': + return '+0000'; + case 'local': // 'local' is browser timezone + return localTimezoneInfo(); + } + return timezoneInfo; +} + + +/** + * return date in local time formatted in iso-8601 like format + * 'yyyy-mm-dd HH:MM:SS +/-ZZZZ' e.g. '2005-08-07 21:49:46 +0200' + * + * @param {Number} epoch: seconds since '00:00:00 1970-01-01 UTC' + * @param {String} timezoneInfo: numeric timezone '(+|-)HHMM' + * @returns {String} date in local time in iso-8601 like format + */ +function formatDateISOLocal(epoch, timezoneInfo) { + // date corrected by timezone + var localDate = new Date(1000 * (epoch + + timezoneOffset(timezoneInfo))); + var localDateStr = // e.g. '2005-08-07' + localDate.getUTCFullYear() + '-' + + padLeft(localDate.getUTCMonth()+1, 2, '0') + '-' + + padLeft(localDate.getUTCDate(), 2, '0'); + var localTimeStr = // e.g. '21:49:46' + padLeft(localDate.getUTCHours(), 2, '0') + ':' + + padLeft(localDate.getUTCMinutes(), 2, '0') + ':' + + padLeft(localDate.getUTCSeconds(), 2, '0'); + + return localDateStr + ' ' + localTimeStr + ' ' + timezoneInfo; +} + +/** + * return date in local time formatted in rfc-2822 format + * e.g. 'Thu, 21 Dec 2000 16:01:07 +0200' + * + * @param {Number} epoch: seconds since '00:00:00 1970-01-01 UTC' + * @param {String} timezoneInfo: numeric timezone '(+|-)HHMM' + * @param {Boolean} [padDay] e.g. 'Sun, 07 Aug' if true, 'Sun, 7 Aug' otherwise + * @returns {String} date in local time in rfc-2822 format + */ +function formatDateRFC2882(epoch, timezoneInfo, padDay) { + // A short textual representation of a month, three letters + var months = ["Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"]; + // A textual representation of a day, three letters + var days = ["Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"]; + // date corrected by timezone + var localDate = new Date(1000 * (epoch + + timezoneOffset(timezoneInfo))); + var localDateStr = // e.g. 'Sun, 7 Aug 2005' or 'Sun, 07 Aug 2005' + days[localDate.getUTCDay()] + ', ' + + (padDay ? padLeft(localDate.getUTCDate(),2,'0') : localDate.getUTCDate()) + ' ' + + months[localDate.getUTCMonth()] + ' ' + + localDate.getUTCFullYear(); + var localTimeStr = // e.g. 
'21:49:46' + padLeft(localDate.getUTCHours(), 2, '0') + ':' + + padLeft(localDate.getUTCMinutes(), 2, '0') + ':' + + padLeft(localDate.getUTCSeconds(), 2, '0'); + + return localDateStr + ' ' + localTimeStr + ' ' + timezoneInfo; +} + +/* end of datetime.js */ diff --git a/shared.mak b/shared.mak new file mode 100644 index 0000000..0e74920 --- /dev/null +++ b/shared.mak @@ -0,0 +1,132 @@ +### Remove GNU make implicit rules + +## This speeds things up since we don't need to look for and stat() a +## "foo.c,v" every time a rule referring to "foo.c" is in play. See +## "make -p -f/dev/null | grep ^%::'". +%:: %,v +%:: RCS/%,v +%:: RCS/% +%:: s.% +%:: SCCS/s.% + +## Likewise delete default $(SUFFIXES). See: +## +## info make --index-search=.SUFFIXES +.SUFFIXES: + +### Flags affecting all rules + +# A GNU make extension since gmake 3.72 (released in late 1994) to +# remove the target of rules if commands in those rules fail. The +# default is to only do that if make itself receives a signal. Affects +# all targets, see: +# +# info make --index-search=.DELETE_ON_ERROR +.DELETE_ON_ERROR: + +### Global variables + +## comma, empty, space: handy variables as these tokens are either +## special or can be hard to spot among other Makefile syntax. +comma := , +empty := +space := $(empty) $(empty) + +### Quieting +## common +QUIET_SUBDIR0 = +$(MAKE) -C # space to separate -C and subdir +QUIET_SUBDIR1 = + +ifneq ($(findstring w,$(firstword -$(MAKEFLAGS))),w) +PRINT_DIR = --no-print-directory +else # "make -w" +NO_SUBDIR = : +endif + +ifneq ($(findstring s,$(firstword -$(MAKEFLAGS))),s) +ifndef V +## common + QUIET_SUBDIR0 = +@subdir= + QUIET_SUBDIR1 = ;$(NO_SUBDIR) echo ' ' SUBDIR $$subdir; \ + $(MAKE) $(PRINT_DIR) -C $$subdir + + QUIET = @ + QUIET_GEN = @echo ' ' GEN $@; + + QUIET_MKDIR_P_PARENT = @echo ' ' MKDIR -p $(@D); + +## Used in "Makefile" + QUIET_CARGO = @echo ' ' CARGO $@; + QUIET_CC = @echo ' ' CC $@; + QUIET_AR = @echo ' ' AR $@; + QUIET_LINK = @echo ' ' LINK $@; + QUIET_BUILT_IN = @echo ' ' BUILTIN $@; + QUIET_CP = @echo ' ' CP $< $@; + QUIET_LNCP = @echo ' ' LN/CP $@; + QUIET_XGETTEXT = @echo ' ' XGETTEXT $@; + QUIET_MSGINIT = @echo ' ' MSGINIT $@; + QUIET_MSGFMT = @echo ' ' MSGFMT $@; + QUIET_MSGMERGE = @echo ' ' MSGMERGE $@; + QUIET_GCOV = @echo ' ' GCOV $@; + QUIET_SP = @echo ' ' SP $<; + QUIET_HDR = @echo ' ' HDR $(<:hcc=h); + QUIET_RC = @echo ' ' RC $@; + +## Used in "Makefile": SPATCH + QUIET_SPATCH = @echo ' ' SPATCH $< \>$@; + QUIET_SPATCH_TEST = @echo ' ' SPATCH TEST $(@:.build/%=%); + QUIET_SPATCH_CAT = @echo ' ' SPATCH CAT $(@:%.patch=%.d/)\*\*.patch \>$@; + +## Used in "Documentation/Makefile" + QUIET_ASCIIDOC = @echo ' ' ASCIIDOC $@; + QUIET_XMLTO = @echo ' ' XMLTO $@; + QUIET_DB2TEXI = @echo ' ' DB2TEXI $@; + QUIET_MAKEINFO = @echo ' ' MAKEINFO $@; + QUIET_DBLATEX = @echo ' ' DBLATEX $@; + QUIET_XSLTPROC = @echo ' ' XSLTPROC $@; + QUIET_GEN = @echo ' ' GEN $@; + QUIET_STDERR = 2> /dev/null + + QUIET_LINT_GITLINK = @echo ' ' LINT GITLINK $<; + QUIET_LINT_MANSEC = @echo ' ' LINT MAN SEC $<; + QUIET_LINT_DELIMSEC = @echo ' ' LINT DEL SEC $<; + QUIET_LINT_DOCSTYLE = @echo ' ' LINT DOCSTYLE $<; + QUIET_LINT_MANEND = @echo ' ' LINT MAN END $<; + + export V +endif +endif + +### Templates + +## mkdir_p_parent: lazily "mkdir -p" the path needed for a $@ +## file. Uses $(wildcard) to avoid the "mkdir -p" if it's not +## needed. 
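+## A minimal illustrative use (hypothetical target; the template itself
+## is defined below): start the recipe with the call so the parent
+## directory exists before the real command runs.
+##
+##	some/deep/dir/target.o: source.c
+##		$(call mkdir_p_parent_template)
+##		$(CC) -o $@ -c $<
+##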
+## +## Is racy, but in a good way; we might redundantly (and safely) +## "mkdir -p" when running in parallel, but won't need to exhaustively create +## individual rules for "a" -> "prefix" -> "dir" -> "file" if given a +## "a/prefix/dir/file". This can instead be inserted at the start of +## the "a/prefix/dir/file" rule. +define mkdir_p_parent_template +$(if $(wildcard $(@D)),,$(QUIET_MKDIR_P_PARENT)$(shell mkdir -p $(@D))) +endef + +## Getting sick of writing -L$(SOMELIBDIR) $(CC_LD_DYNPATH)$(SOMELIBDIR)? +## Write $(call libpath_template,$(SOMELIBDIR)) instead, perhaps? +## With CC_LD_DYNPATH set to either an empty string or to "-L", the +## the directory is not shown the second time. +define libpath_template +-L$(1) $(if $(filter-out -L,$(CC_LD_DYNPATH)),$(CC_LD_DYNPATH)$(1)) +endef + +# Populate build information into a file via GIT-VERSION-GEN. Requires the +# absolute path to the root source directory as well as input and output files +# as arguments, in that order. +define version_gen +GIT_BUILT_FROM_COMMIT="$(GIT_BUILT_FROM_COMMIT)" \ +GIT_DATE="$(GIT_DATE)" \ +GIT_USER_AGENT="$(GIT_USER_AGENT)" \ +GIT_VERSION="$(GIT_VERSION_OVERRIDE)" \ +$(SHELL_PATH) "$(1)/GIT-VERSION-GEN" "$(1)" "$(2)" "$(3)" +endef -- 2.50.1