dev-vcs/git: Sync with Gentoo

It's from Gentoo commit 37305aa8f6bd146c01c41ae9aca55b52c82c5193.
Flatcar Buildbot 2023-03-20 07:23:52 +00:00
parent 6761c2d61d
commit b3382292d2
5 changed files with 3 additions and 194 deletions


@@ -1,9 +1,9 @@
DIST git-2.39.1.tar.xz 7160744 BLAKE2B 5ebf583232da9f6a937ebba17858b9fa8b550e3d0d981ff5603f77673cce69bf9a3c1c18c61d4c5e6b2f629a8173eaef8a09c913961d175571e4e6b00d2b194d SHA512 b1821a814947f01adf98206a7e9a01da9daa617b1192e8ef6968b05af8d874f028fb26b5f828a9c48f734ef2c276f4d23bdc898ba46fb7aaa96dbe68081037e9
DIST git-2.39.2.tar.xz 7163224 BLAKE2B bcd9abdaf8ce626de7aec2da666395f80212772aa17516f903459d683597184efeeb1d6aad536e0bf722a9bf63df98549fdeaab5b0a48e5500faf83ebf5d7516 SHA512 fdca70bee19401c5c7a6d2f3d70bd80b6ba99f6a9f97947de31d4366ee3a78a18d5298abb25727ec8ef67131bca673e48dff2a5a050b6e032884ab04066b20cb
DIST git-2.40.0.rc0.tar.xz 7179140 BLAKE2B 275149105e27fa938d3769f29546a04fbea0daa10796933e9c50bc6a9c482fdb13dbe0f0f3bef3373fe98afeca7f4cc80c32cc5b38fdcdf60b7dc951a3fdaf61 SHA512 123d400cce9a66f7399fe9e74b79e8ec709b7ca45d4cf8296af27ad1b866fccebc6493cc056c08d4678d3720bace60193d55ff6014137965317c16e65536ac20
DIST git-2.40.0.tar.xz 7183692 BLAKE2B e2687ceb7e341170f063013c8c0e8be81456cda786981f36170aefb714687d78cf850287e5fb35d98e7326f611881ffd27d380728c87b9306fbbe46c1b989d69 SHA512 a2720f8f9a0258c0bb5e23badcfd68a147682e45a5d039a42c47128296c508109d5039029db89311a35db97a9008585e84ed11b400846502c9be913d67f0fd90
DIST git-htmldocs-2.39.1.tar.xz 1504172 BLAKE2B 64a73a2b04e99b0bf18ab2153076bcbae9c114a31c9ab998c5e068f0ce6c0f5140e9db79008b1b01688db6a6d53e84fde8b2319eb45394aa6bc2df361be78520 SHA512 9b67b0464acd5651ef5871f724c59c910195cdd7020ecdcadced6e8e1275269ee1ca5808881e67e908114cb4ef27c959320f1e3d718b6699d3761281b6179600
DIST git-htmldocs-2.39.2.tar.xz 1505352 BLAKE2B 8b60f9bae9585aab8782f38ff64391a9c8fddeed47d84a989b4f65db87c14e927d1d975fa5239435e921dbc74a75c600d6f0485defa1a235a66355c0f8d9060d SHA512 fe0982e653784285bce7f158956892900ae9c88aa986261de4184e3349e34ee54f92d7280e143031b107872fc1729814489f6e355aa12dc415d724da65ec3716
DIST git-htmldocs-2.40.0.rc0.tar.xz 1516032 BLAKE2B 076dfa70832fedf4f39bbe321aa74b7928a70040858624026662dc686a4af051fc8206daaee6748d560153bf38972210fb24095439f176b979727df8674bbab4 SHA512 288084d6789f2544be5f9932e43f542cc785e99df9ef42252a9ba87159bec9e845bcb7594e7e6011cbba1f75285b74e48cba2e86c6c9299c95b439b0803de4ed
DIST git-htmldocs-2.40.0.tar.xz 1517292 BLAKE2B 1baa47344e8c41c529e6ea21fa9ab406d146e128c2154beceb1d2ddf993cebdfeca132ed86811ef062df38e1ecc541dd2285bca0d985e7cc8ce185422d53f9b4 SHA512 3295e6b5e69430b3900ade93484cc357dd69d59ffccef0b62648d4aefc5d3e0820bd81dde2ec5ad0f3983c9c3a3648fa6edea508e870ebc0194cf6d42137aee2
DIST git-manpages-2.39.1.tar.xz 557012 BLAKE2B 1a39a1192443eac0d12da059e0146d286272054192810d12f53f627bd341c3dc50bea2078cccac0d46c5e95502a1d669f072ab387e1662c890e5c38d46ad3f31 SHA512 4f1fdd7abf53dd60478ffc5f926777cd00446a3799ad9a2129814adfe7d3d63e13dd9bcdcea699fd139c9e134cf45857f0585ce2772e4fb93e4930c8f86a08bb
DIST git-manpages-2.39.2.tar.xz 557080 BLAKE2B 6b2dbb33f1041ba802582f529638b8b4574309efbf5af94add5c676efed7314743d5b326ec18a95ba85a4fe818e95913069717034a9863879e1d03ee32b839ba SHA512 6326ff43564fc42ca0a424edd17896434e11c09ce21f4bfd4d4975aaaf7f2c0d823da0e89b267557b0b7799c342db88d84685d24f589f53edfe486208c1f15b1
DIST git-manpages-2.40.0.rc0.tar.xz 562536 BLAKE2B 1c41631408a9bea0d73dbc8df2242c3cbaf2330bc96f3e3020b10397568dc6e32b631d6ff0dd8026adc5c36398e44d88dcf773f49b706e446b93e84c94942081 SHA512 1444417c073c8a5d74edeb45e521761ce42d911c3f8dcc329a54a78d9078a81d4d1f9e8a1772d03f4196bc6b013fe092d0ea422ca43292399dd22c84813ddba7
DIST git-manpages-2.40.0.tar.xz 562824 BLAKE2B 9e57d39c2111fc3a109d355773bc191110ac421bc1b0b9bdeeb13e84d8934ad4bdb8c7439bc7fe8f1ffea505e4ea6d14ab4fdf689e828e841cf0af8d2eb3cd12 SHA512 7415db5fd30239c7f5a51c40c3e94870ef6896df0d78a88bbe91c38d58e4d9802e12e1de2cb03634a52ad4a5877c9d7e1194c284567288ed4263788c48c8cff0


@@ -1,46 +0,0 @@
Searching for pages using the MediaWiki API returns at most 500 results
(hi Patrick). To get a list of all pages in a larger wiki, we need to run
repeated searches...
Source: https://github.com/moy/Git-Mediawiki/issues/32
Author: anarcat https://github.com/anarcat
diff --git a/contrib/mw-to-git/git-remote-mediawiki.perl b/contrib/mw-to-git/git-remote-mediawiki.perl
index 8dd74a9..f2ce311 100755
--- a/contrib/mw-to-git/git-remote-mediawiki.perl
+++ b/contrib/mw-to-git/git-remote-mediawiki.perl
@@ -259,16 +259,29 @@ sub get_mw_tracked_categories {
sub get_mw_all_pages {
my $pages = shift;
# No user-provided list, get the list of pages from the API.
- my $mw_pages = $mediawiki->list({
+ my $query = {
action => 'query',
list => 'allpages',
aplimit => 'max'
- });
- if (!defined($mw_pages)) {
+ };
+ my $curpage;
+ my $oldpage = '';
+ while (1) {
+ if (defined($curpage)) {
+ if ($oldpage eq $curpage) {
+ last;
+ }
+ $query->{apfrom} = $curpage;
+ $oldpage = $curpage;
+ }
+ my $mw_pages = $mediawiki->list($query);
+ if (!defined($mw_pages)) {
fatal_mw_error("get the list of wiki pages");
- }
- foreach my $page (@{$mw_pages}) {
+ }
+ foreach my $page (@{$mw_pages}) {
$pages->{$page->{title}} = $page;
+ $curpage = $page->{title};
+ }
}
return;
}
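
The loop this removed patch adds exists because list=allpages returns at most 500 titles per call, so it re-issues the query with apfrom set to the last title it received and stops once a round yields no new title. A minimal standalone sketch of the same continuation loop, assuming the CPAN MediaWiki::API module and a placeholder wiki URL:

#!/usr/bin/perl
# Standalone sketch of the continuation loop from the patch above.
# Assumes MediaWiki::API from CPAN; the wiki URL is a placeholder.
use strict;
use warnings;
use MediaWiki::API;

my $mw = MediaWiki::API->new();
$mw->{config}->{api_url} = 'https://wiki.example.org/w/api.php';

my %pages;
my $query = { action => 'query', list => 'allpages', aplimit => 'max' };
my $curpage;
my $oldpage = '';
while (1) {
    if (defined($curpage)) {
        # No new title since the last round: we have seen everything.
        last if $oldpage eq $curpage;
        # Resume the listing at the last title we received.
        $query->{apfrom} = $curpage;
        $oldpage = $curpage;
    }
    my $mw_pages = $mw->list($query);
    die "allpages query failed\n" if !defined($mw_pages);
    foreach my $page (@{$mw_pages}) {
        $pages{ $page->{title} } = $page;
        $curpage = $page->{title};
    }
}
print scalar(keys %pages), " pages found\n";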


@@ -1,97 +0,0 @@
source:
https://gist.github.com/anarcat/f821fa285c6b8b6b16a5
https://github.com/moy/Git-Mediawiki/issues/10
From 147224cfe6143c44b16aec0bb6d6a506a6b96ced Mon Sep 17 00:00:00 2001
From: Kevin <kevin@ki-ai.org>
Date: Fri, 28 Aug 2015 15:53:37 -0500
Subject: [PATCH] Add namespace support to git-mediawiki
Signed-off-by: Kevin <kevin@ki-ai.org>
---
contrib/mw-to-git/git-remote-mediawiki.perl | 34 +++++++++++++++++++++++++++--
1 file changed, 32 insertions(+), 2 deletions(-)
diff --git a/contrib/mw-to-git/git-remote-mediawiki.perl b/contrib/mw-to-git/git-remote-mediawiki.perl
index 8dd74a9..662a5b5 100755
--- a/contrib/mw-to-git/git-remote-mediawiki.perl
+++ b/contrib/mw-to-git/git-remote-mediawiki.perl
@@ -17,6 +17,7 @@ use Git;
use Git::Mediawiki qw(clean_filename smudge_filename connect_maybe
EMPTY HTTP_CODE_OK);
use DateTime::Format::ISO8601;
+use Scalar::Util;
use warnings;
# By default, use UTF-8 to communicate with Git and the user
@@ -63,6 +64,10 @@ chomp(@tracked_pages);
my @tracked_categories = split(/[ \n]/, run_git("config --get-all remote.${remotename}.categories"));
chomp(@tracked_categories);
+# Just like @tracked_categories, but for MediaWiki namespaces.
+my @tracked_namespaces = split(/[ \n]/, run_git("config --get-all remote.${remotename}.namespaces"));
+chomp(@tracked_namespaces);
+
# Import media files on pull
my $import_media = run_git("config --get --bool remote.${remotename}.mediaimport");
chomp($import_media);
@@ -256,6 +261,23 @@ sub get_mw_tracked_categories {
return;
}
+sub get_mw_tracked_namespaces {
+ my $pages = shift;
+ foreach my $local_namespace (@tracked_namespaces) {
+ my $mw_pages = $mediawiki->list( {
+ action => 'query',
+ list => 'allpages',
+ apnamespace => get_mw_namespace_id($local_namespace),
+ aplimit => 'max' } )
+ || die $mediawiki->{error}->{code} . ': '
+ . $mediawiki->{error}->{details} . "\n";
+ foreach my $page (@{$mw_pages}) {
+ $pages->{$page->{title}} = $page;
+ }
+ }
+ return;
+}
+
sub get_mw_all_pages {
my $pages = shift;
# No user-provided list, get the list of pages from the API.
@@ -319,6 +341,10 @@ sub get_mw_pages {
$user_defined = 1;
get_mw_tracked_categories(\%pages);
}
+ if (@tracked_namespaces) {
+ $user_defined = 1;
+ get_mw_tracked_namespaces(\%pages);
+ }
if (!$user_defined) {
get_mw_all_pages(\%pages);
}
@@ -1263,7 +1289,6 @@ my %cached_mw_namespace_id;
sub get_mw_namespace_id {
$mediawiki = connect_maybe($mediawiki, $remotename, $url);
my $name = shift;
-
if (!exists $namespace_id{$name}) {
# Look at configuration file, if the record for that namespace is
# already cached. Namespaces are stored in form:
@@ -1331,7 +1356,12 @@ sub get_mw_namespace_id {
sub get_mw_namespace_id_for_page {
my $namespace = shift;
if ($namespace =~ /^([^:]*):/) {
- return get_mw_namespace_id($namespace);
+ my ($ns, $id) = split(/:/, $namespace);
+ if (Scalar::Util::looks_like_number($id)) {
+ return get_mw_namespace_id($ns);
+ } else{
+ return
+ }
} else {
return;
}
--
2.5.0
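
The namespace patch above boils down to two MediaWiki API calls: resolve a namespace name to its numeric id, then restrict list=allpages with apnamespace. A standalone sketch of that flow follows; the wiki URL and the Help namespace are placeholders, and the lookup here goes straight to meta=siteinfo instead of the patch's cached get_mw_namespace_id helper.

#!/usr/bin/perl
# Standalone sketch of the namespace listing the patch above adds.
# Assumes MediaWiki::API from CPAN; the URL and the 'Help' namespace
# are placeholders.
use strict;
use warnings;
use MediaWiki::API;

my $mw = MediaWiki::API->new();
$mw->{config}->{api_url} = 'https://wiki.example.org/w/api.php';

# Resolve the namespace name to its numeric id via meta=siteinfo.
my $siteinfo = $mw->api({
    action => 'query',
    meta   => 'siteinfo',
    siprop => 'namespaces',
});
die "siteinfo query failed\n" if !defined($siteinfo);

my $ns_id;
for my $ns (values %{ $siteinfo->{query}->{namespaces} }) {
    $ns_id = $ns->{id} if ($ns->{canonical} // q{}) eq 'Help';
}
die "namespace not found\n" if !defined($ns_id);

# Same query shape as get_mw_tracked_namespaces() in the patch.
my $mw_pages = $mw->list({
    action      => 'query',
    list        => 'allpages',
    apnamespace => $ns_id,
    aplimit     => 'max',
});
die "allpages query failed\n" if !defined($mw_pages);

print "$_->{title}\n" for @{$mw_pages};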


@@ -1,48 +0,0 @@
From 2593304723c6def159c10b9060dafa78a775a057 Mon Sep 17 00:00:00 2001
From: Lyubomyr Shaydariv <lyubomyr-shaydariv@users.noreply.github.com>
Date: Fri, 11 Sep 2015 00:41:17 +0300
Subject: [PATCH] git-remote-mediawiki: support subpages as subdirectories
This is a fix for https://github.com/moy/Git-Mediawiki/issues/22
The subdirectories option is enabled using -c remote.origin.subpageDirs=true
during the cloning and it is not recommended to be modified in or
removed from .git/config after the cloning.
---
contrib/mw-to-git/git-remote-mediawiki.perl | 10 +++++++++-
1 file changed, 9 insertions(+), 1 deletion(-)
diff --git a/contrib/mw-to-git/git-remote-mediawiki.perl b/contrib/mw-to-git/git-remote-mediawiki.perl
index 8dd74a9..f3624be 100755
--- a/contrib/mw-to-git/git-remote-mediawiki.perl
+++ b/contrib/mw-to-git/git-remote-mediawiki.perl
@@ -63,6 +63,11 @@
my @tracked_categories = split(/[ \n]/, run_git("config --get-all remote.${remotename}.categories"));
chomp(@tracked_categories);
+# Use subdirectories for subpages
+my $use_subpage_dirs = run_git("config --get --bool remote.${remotename}.subpageDirs");
+chomp($use_subpage_dirs);
+$use_subpage_dirs = ($use_subpage_dirs eq 'true');
+
# Import media files on pull
my $import_media = run_git("config --get --bool remote.${remotename}.mediaimport");
chomp($import_media);
@@ -689,6 +694,9 @@ sub fe_escape_path {
$path =~ s/\\/\\\\/g;
$path =~ s/"/\\"/g;
$path =~ s/\n/\\n/g;
+ if ($use_subpage_dirs) {
+ $path =~ s/%2F/\//g;
+ }
return qq("${path}");
}
@@ -927,7 +935,7 @@ sub mw_import_revids {
# If this is a revision of the media page for new version
# of a file do one common commit for both file and media page.
# Else do commit only for that page.
- print {*STDERR} "${n}/", scalar(@{$revision_ids}), ": Revision #$rev->{revid} of $commit{title}\n";
+ print {*STDERR} "${n}/", scalar(@{$revision_ids}), ": Revision #$rev->{revid} of ", fe_escape_path($commit{title}), "\n";
import_file_revision(\%commit, ($fetch_from == 1), $n_actual, \%mediafile);
}