From 3e7743488278d846f33545925011ab6e561597af Mon Sep 17 00:00:00 2001
From: Marc Beninca
Date: Wed, 10 Apr 2024 16:53:59 +0000
Subject: [PATCH] base

---
 .gitignore                                |    3 +
 apk.alpine.py                             |   59 ++
 deb.apt-mirror/mirror/deb                 |    1 +
 deb.apt-mirror/mirror/deb.debian.org      |    1 +
 deb.apt-mirror/mirror/download.docker.com |    1 +
 deb.apt-mirror/mirror/packages.gitlab.com |    1 +
 deb.apt-mirror/mirror/pkgs.zabbly.com     |    1 +
 deb.check.py                              |   44 +
 deb.fix.sh                                |   25 +
 deb.fork                                  | 1065 +++++++++++++++++++++
 deb.list.d/bookworm/gitlab.list           |    3 +
 deb.list.d/bookworm/incus.list            |    2 +
 deb.list.d/debian.list                    |    8 +
 deb.list.d/docker.list                    |    2 +
 deb.list.d/mirror.list                    |    2 +
 deb.sync.py                               |   41 +
 msys2.fix.sh                              |   13 +
 msys2.i.py                                |   60 ++
 msys2.s.py                                |   60 ++
 rpm.alma.py                               |   95 ++
 rpm.epel.py                               |   62 ++
 21 files changed, 1549 insertions(+)
 create mode 100644 .gitignore
 create mode 100755 apk.alpine.py
 create mode 120000 deb.apt-mirror/mirror/deb
 create mode 120000 deb.apt-mirror/mirror/deb.debian.org
 create mode 120000 deb.apt-mirror/mirror/download.docker.com
 create mode 120000 deb.apt-mirror/mirror/packages.gitlab.com
 create mode 120000 deb.apt-mirror/mirror/pkgs.zabbly.com
 create mode 100755 deb.check.py
 create mode 100755 deb.fix.sh
 create mode 100755 deb.fork
 create mode 100644 deb.list.d/bookworm/gitlab.list
 create mode 100644 deb.list.d/bookworm/incus.list
 create mode 100644 deb.list.d/debian.list
 create mode 100644 deb.list.d/docker.list
 create mode 100644 deb.list.d/mirror.list
 create mode 100755 deb.sync.py
 create mode 100755 msys2.fix.sh
 create mode 100755 msys2.i.py
 create mode 100755 msys2.s.py
 create mode 100755 rpm.alma.py
 create mode 100755 rpm.epel.py

diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..00ffb67
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,3 @@
+/deb.apt-mirror/var
+/root
+/squashfs
diff --git a/apk.alpine.py b/apk.alpine.py
new file mode 100755
index 0000000..94ddd0e
--- /dev/null
+++ b/apk.alpine.py
@@ -0,0 +1,59 @@
+#! /usr/bin/env python3
+
+import os
+import subprocess
+
+ROOT = 'rsync://rsync.kyberorg.fi/alpine'
+ROOT = 'rsync://alpine.mirror.wearetriple.com/alpine'
+ROOT = 'rsync://mirrors.dotsrc.org/alpine'
+ROOT = 'rsync://uk.alpinelinux.org/alpine'
+ARCH = 'x86_64'
+VERSIONS = [
+    'latest-stable',
+]
+
+TARGETS = {
+    'latest-stable': [
+        'releases',
+        'main',
+        'community',
+    ],
+}
+
+
+def sync(source, target):
+    args = ['rsync',
+            '--archive',
+            # '--checksum',
+            '--delete-before',
+            # '--dry-run',
+            '--no-motd',
+            '--partial',
+            '--progress',
+            '--verbose',
+            source,
+            target,
+            ]
+    print()
+    print()
+    print('←', source)
+    print('→', target)
+    subprocess.call(args)
+
+
+def main():
+    file = os.path.realpath(__file__)
+    root = os.path.dirname(file)
+    root = os.path.join(root, 'root', 'apk', 'alpine')
+    sources = []
+    for version in VERSIONS:
+        for target in TARGETS[version]:
+            sources.append(os.path.join(version, target, ARCH) + os.sep)
+    for source in sources:
+        target = os.path.join(root, source)
+        os.makedirs(os.path.dirname(target), exist_ok=True)
+        sync(os.path.join(ROOT, source), target)
+
+
+if __name__ == '__main__':
+    main()
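The apk.alpine.py script above builds one rsync source per (version, target, arch) triple and mirrors it into root/apk/alpine/. A quick way to sanity-check the generated paths before committing to a full download is to reuse the same argument list with rsync's --dry-run switch. The sketch below does that for a single module; it is not part of the patch, it assumes rsync is installed and the uk.alpinelinux.org mirror used above is reachable, and the preview output path is illustrative.

#! /usr/bin/env python3
# Hypothetical preview helper (sketch, not part of the patch): lists what
# apk.alpine.py would fetch for one source, without writing any package data.
import os
import subprocess

ROOT = 'rsync://uk.alpinelinux.org/alpine'    # assumed mirror, as in apk.alpine.py
SOURCE = 'latest-stable/main/x86_64/'         # one (version, target, arch) triple
TARGET = os.path.join(os.getcwd(), 'preview', SOURCE)

os.makedirs(TARGET, exist_ok=True)
# Same switches as sync(), plus --dry-run so rsync only reports the transfer list.
subprocess.call(['rsync', '--archive', '--delete-before', '--dry-run',
                 '--no-motd', '--verbose',
                 os.path.join(ROOT, SOURCE), TARGET])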
diff --git a/deb.apt-mirror/mirror/deb b/deb.apt-mirror/mirror/deb
new file mode 120000
index 0000000..e35f9c9
--- /dev/null
+++ b/deb.apt-mirror/mirror/deb
@@ -0,0 +1 @@
+../../root/deb
\ No newline at end of file
diff --git a/deb.apt-mirror/mirror/deb.debian.org b/deb.apt-mirror/mirror/deb.debian.org
new file mode 120000
index 0000000..b3324a8
--- /dev/null
+++ b/deb.apt-mirror/mirror/deb.debian.org
@@ -0,0 +1 @@
+deb/debian
\ No newline at end of file
diff --git a/deb.apt-mirror/mirror/download.docker.com b/deb.apt-mirror/mirror/download.docker.com
new file mode 120000
index 0000000..1ddd4f1
--- /dev/null
+++ b/deb.apt-mirror/mirror/download.docker.com
@@ -0,0 +1 @@
+deb/docker
\ No newline at end of file
diff --git a/deb.apt-mirror/mirror/packages.gitlab.com b/deb.apt-mirror/mirror/packages.gitlab.com
new file mode 120000
index 0000000..ad139b6
--- /dev/null
+++ b/deb.apt-mirror/mirror/packages.gitlab.com
@@ -0,0 +1 @@
+deb/gitlab
\ No newline at end of file
diff --git a/deb.apt-mirror/mirror/pkgs.zabbly.com b/deb.apt-mirror/mirror/pkgs.zabbly.com
new file mode 120000
index 0000000..f86566b
--- /dev/null
+++ b/deb.apt-mirror/mirror/pkgs.zabbly.com
@@ -0,0 +1 @@
+deb/incus
\ No newline at end of file
diff --git a/deb.check.py b/deb.check.py
new file mode 100755
index 0000000..2923b06
--- /dev/null
+++ b/deb.check.py
@@ -0,0 +1,44 @@
+#! /usr/bin/python3 -B
+
+import os
+import shutil
+import subprocess
+
+ALGO_NAME = "SHA256"
+VAR_NAME = "var"
+
+
+if __name__ == "__main__":
+    root_directory = os.path.dirname(os.path.realpath(__file__))
+    root_directory = os.path.join(root_directory, 'deb.apt-mirror')
+    hashes_file = os.path.join(root_directory, VAR_NAME, ALGO_NAME)
+    with open(hashes_file) as f:
+        lines = f.readlines()
+    hashes_by_names = {}
+    for line in lines:
+        hash, name = line.split()
+        hashes_by_names[name] = hash
+    files = len(hashes_by_names)
+    i = 1
+    ko = 0
+    os.chdir(os.path.join(root_directory, "mirror"))
+    command = ALGO_NAME.lower() + 'sum "{}"'
+    for name, hash in sorted(hashes_by_names.items()):
+        columns, rows = shutil.get_terminal_size()
+        progress = " ".join([str(files), str(ko), str(i), ""])
+        available = columns - len(progress) - 1
+        short_name = name[-available:]
+        padding = " " * (available - len(short_name))
+        print("\r", progress, short_name, padding, sep="", end="", flush=True)
+        output = subprocess.getoutput(command.format(name))
+        h, *_ = output.split()
+        if h != hash:
+            print()
+            try:
+                os.remove(name)
+            except OSError:
+                pass
+            ko += 1
+        i += 1
+    print()
+    print(ko)
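deb.check.py verifies every path listed in deb.apt-mirror/var/SHA256 by shelling out to sha256sum once per file and deleting mismatches. The same check can be done in-process with hashlib, which avoids one subprocess per package. The sketch below is a minimal stand-alone variant, not part of the patch; the "<hash> <path>" line format and the mirror directory layout are taken from the script above, and the chunk size is an arbitrary choice.

#! /usr/bin/env python3
# Hypothetical hashlib-based variant of deb.check.py (sketch, not part of the patch).
import hashlib
import os
import sys

def sha256_of(path, chunk=1 << 20):
    digest = hashlib.sha256()
    with open(path, 'rb') as f:
        for block in iter(lambda: f.read(chunk), b''):
            digest.update(block)
    return digest.hexdigest()

def main(hashes_file, mirror_directory):
    bad = 0
    with open(hashes_file) as f:
        for line in f:
            expected, name = line.split()
            path = os.path.join(mirror_directory, name)
            if not os.path.isfile(path) or sha256_of(path) != expected:
                print('KO', name)
                bad += 1
    print(bad)

if __name__ == '__main__':
    main(*sys.argv[1:3])  # e.g. deb.apt-mirror/var/SHA256 deb.apt-mirror/mirror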
diff --git a/deb.fix.sh b/deb.fix.sh
new file mode 100755
index 0000000..04152cf
--- /dev/null
+++ b/deb.fix.sh
@@ -0,0 +1,25 @@
+#! /usr/bin/env bash
+FILE="$(realpath "${BASH_SOURCE[0]}")"
+ROOT="$(dirname "${FILE}")"
+
+ERROR='→ ERROR! ERROR! ERROR! ←'
+DISTS=(
+'bookworm' 'bookworm-backports' 'bookworm-updates'
+)
+MISSING='Contents-all.gz'
+SECTIONS=('main' 'non-free-firmware' 'contrib' 'non-free')
+
+DEBIAN_ROOT='debian/dists'
+LOCAL_ROOT="${ROOT}/root/deb/debian/${DEBIAN_ROOT}"
+REMOTE_ROOT="https://deb.debian.org/${DEBIAN_ROOT}"
+
+for dist in "${DISTS[@]}" ; do
+  for section in "${SECTIONS[@]}" ; do
+    cd "${LOCAL_ROOT}/${dist}/${section}"
+    rm --force "${MISSING}"
+    wget "${REMOTE_ROOT}/${dist}/${section}/${MISSING}" &> /dev/null
+    if [ ${?} -ne 0 ] ; then
+      echo "${ERROR}"
+    fi
+  done
+done
diff --git a/deb.fork b/deb.fork
new file mode 100755
index 0000000..8dd2969
--- /dev/null
+++ b/deb.fork
@@ -0,0 +1,1065 @@
+#!/usr/bin/perl
+
+=pod
+
+=head1 NAME
+
+apt-mirror - apt sources mirroring tool
+
+=head1 SYNOPSIS
+
+apt-mirror [configfile]
+
+=head1 DESCRIPTION
+
+A small and efficient tool that lets you mirror a part of or
+the whole Debian GNU/Linux distribution or any other apt sources.
+
+Main features:
+ * It uses a config similar to APT's F<sources.list>
+ * It's fully pool compliant
+ * It supports multithreaded downloading
+ * It supports multiple architectures at the same time
+ * It can automatically remove unneeded files
+ * It works well on an overloaded Internet connection
+ * It never produces an inconsistent mirror including while mirroring
+ * It works on all POSIX compliant systems with Perl and wget
+
+=head1 COMMENTS
+
+apt-mirror uses F</etc/apt/mirror.list> as a configuration file.
+By default it is tuned to official Debian or Ubuntu mirrors. Change
+it for your needs.
+
+After you setup the configuration file you may run as root:
+
+    # su - apt-mirror -c apt-mirror
+
+Or uncomment the line in F</etc/cron.d/apt-mirror> to enable daily mirror updates.
+
+=head1 FILES
+
+F</etc/apt/mirror.list>
+        Main configuration file
+
+F</etc/cron.d/apt-mirror>
+        Cron configuration template
+
+F</var/spool/apt-mirror/mirror>
+        Mirror places here
+
+F</var/spool/apt-mirror/skel>
+        Place for temporarily downloaded indexes
+
+F</var/spool/apt-mirror/var>
+        Log files placed here. URLs and MD5 checksums also here.
+
+=head1 CONFIGURATION EXAMPLES
+
+The mirror.list configuration supports many options, the file is well commented explaining each option.
+Here are some sample mirror configuration lines showing the various supported ways:
+
+Normal:
+deb http://example.com/debian stable main contrib non-free
+
+Arch Specific: (many other architectures are supported)
+deb-powerpc http://example.com/debian stable main contrib non-free
+
+HTTP and FTP Auth or non-standard port:
+deb http://user:pass@example.com:8080/debian stable main contrib non-free
+
+HTTPS with sending Basic HTTP authentication information (plaintext username and password) for all requests:
+(this was default behaviour of Wget 1.10.2 and prior and is needed for some servers with new version of Wget)
+set auth_no_challenge 1
+deb https://user:pass@example.com:443/debian stable main contrib non-free
+
+HTTPS without checking certificate:
+set no_check_certificate 1
+deb https://example.com:443/debian stable main contrib non-free
+
+Source Mirroring:
+deb-src http://example.com/debian stable main contrib non-free
+
+=head1 AUTHORS
+
+Dmitry N. Hramtsov E<lt>hdn@nsu.ruE<gt>
+Brandon Holtsclaw E<lt>me@brandonholtsclaw.comE<gt>
+
+=cut
+
+use warnings;
+use strict;
+use File::Copy;
+use File::Compare;
+use File::Path qw(make_path);
+use File::Basename;
+use Fcntl qw(:flock);
+
+my $config_file;
+
+my %config_variables = (
+    "defaultarch" => `dpkg --print-architecture 2>/dev/null` || 'i386',
+    "nthreads" => 20,
+    "base_path" => '/var/spool/apt-mirror',
+    "mirror_path" => '$base_path/mirror',
+    "skel_path" => '$base_path/skel',
+    "var_path" => '$base_path/var',
+    "cleanscript" => '$var_path/clean.sh',
+    "_contents" => 1,
+    "_autoclean" => 0,
+    "_tilde" => 0,
+    "limit_rate" => '100m',
+    "run_postmirror" => 1,
+    "auth_no_challenge" => 0,
+    "no_check_certificate" => 0,
+    "unlink" => 0,
+    "postmirror_script" => '$var_path/postmirror.sh',
+    "use_proxy" => 'off',
+    "http_proxy" => '',
+    "https_proxy" => '',
+    "proxy_user" => '',
+    "proxy_password" => ''
+);
+
+my @config_binaries = ();
+my @config_sources = ();
+
+my @index_urls;
+my @childrens = ();
+my %skipclean = ();
+my %clean_directory = ();
+
+######################################################################################
+## Setting up $config_file variable
+
+$config_file = "/etc/apt/mirror.list";    # Default value
+if ( $_ = shift )
+{
+    die("apt-mirror: invalid config file specified") unless -e $_;
+    $config_file = $_;
+}
+
+chomp $config_variables{"defaultarch"};
+
+######################################################################################
+## Common subroutines
+
+sub round_number
+{
+    my $n = shift;
+    my $minus = $n < 0 ? '-' : '';
+    $n = abs($n);
+    $n = int( ( $n + .05 ) * 10 ) / 10;
+    $n .= '.0' unless $n =~ /\./;
+    $n .= '0' if substr( $n, ( length($n) - 1 ), 1 ) eq '.';
+    chop $n if $n =~ /\.\d\d0$/;
+    return "$minus$n";
+}
+
+sub format_bytes
+{
+    my $bytes = shift;
+    my $bytes_out = '0';
+    my $size_name = 'bytes';
+    my $KiB = 1024;
+    my $MiB = 1024 * 1024;
+    my $GiB = 1024 * 1024 * 1024;
+
+    if ( $bytes >= $KiB )
+    {
+        $bytes_out = $bytes / $KiB;
+        $size_name = 'KiB';
+        if ( $bytes >= $MiB )
+        {
+            $bytes_out = $bytes / $MiB;
+            $size_name = 'MiB';
+            if ( $bytes >= $GiB )
+            {
+                $bytes_out = $bytes / $GiB;
+                $size_name = 'GiB';
+            }
+        }
+        $bytes_out = round_number($bytes_out);
+    }
+    else
+    {
+        $bytes_out = $bytes;
+        $size_name = 'bytes';
+    }
+
+    return "$bytes_out $size_name";
+}
+
+sub get_variable
+{
+    my $value = $config_variables{ shift @_ };
+    my $count = 16;
+    while ( $value =~ s/\$(\w+)/$config_variables{$1}/xg )
+    {
+        die("apt-mirror: too many substitution while evaluating variable") if ( $count-- ) < 0;
+    }
+    return $value;
+}
+
+sub quoted_path
+{
+    my $path = shift;
+    $path =~ s/'/'\\''/g;
+    return "'" . $path . "'";
+}
+
+sub lock_aptmirror
+{
+    open( LOCK_FILE, '>', get_variable("var_path") . "/apt-mirror.lock" );
+    my $lock = flock( LOCK_FILE, LOCK_EX | LOCK_NB );
+    if ( !$lock )
+    {
+        die("apt-mirror is already running, exiting");
+    }
+}
+
+sub unlock_aptmirror
+{
+    close(LOCK_FILE);
+    unlink( get_variable("var_path") . "/apt-mirror.lock" );
+}
+
+sub download_urls
+{
+    my $stage = shift;
+    my @urls;
+    my $i = 0;
+    my $pid;
+    my $nthreads = get_variable("nthreads");
+    my @args = ();
+    local $| = 1;
+
+    @urls = @_;
+    $nthreads = @urls if @urls < $nthreads;
+
+    if ( get_variable("auth_no_challenge") == 1 ) { push( @args, "--auth-no-challenge" ); }
+    if ( get_variable("no_check_certificate") == 1 ) { push( @args, "--no-check-certificate" ); }
+    if ( get_variable("unlink") == 1 ) { push( @args, "--unlink" ); }
+    if ( length( get_variable("use_proxy") ) && ( get_variable("use_proxy") eq 'yes' || get_variable("use_proxy") eq 'on' ) )
+    {
+        if ( length( get_variable("http_proxy") ) || length( get_variable("https_proxy") ) ) { push( @args, "-e use_proxy=yes" ); }
+        if ( length( get_variable("http_proxy") ) ) { push( @args, "-e http_proxy=" . get_variable("http_proxy") ); }
+        if ( length( get_variable("https_proxy") ) ) { push( @args, "-e https_proxy=" . get_variable("https_proxy") ); }
+        if ( length( get_variable("proxy_user") ) ) { push( @args, "-e proxy_user=" . get_variable("proxy_user") ); }
+        if ( length( get_variable("proxy_password") ) ) { push( @args, "-e proxy_password=" . get_variable("proxy_password") ); }
+    }
+    print "Downloading " . scalar(@urls) . " $stage files using $nthreads threads...\n";
+
+    while ( scalar @urls )
+    {
+        my @part = splice( @urls, 0, int( @urls / $nthreads ) );
+        open URLS, ">" . get_variable("var_path") . "/$stage-urls.$i" or die("apt-mirror: can't write to intermediate file ($stage-urls.$i)");
+        foreach (@part) { print URLS "$_\n"; }
+        close URLS or die("apt-mirror: can't close intermediate file ($stage-urls.$i)");
+
+        $pid = fork();
+
+        die("apt-mirror: can't do fork in download_urls") if !defined($pid);
+
+        if ( $pid == 0 )
+        {
+            exec 'wget', '--no-cache', '--limit-rate=' . get_variable("limit_rate"), '-t', '5', '-r', '-N', '-l', 'inf', '-o', get_variable("var_path") . "/$stage-log.$i", '-i', get_variable("var_path") . "/$stage-urls.$i", @args;
"/$stage-urls.$i", @args; + + # shouldn't reach this unless exec fails + die("\n\nCould not run wget, please make sure its installed and in your path\n\n"); + } + + push @childrens, $pid; + $i++; + $nthreads--; + } + + print "Begin time: " . localtime() . "\n[" . scalar(@childrens) . "]... "; + while ( scalar @childrens ) + { + my $dead = wait(); + @childrens = grep { $_ != $dead } @childrens; + print "[" . scalar(@childrens) . "]... "; + } + print "\nEnd time: " . localtime() . "\n\n"; +} + +## Parse config + +sub parse_config_line +{ + my $pattern_deb_line = qr/^[\t ]*(?deb-src|deb)(?:-(?[\w\-]+))?[\t ]+(?:\[(?[^\]]+)\][\t ]+)?(?[^\s]+)[\t ]+(?.+)$/; + my $line = $_; + my %config; + if ( $line =~ $pattern_deb_line ) { + $config{'type'} = $+{type}; + $config{'arch'} = $+{arch}; + $config{'options'} = $+{options} ? $+{options} : ""; + $config{'uri'} = $+{uri}; + $config{'components'} = $+{components}; + if ( $config{'options'} =~ /arch=((?[\w\-]+)[,]*)/g ) { + $config{'arch'} = $+{arch}; + } + $config{'components'} = [ split /\s+/, $config{'components'} ]; + } elsif ( $line =~ /set[\t ]+(?[^\s]+)[\t ]+(?"[^"]+"|'[^']+'|[^\s]+)/ ) { + $config{'type'} = 'set'; + $config{'key'} = $+{key}; + $config{'value'} = $+{value}; + $config{'value'} =~ s/^'(.*)'$/$1/; + $config{'value'} =~ s/^"(.*)"$/$1/; + } elsif ( $line =~ /(?clean|skip-clean)[\t ]+(?[^\s]+)/ ) { + $config{'type'} = $+{type}; + $config{'uri'} = $+{uri}; + } + + return %config; +} + +open CONFIG, "<$config_file" or die("apt-mirror: can't open config file ($config_file)"); +while () +{ + next if /^\s*#/; + next unless /\S/; + my $line = $_; + my %config_line = parse_config_line; + + if ( $config_line{'type'} eq "set" ) { + $config_variables{ $config_line{'key'} } = $config_line{'value'}; + next; + } elsif ( $config_line{'type'} eq "deb" ) { + my $arch = $config_line{'arch'}; + $arch = get_variable("defaultarch") if ! defined $config_line{'arch'}; + push @config_binaries, [ $arch, $config_line{'uri'}, @{$config_line{'components'}} ]; + next; + } elsif ( $config_line{'type'} eq "deb-src" ) { + push @config_sources, [ $config_line{'uri'}, @{$config_line{'components'}} ]; + next; + } elsif ( $config_line{'type'} =~ /(skip-clean|clean)/ ) { + my $link = $config_line{'uri'}; + $link =~ s[^(\w+)://][]; + $link =~ s[/$][]; + $link =~ s[~][%7E]g if get_variable("_tilde"); + if ( $config_line{'type'} eq "skip-clean" ) { + $skipclean{ $link } = 1; + } elsif ( $config_line{'type'} eq "clean" ) { + $clean_directory{ $link } = 1; + } + next; + } + + die("apt-mirror: invalid line in config file ($.: $line ...)"); +} +close CONFIG; + +die("Please explicitly specify 'defaultarch' in mirror.list") unless get_variable("defaultarch"); + +###################################################################################### +## Create the 3 needed directories if they don't exist yet +my @needed_directories = ( get_variable("mirror_path"), get_variable("skel_path"), get_variable("var_path") ); +foreach my $needed_directory (@needed_directories) +{ + unless ( -d $needed_directory ) + { + make_path($needed_directory) or die("apt-mirror: can't create $needed_directory directory"); + } +} +# +####################################################################################### + +lock_aptmirror(); + +###################################################################################### +## Skel download + +my %urls_to_download = (); +my ( $url, $arch ); + +sub remove_double_slashes +{ + local $_ = shift; + while (s[/\./][/]g) { } + while (s[(? 
+    while ( $line = <STREAM> )
+    {
+        chomp $line;
+        if ($checksums)
+        {
+            if ( $line =~ /^ +(.*)$/ )
+            {
+                my @parts = split( / +/, $1 );
+                if ( @parts == 3 )
+                {
+                    my ( $sha1, $size, $filename ) = @parts;
+                    if ( $filename =~ m{^$component/i18n/Translation-[^./]*\.(bz2|xz)$} )
+                    {
+                        add_url_to_download( $dist_uri . $filename, $size );
+                    }
+                }
+                else
+                {
+                    warn("Malformed checksum line \"$1\" in $release_uri");
+                }
+            }
+            else
+            {
+                $checksums = 0;
+            }
+        }
+        if ( not $checksums )
+        {
+            if ( $line eq "SHA256:" )
+            {
+                $checksums = 1;
+            }
+        }
+    }
+}
+
+sub process_translation_index
+{
+    # Extract all translation files from the dists/$DIST/$COMPONENT/i18n/Index
+    # file. Fall back to parsing dists/$DIST/Release if i18n/Index is not found.
+
+    my $dist_uri = remove_double_slashes(shift);
+    my $component = shift;
+    my ( $base_uri, $index_uri, $index_path, $line ) = '';
+
+    $base_uri = $dist_uri . $component . "/i18n/";
+    $index_uri = $base_uri . "Index";
+    $index_path = get_variable("skel_path") . "/" . sanitise_uri($index_uri);
+
+    unless ( open STREAM, "<$index_path" )
+    {
+        find_translation_files_in_release( $dist_uri, $component );
+        return;
+    }
+
+    my $checksums = 0;
+    while ( $line = <STREAM> )
+    {
+        chomp $line;
+        if ($checksums)
+        {
+            if ( $line =~ /^ +(.*)$/ )
+            {
+                my @parts = split( / +/, $1 );
+                if ( @parts == 3 )
+                {
+                    my ( $sha1, $size, $filename ) = @parts;
+                    add_url_to_download( $base_uri . $filename, $size );
+                }
+                else
+                {
+                    warn("Malformed checksum line \"$1\" in $index_uri");
+                }
+            }
+            else
+            {
+                $checksums = 0;
+            }
+        }
+        if ( not $checksums )
+        {
+            if ( $line eq "SHA256:" or $line eq "SHA1:" or $line eq "MD5Sum:" )
+            {
+                $checksums = 1;
+            }
+        }
+    }
+
+    close STREAM;
+}
+
+print "Processing translation indexes: [";
+
+foreach (@config_binaries)
+{
+    my ( $arch, $uri, $distribution, @components ) = @{$_};
+    print "T";
+    if (@components)
+    {
+        $url = $uri . "/dists/" . $distribution . "/";
+
+        my $component;
+        foreach $component (@components)
+        {
+            process_translation_index( $url, $component );
+        }
+    }
+}
+
+print "]\n\n";
+
+push( @index_urls, sort keys %urls_to_download );
+download_urls( "translation", sort keys %urls_to_download );
+
+foreach ( keys %urls_to_download )
+{
+    s[^(\w+)://][];
+    s[~][%7E]g if get_variable("_tilde");
+    $skipclean{$_} = 1;
+}
+
+######################################################################################
+## DEP-11 index download
+
+%urls_to_download = ();
+
+sub find_dep11_files_in_release
+{
+    # Look in the dists/$DIST/Release file for the DEP-11 files that belong
+    # to the given component and architecture.
+
+    my $dist_uri = shift;
+    my $component = shift;
+    my $arch = shift;
+    my ( $release_uri, $release_path, $line ) = '';
+
+    $release_uri = $dist_uri . "Release";
+    $release_path = get_variable("skel_path") . "/" . sanitise_uri($release_uri);
+
+    unless ( open STREAM, "<$release_path" )
+    {
+        warn( "Failed to open Release file from " . $release_uri );
+        return;
+    }
+
+    my $checksums = 0;
+    while ( $line = <STREAM> )
+    {
+        chomp $line;
+        if ($checksums)
+        {
+            if ( $line =~ /^ +(.*)$/ )
+            {
+                my @parts = split( / +/, $1 );
+                if ( @parts == 3 )
+                {
+                    my ( $sha1, $size, $filename ) = @parts;
+                    if ( $filename =~ m{^$component/dep11/(Components-${arch}\.yml|icons-[^./]+\.tar)\.(gz|bz2|xz)$} )
+                    {
+                        add_url_to_download( $dist_uri . $filename, $size );
+                    }
+                }
+                else
+                {
+                    warn("Malformed checksum line \"$1\" in $release_uri");
+                }
+            }
+            else
+            {
+                $checksums = 0;
+            }
+        }
+        if ( not $checksums )
+        {
+            if ( $line eq "SHA256:" )
+            {
+                $checksums = 1;
+            }
+        }
+    }
+}
+
+print "Processing DEP-11 indexes: [";
+
+foreach (@config_binaries)
+{
+    my ( $arch, $uri, $distribution, @components ) = @{$_};
+    print "D";
+    if (@components)
+    {
+        $url = $uri . "/dists/" . $distribution . "/";
+
+        my $component;
+        foreach $component (@components)
+        {
+            find_dep11_files_in_release( $url, $component, $arch );
+        }
+    }
+}
+
+print "]\n\n";
+
+push( @index_urls, sort keys %urls_to_download );
+download_urls( "dep11", sort keys %urls_to_download );
+
+foreach ( keys %urls_to_download )
+{
+    s[^(\w+)://][];
+    s[~][%7E]g if get_variable("_tilde");
+    $skipclean{$_} = 1;
+}
+
+######################################################################################
+## Main download preparations
+
+%urls_to_download = ();
+
+open FILES_ALL, ">" . get_variable("var_path") . "/ALL" or die("apt-mirror: can't write to intermediate file (ALL)");
+open FILES_NEW, ">" . get_variable("var_path") . "/NEW" or die("apt-mirror: can't write to intermediate file (NEW)");
+open FILES_MD5, ">" . get_variable("var_path") . "/MD5" or die("apt-mirror: can't write to intermediate file (MD5)");
+open FILES_SHA1, ">" . get_variable("var_path") . "/SHA1" or die("apt-mirror: can't write to intermediate file (SHA1)");
+open FILES_SHA256, ">" . get_variable("var_path") . "/SHA256" or die("apt-mirror: can't write to intermediate file (SHA256)");
+
+my %stat_cache = ();
+
+sub _stat
+{
+    my ($filename) = shift;
+    return @{ $stat_cache{$filename} } if exists $stat_cache{$filename};
+    my @res = stat($filename);
+    $stat_cache{$filename} = \@res;
+    return @res;
+}
+
+sub clear_stat_cache
+{
+    %stat_cache = ();
+}
+
+sub need_update
+{
+    my $filename = shift;
+    my $size_on_server = shift;
+
+    my ( undef, undef, undef, undef, undef, undef, undef, $size ) = _stat($filename);
+
+    return 1 unless ($size);
+    return 0 if $size_on_server == $size;
+    return 1;
+}
+
+sub remove_spaces($)
+{
+    my $hashref = shift;
+    foreach ( keys %{$hashref} )
+    {
+        while ( substr( $hashref->{$_}, 0, 1 ) eq ' ' )
+        {
+            substr( $hashref->{$_}, 0, 1 ) = '';
+        }
+    }
+}
+
+sub process_index
+{
+    my $uri = shift;
+    my $index = shift;
+    my ( $path, $package, $mirror, $files ) = '';
+
+    $path = sanitise_uri($uri);
+    local $/ = "\n\n";
+    $mirror = get_variable("mirror_path") . "/" . $path;
+
+    if (-e "$path/$index.gz" )
+    {
+        system("gunzip < $path/$index.gz > $path/$index");
+    }
+    elsif (-e "$path/$index.xz" )
+    {
+        system("xz -d < $path/$index.xz > $path/$index");
+    }
+    elsif (-e "$path/$index.bz2" )
+    {
+        system("bzip2 -d < $path/$index.bz2 > $path/$index");
+    }
+
+    unless ( open STREAM, "<$path/$index" )
+    {
+        warn("apt-mirror: can't open index $path/$index in process_index");
+        return;
+    }
+
+    while ( $package = <STREAM> )
+    {
+        local $/ = "\n";
+        chomp $package;
+        my ( undef, %lines ) = split( /^([\w\-]+:)/m, $package );
+
+        $lines{"Directory:"} = "" unless defined $lines{"Directory:"};
+        chomp(%lines);
+        remove_spaces( \%lines );
+
+        if ( exists $lines{"Filename:"} )
+        {    # Packages index
+            $skipclean{ remove_double_slashes( $path . "/" . $lines{"Filename:"} ) } = 1;
+            print FILES_ALL remove_double_slashes( $path . "/" . $lines{"Filename:"} ) . "\n";
+            print FILES_MD5 $lines{"MD5sum:"} . " " . remove_double_slashes( $path . "/" . $lines{"Filename:"} ) . "\n" if defined $lines{"MD5sum:"};
"\n" if defined $lines{"MD5sum:"}; + print FILES_SHA1 $lines{"SHA1:"} . " " . remove_double_slashes( $path . "/" . $lines{"Filename:"} ) . "\n" if defined $lines{"SHA1:"}; + print FILES_SHA256 $lines{"SHA256:"} . " " . remove_double_slashes( $path . "/" . $lines{"Filename:"} ) . "\n" if defined $lines{"SHA256:"}; + if ( need_update( $mirror . "/" . $lines{"Filename:"}, $lines{"Size:"} ) ) + { + print FILES_NEW remove_double_slashes( $uri . "/" . $lines{"Filename:"} ) . "\n"; + add_url_to_download( $uri . "/" . $lines{"Filename:"}, $lines{"Size:"} ); + } + } + elsif ( exists $lines{"Files:"} ) + { # Sources index + foreach ( split( /\n/, $lines{"Files:"} ) ) + { + next if $_ eq ''; + my @file = split; + die("apt-mirror: invalid Sources format") if @file != 3; + $skipclean{ remove_double_slashes( $path . "/" . $lines{"Directory:"} . "/" . $file[2] ) } = 1; + print FILES_ALL remove_double_slashes( $path . "/" . $lines{"Directory:"} . "/" . $file[2] ) . "\n"; + print FILES_MD5 $file[0] . " " . remove_double_slashes( $path . "/" . $lines{"Directory:"} . "/" . $file[2] ) . "\n"; + if ( need_update( $mirror . "/" . $lines{"Directory:"} . "/" . $file[2], $file[1] ) ) + { + print FILES_NEW remove_double_slashes( $uri . "/" . $lines{"Directory:"} . "/" . $file[2] ) . "\n"; + add_url_to_download( $uri . "/" . $lines{"Directory:"} . "/" . $file[2], $file[1] ); + } + } + } + } + + close STREAM; +} + +print "Processing indexes: ["; + +foreach (@config_sources) +{ + my ( $uri, $distribution, @components ) = @{$_}; + print "S"; + if (@components) + { + my $component; + foreach $component (@components) + { + process_index( $uri, "/dists/$distribution/$component/source/Sources" ); + } + } + else + { + process_index( $uri, "/$distribution/Sources" ); + } +} + +foreach (@config_binaries) +{ + my ( $arch, $uri, $distribution, @components ) = @{$_}; + print "P"; + if (@components) + { + my $component; + foreach $component (@components) + { + process_index( $uri, "/dists/$distribution/$component/binary-$arch/Packages" ); + } + } + else + { + process_index( $uri, "/$distribution/Packages" ); + } +} + +clear_stat_cache(); + +print "]\n\n"; + +close FILES_ALL; +close FILES_NEW; +close FILES_MD5; +close FILES_SHA1; +close FILES_SHA256; + +###################################################################################### +## Main download + +chdir get_variable("mirror_path") or die("apt-mirror: can't chdir to mirror"); + +my $need_bytes = 0; +foreach ( values %urls_to_download ) +{ + $need_bytes += $_; +} + +my $size_output = format_bytes($need_bytes); + +print "$size_output will be downloaded into archive.\n"; + +download_urls( "archive", sort keys %urls_to_download ); + +###################################################################################### +## Copy skel to main archive + +sub copy_file +{ + my ( $from, $to ) = @_; + my $dir = dirname($to); + return unless -f $from; + make_path($dir) unless -d $dir; + if ( get_variable("unlink") == 1 ) + { + if ( compare( $from, $to ) != 0 ) { unlink($to); } + } + unless ( copy( $from, $to ) ) + { + warn("apt-mirror: can't copy $from to $to"); + return; + } + my ( $atime, $mtime ) = ( stat($from) )[ 8, 9 ]; + utime( $atime, $mtime, $to ) or die("apt-mirror: can't utime $to"); +} + +foreach (@index_urls) +{ + die("apt-mirror: invalid url in index_urls") unless s[^(\w+)://][]; + copy_file( get_variable("skel_path") . "/" . sanitise_uri("$_"), get_variable("mirror_path") . "/" . sanitise_uri("$_") ); + copy_file( get_variable("skel_path") . "/" . 
sanitise_uri("$_"), get_variable("mirror_path") . "/" . sanitise_uri("$_") ) if (s/\.gz$//); + copy_file( get_variable("skel_path") . "/" . sanitise_uri("$_"), get_variable("mirror_path") . "/" . sanitise_uri("$_") ) if (s/\.bz2$//); + copy_file( get_variable("skel_path") . "/" . sanitise_uri("$_"), get_variable("mirror_path") . "/" . sanitise_uri("$_") ) if (s/\.xz$//); +} + +###################################################################################### +## Make cleaning script + +my ( @rm_dirs, @rm_files ) = (); +my $unnecessary_bytes = 0; + +sub process_symlink +{ + return 1; # symlinks are always needed +} + +sub process_file +{ + my $file = shift; + $file =~ s[~][%7E]g if get_variable("_tilde"); + return 1 if $skipclean{$file}; + push @rm_files, sanitise_uri($file); + my ( undef, undef, undef, undef, undef, undef, undef, $size, undef, undef, undef, undef, $blocks ) = stat($file); + $unnecessary_bytes += $blocks * 512; + return 0; +} + +sub process_directory +{ + my $dir = shift; + my $is_needed = 0; + return 1 if $skipclean{$dir}; + opendir( my $dir_h, $dir ) or die "apt-mirror: can't opendir $dir: $!"; + foreach ( grep { !/^\.$/ && !/^\.\.$/ } readdir($dir_h) ) + { + my $item = $dir . "/" . $_; + $is_needed |= process_directory($item) if -d $item && !-l $item; + $is_needed |= process_file($item) if -f $item; + $is_needed |= process_symlink($item) if -l $item; + } + closedir $dir_h; + push @rm_dirs, $dir unless $is_needed; + return $is_needed; +} + +chdir get_variable("mirror_path") or die("apt-mirror: can't chdir to mirror"); + +foreach ( keys %clean_directory ) +{ + process_directory($_) if -d $_ && !-l $_; +} + +open CLEAN, ">" . get_variable("cleanscript") or die("apt-mirror: can't open clean script file"); + +my ( $i, $total ) = ( 0, scalar @rm_files ); + +if ( get_variable("_autoclean") ) +{ + + my $size_output = format_bytes($unnecessary_bytes); + print "$size_output in $total files and " . scalar(@rm_dirs) . " directories will be freed..."; + + chdir get_variable("mirror_path") or die("apt-mirror: can't chdir to mirror"); + + foreach (@rm_files) { unlink $_; } + foreach (@rm_dirs) { rmdir $_; } + +} +else +{ + + my $size_output = format_bytes($unnecessary_bytes); + print "$size_output in $total files and " . scalar(@rm_dirs) . " directories can be freed.\n"; + print "Run " . get_variable("cleanscript") . " for this purpose.\n\n"; + + print CLEAN "#!/bin/sh\n"; + print CLEAN "set -e\n\n"; + print CLEAN "cd " . quoted_path(get_variable("mirror_path")) . "\n\n"; + print CLEAN "echo 'Removing $total unnecessary files [$size_output]...'\n"; + foreach (@rm_files) + { + print CLEAN "rm -f '$_'\n"; + print CLEAN "echo -n '[" . int( 100 * $i / $total ) . "\%]'\n" unless $i % 500; + print CLEAN "echo -n .\n" unless $i % 10; + $i++; + } + print CLEAN "echo 'done.'\n"; + print CLEAN "echo\n\n"; + + $i = 0; + $total = scalar @rm_dirs; + print CLEAN "echo 'Removing $total unnecessary directories...'\n"; + foreach (@rm_dirs) + { + print CLEAN "if test -d '$_'; then rmdir '$_'; fi\n"; + print CLEAN "echo -n '[" . int( 100 * $i / $total ) . "\%]'\n" unless $i % 50; + print CLEAN "echo -n .\n"; + $i++; + } + print CLEAN "echo 'done.'\n"; + print CLEAN "echo\n"; + + close CLEAN; + +} + +# Make clean script executable +my $perm = ( stat get_variable("cleanscript") )[2] & 07777; +chmod( $perm | 0111, get_variable("cleanscript") ); + +if ( get_variable("run_postmirror") ) +{ + print "Running the Post Mirror script ...\n"; + print "(" . get_variable("postmirror_script") . 
")\n\n"; + if ( -x get_variable("postmirror_script") ) + { + system( get_variable("postmirror_script"), '' ); + } + else + { + system( '/bin/sh', get_variable("postmirror_script") ); + } + print "\nPost Mirror script has completed. See above output for any possible errors.\n\n"; +} + +unlock_aptmirror(); diff --git a/deb.list.d/bookworm/gitlab.list b/deb.list.d/bookworm/gitlab.list new file mode 100644 index 0000000..414ab8e --- /dev/null +++ b/deb.list.d/bookworm/gitlab.list @@ -0,0 +1,3 @@ +#deb-amd64 https://packages.gitlab.com/gitlab/gitlab-ce/debian bookworm main +deb-amd64 https://packages.gitlab.com/runner/gitlab-runner/debian bookworm main +clean https://packages.gitlab.com diff --git a/deb.list.d/bookworm/incus.list b/deb.list.d/bookworm/incus.list new file mode 100644 index 0000000..2849631 --- /dev/null +++ b/deb.list.d/bookworm/incus.list @@ -0,0 +1,2 @@ +deb-amd64 https://pkgs.zabbly.com/incus/stable bookworm main +clean https://pkgs.zabbly.com diff --git a/deb.list.d/debian.list b/deb.list.d/debian.list new file mode 100644 index 0000000..c3e9be9 --- /dev/null +++ b/deb.list.d/debian.list @@ -0,0 +1,8 @@ +deb-amd64 https://deb.debian.org/debian bookworm main non-free-firmware contrib non-free +deb-amd64 https://deb.debian.org/debian bookworm-backports main non-free-firmware contrib non-free +deb-amd64 https://deb.debian.org/debian bookworm-updates main non-free-firmware contrib non-free +deb-amd64 https://deb.debian.org/debian-security bookworm-security main non-free-firmware contrib non-free + +deb-amd64 https://deb.debian.org/debian-security bullseye-security main + +clean https://deb.debian.org diff --git a/deb.list.d/docker.list b/deb.list.d/docker.list new file mode 100644 index 0000000..a933c8c --- /dev/null +++ b/deb.list.d/docker.list @@ -0,0 +1,2 @@ +deb-amd64 https://download.docker.com/linux/debian bookworm stable +clean https://download.docker.com diff --git a/deb.list.d/mirror.list b/deb.list.d/mirror.list new file mode 100644 index 0000000..8d5a95b --- /dev/null +++ b/deb.list.d/mirror.list @@ -0,0 +1,2 @@ +set postmirror_script "$var_path/clean.sh" +set nthreads 16 diff --git a/deb.sync.py b/deb.sync.py new file mode 100755 index 0000000..3fb1745 --- /dev/null +++ b/deb.sync.py @@ -0,0 +1,41 @@ +#! /usr/bin/python3 -B + +import os +import shutil +import subprocess + +LIST_DIRECTORY = "deb.list.d" +LIST_FILE = "deb.list" +SKELETON = "skel" +VARIABLE = "var" + + +if __name__ == "__main__": + # directories + root_directory = os.path.dirname(os.path.realpath(__file__)) + command_directory = os.path.join(root_directory, 'deb.apt-mirror') + # files + lines = ['set base_path "{}"'.format(command_directory) + os.linesep] + os.chdir(root_directory) + for directory, _, files in os.walk(LIST_DIRECTORY): + for file in files: + with open(os.path.join(directory, file)) as f: + lines.append(os.linesep) + lines.extend(f.readlines()) + # write + os.chdir(root_directory) + string = "".join(lines) + print(string, end="") + with open(LIST_FILE, "w") as file: + file.write(string) + # wipe + os.chdir(command_directory) + shutil.rmtree(SKELETON, ignore_errors=True) + shutil.rmtree(VARIABLE, ignore_errors=True) + # run + os.chdir(root_directory) + subprocess.call([os.path.join(root_directory, "deb.fork"), LIST_FILE]) + os.remove(LIST_FILE) + # wipe + os.chdir(command_directory) + shutil.rmtree(SKELETON, ignore_errors=True) diff --git a/msys2.fix.sh b/msys2.fix.sh new file mode 100755 index 0000000..f831128 --- /dev/null +++ b/msys2.fix.sh @@ -0,0 +1,13 @@ +#! 
+FILE="$(realpath "${BASH_SOURCE[0]}")"
+cd "$(dirname "${FILE}")"
+
+ROOT='root/msys2'
+
+rm s/msys/*.tar
+rm i/mingw/mingw64/*.tar
+
+rm -fr "${ROOT}/msys/x86_64" ; mv -i s/msys "${ROOT}/msys/x86_64"
+rm -fr "${ROOT}/mingw" ; mv -i i/mingw "${ROOT}/"
+
+rmdir s i
diff --git a/msys2.i.py b/msys2.i.py
new file mode 100755
index 0000000..a80fa58
--- /dev/null
+++ b/msys2.i.py
@@ -0,0 +1,60 @@
+#! /usr/bin/python3 -B
+
+import os
+import requests
+import shutil
+import subprocess
+import sys
+import tarfile
+
+ARCHITECTURE = 'x86_64'
+REPOSITORY = 'https://repo.msys2.org'
+
+
+def download(url, file):
+    print(file)
+    response = requests.get(f'{url}/{file}')
+    open(file, 'bw').write(response.content)
+
+
+def download_subrepo(root, directory, prefix):
+    path = os.path.join(root, directory)
+    print()
+    print(path)
+    os.makedirs(path)
+    os.chdir(path)
+    url = f'{REPOSITORY}/{directory}'
+    for suffix in ['files', 'db']:
+        archive = f'{prefix}.{suffix}'
+        download(url, f'{archive}.sig')
+        download(url, f'{archive}')
+        subprocess.run(['unzstd',
+                        f'{archive}',
+                        '-o', f'{archive}.tar'])
+    archive = tarfile.open(f'{archive}.tar')
+    packages = [m for m in archive.getmembers() if m.isfile()]
+    names = []
+    for package in packages:
+        desc = archive.extractfile(package)
+        desc.readline()
+        names.append(desc.readline().strip().decode('u8'))
+    archive.close()
+    for name in names:
+        # fetch each package file and its detached signature
+        signature = f'{name}.sig'
+        download(url, signature)
+        download(url, name)
+
+
+def main():
+    _, directory, *_ = sys.argv
+    output_directory = os.path.realpath(directory)
+    print(output_directory)
+    if os.path.exists(output_directory):
+        shutil.rmtree(output_directory)
+# download_subrepo(output_directory, 'msys', 'msys')
+    download_subrepo(output_directory, 'mingw/mingw64', 'mingw64')
+
+
+if __name__ == '__main__':
+    main()
diff --git a/msys2.s.py b/msys2.s.py
new file mode 100755
index 0000000..af819b4
--- /dev/null
+++ b/msys2.s.py
@@ -0,0 +1,60 @@
+#! /usr/bin/python3 -B
+
+import os
+import requests
+import shutil
+import subprocess
+import sys
+import tarfile
+
+ARCHITECTURE = 'x86_64'
+REPOSITORY = 'https://repo.msys2.org'
+
+
+def download(url, file):
+    print(file)
+    response = requests.get(f'{url}/{file}')
+    open(file, 'bw').write(response.content)
+
+
+def download_subrepo(root, directory, prefix):
+    path = os.path.join(root, directory)
+    print()
+    print(path)
+    os.makedirs(path)
+    os.chdir(path)
+    url = f'{REPOSITORY}/{directory}/{ARCHITECTURE}'
+    for suffix in ['files', 'db']:
+        archive = f'{prefix}.{suffix}'
+        download(url, f'{archive}.sig')
+        download(url, f'{archive}')
+        subprocess.run(['unzstd',
+                        f'{archive}',
+                        '-o', f'{archive}.tar'])
+    archive = tarfile.open(f'{archive}.tar')
+    packages = [m for m in archive.getmembers() if m.isfile()]
+    names = []
+    for package in packages:
+        desc = archive.extractfile(package)
+        desc.readline()
+        names.append(desc.readline().strip().decode('u8'))
+    archive.close()
+    for name in names:
+        # fetch each package file and its detached signature
+        signature = f'{name}.sig'
+        download(url, signature)
+        download(url, name)
+
+
+def main():
+    _, directory, *_ = sys.argv
+    output_directory = os.path.realpath(directory)
+    print(output_directory)
+    if os.path.exists(output_directory):
+        shutil.rmtree(output_directory)
+    download_subrepo(output_directory, 'msys', 'msys')
+# download_subrepo(output_directory, 'mingw', 'mingw64')
+
+
+if __name__ == '__main__':
+    main()
diff --git a/rpm.alma.py b/rpm.alma.py
new file mode 100755
index 0000000..e66fc41
--- /dev/null
+++ b/rpm.alma.py
@@ -0,0 +1,95 @@
+#! /usr/bin/env python3
+
+import os
+import subprocess
+
+ROOT = 'rsync://rsync.repo.almalinux.org/almalinux'
+ARCH = 'x86_64'
+VERSIONS = [
+    '8',
+    '9',
+]
+
+KEY = 'RPM-GPG-KEY-AlmaLinux'
+TARGETS = {
+    '8': [
+        'AppStream',
+        'BaseOS',
+        # 'HighAvailability',
+        # 'NFV',
+        # 'PowerTools',
+        # 'RT',
+        # 'ResilientStorage',
+        # 'SAP',
+        # 'SAPHANA',
+        'cloud',
+        # 'devel',
+        'extras',
+        # 'isos',
+        # 'live',
+        'metadata',
+        # 'plus',
+        # 'raspberrypi',
+        # 'synergy',
+    ],
+    '9': [
+        'AppStream',
+        'BaseOS',
+        # 'CRB',
+        # 'HighAvailability',
+        # 'NFV',
+        # 'RT',
+        # 'ResilientStorage',
+        # 'SAP',
+        # 'SAPHANA',
+        'cloud',
+        # 'devel',
+        'extras',
+        # 'isos',
+        # 'live',
+        'metadata',
+        # 'plus',
+        # 'raspberrypi',
+        # 'synergy',
+    ],
+}
+
+
+def sync(source, target):
+    args = ['rsync',
+            '--archive',
+            # '--checksum',
+            '--delete-before',
+            # '--dry-run',
+            '--inplace',
+            '--no-motd',
+            '--partial',
+            '--progress',
+            '--verbose',
+            source,
+            target,
+            ]
+    print()
+    print()
+    print('←', source)
+    print('→', target)
+    subprocess.call(args)
+
+
+def main():
+    file = os.path.realpath(__file__)
+    root = os.path.dirname(file)
+    root = os.path.join(root, 'root', 'rpm', 'alma')
+    sources = [KEY]
+    for version in VERSIONS:
+        sources.append(f'{KEY}-{version}')
+        for target in TARGETS[version]:
+            sources.append(os.path.join(version, target, ARCH) + os.sep)
+    for source in sources:
+        target = os.path.join(root, source)
+        os.makedirs(os.path.dirname(target), exist_ok=True)
+        sync(os.path.join(ROOT, source), target)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/rpm.epel.py b/rpm.epel.py
new file mode 100755
index 0000000..e17b5f3
--- /dev/null
+++ b/rpm.epel.py
@@ -0,0 +1,62 @@
+#! /usr/bin/env python3
+
+import os
+import subprocess
+
+ROOT = 'rsync://fr2.rpmfind.net/linux/epel'
+ARCH = 'x86_64'
+VERSIONS = [
+    '8',
+    '9',
+]
+
+KEY = 'RPM-GPG-KEY-EPEL'
+TARGETS = {
+    '8': [
+        'Everything',
+        # 'Modular',
+    ],
+    '9': [
+        'Everything',
+    ],
+}
+
+
+def sync(source, target):
+    args = ['rsync',
+            '--archive',
+            # '--checksum',
+            '--delete-before',
+            # '--dry-run',
+            '--inplace',
+            '--no-motd',
+            '--partial',
+            '--progress',
+            '--verbose',
+            source,
+            target,
+            ]
+    print()
+    print()
+    print('←', source)
+    print('→', target)
+    subprocess.call(args)
+
+
+def main():
+    file = os.path.realpath(__file__)
+    root = os.path.dirname(file)
+    root = os.path.join(root, 'root', 'rpm', 'epel')
+    sources = [KEY]
+    for version in VERSIONS:
+        sources.append(f'{KEY}-{version}')
+        for target in TARGETS[version]:
+            sources.append(os.path.join(version, target, ARCH) + os.sep)
+    for source in sources:
+        target = os.path.join(root, source)
+        os.makedirs(os.path.dirname(target), exist_ok=True)
+        sync(os.path.join(ROOT, source), target)
+
+
+if __name__ == '__main__':
+    main()
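Taken together, the patch adds one standalone sync entry point per package ecosystem (apk.alpine.py, deb.sync.py, rpm.alma.py, rpm.epel.py, msys2.i.py/msys2.s.py) plus repair helpers (deb.check.py, deb.fix.sh, msys2.fix.sh). Nothing in the patch wires them together, so sequencing is left to the operator; the sketch below is one possible top-level driver, and its script selection and ordering are an assumption rather than something this patch defines. The msys2 scripts are omitted here because they take an output directory argument.

#! /usr/bin/env python3
# Hypothetical top-level driver (sketch, not part of the patch): run each
# mirror script in sequence from the repository root and report failures.
import os
import subprocess
import sys

SCRIPTS = ['apk.alpine.py', 'rpm.alma.py', 'rpm.epel.py', 'deb.sync.py']  # assumed order

def main():
    root = os.path.dirname(os.path.realpath(__file__))
    failed = []
    for script in SCRIPTS:
        print('===', script)
        if subprocess.call([os.path.join(root, script)]) != 0:
            failed.append(script)
    if failed:
        print('failed:', ', '.join(failed))
        sys.exit(1)

if __name__ == '__main__':
    main()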