mirror of https://git.wownero.com/dsc/mxe.git
remove gitlab mirror
Suffixed tarballs are available in three places, so there is no need for the extra redundancy. Closes #1681.
This commit is contained in:
parent
c1afdc77ca
commit
32edbb48cc
3
Makefile
3
Makefile
|
@ -21,9 +21,8 @@ SOURCEFORGE_MIRROR := downloads.sourceforge.net
|
||||||
MXE_MIRROR := https://mirror.mxe.cc/pkg
|
MXE_MIRROR := https://mirror.mxe.cc/pkg
|
||||||
PKG_MIRROR := https://s3.amazonaws.com/mxe-pkg
|
PKG_MIRROR := https://s3.amazonaws.com/mxe-pkg
|
||||||
PKG_CDN := http://d1yihgixbnrglp.cloudfront.net
|
PKG_CDN := http://d1yihgixbnrglp.cloudfront.net
|
||||||
GITLAB_BACKUP := https://gitlab.com/starius/mxe-backup2/raw/master
|
|
||||||
# reorder as required, ensuring final one is a http fallback
|
# reorder as required, ensuring final one is a http fallback
|
||||||
MIRROR_SITES := GITLAB_BACKUP MXE_MIRROR PKG_MIRROR PKG_CDN
|
MIRROR_SITES := MXE_MIRROR PKG_MIRROR PKG_CDN
|
||||||
|
|
||||||
PWD := $(shell pwd)
|
PWD := $(shell pwd)
|
||||||
SHELL := bash
|
SHELL := bash
|
||||||
|
|
|
@ -1,74 +0,0 @@
|
||||||
#!/usr/bin/env python
|
|
||||||
|
|
||||||
""" Download MXE packages from https://s3.amazonaws.com/mxe-pkg/
|
|
||||||
"""
|
|
||||||
|
|
||||||
import argparse
|
|
||||||
import hashlib
|
|
||||||
import os
|
|
||||||
import urllib
|
|
||||||
try:
|
|
||||||
import urllib2
|
|
||||||
except:
|
|
||||||
# Python 3
|
|
||||||
import urllib.request as urllib2
|
|
||||||
import xml.etree.ElementTree
|
|
||||||
|
|
||||||
def get_files():
    """Yield one dict per object listed in the mxe-pkg S3 bucket.

    Each yielded dict has two keys:
        'filename': the S3 object key (may be None if no Key tag found).
        'md5': the object's MD5 taken from its ETag, or None when the
            ETag contains '-' (multipart uploads — not a plain MD5).
    """
    x = xml.etree.ElementTree.fromstring(
        urllib2.urlopen("https://s3.amazonaws.com/mxe-pkg/").read()
    )
    for e in x:
        # The S3 listing tags are namespaced, so match by suffix only.
        if not e.tag.endswith('Contents'):
            continue
        filename = None
        md5 = None
        # Element.getchildren() was deprecated and removed in Python 3.9;
        # iterating the element directly is the supported equivalent.
        for child in e:
            if child.tag.endswith('Key'):
                filename = child.text
            if child.tag.endswith('ETag'):
                md5 = child.text.replace('"', '')
                if '-' in md5:
                    # Multipart-upload ETags are not MD5 checksums.
                    md5 = None
        yield {
            'filename': filename,
            'md5': md5,
        }
|
|
||||||
|
|
||||||
def download_files(backup_dir, files):
    """Download each package from the mxe-pkg S3 bucket into *backup_dir*.

    Files are stored as '<filename>_<sha256>'. A file already present
    under that name is skipped. When the listing supplies an MD5, the
    download is verified against it.

    :param backup_dir: destination directory (must exist).
    :param files: iterable of dicts with 'filename' and 'md5' keys,
        as produced by get_files().
    :raises Exception: on an MD5 mismatch.
    """
    # urllib.quote moved to urllib.parse.quote in Python 3; resolve a
    # working quote() the same way the module shims urllib2.
    try:
        quote = urllib.quote
    except AttributeError:
        from urllib.parse import quote
    for f in files:
        url = "https://s3.amazonaws.com/mxe-pkg/%s" % (
            quote(f['filename'])
        )
        data = urllib2.urlopen(url).read()
        if f['md5']:
            md5 = hashlib.md5(data).hexdigest()
            if md5 != f['md5']:
                raise Exception("md5 mismatch for " + f['filename'])
        sha256 = hashlib.sha256(data).hexdigest()
        name = f['filename'] + '_' + sha256
        full_name = os.path.join(backup_dir, name)
        if os.path.exists(full_name):
            print("File %s is already backuped" % name)
            continue
        print("Backup file %s" % name)
        # Binary mode: urlopen().read() returns bytes, so text mode 'w'
        # would raise on Python 3. Use a distinct name to avoid shadowing
        # the loop variable 'f'.
        with open(full_name, 'wb') as out:
            out.write(data)
|
|
||||||
|
|
||||||
def main():
    """Parse command-line options and back up every bucket package."""
    arg_parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        description=__doc__,
    )
    arg_parser.add_argument(
        '--backup-dir',
        required=True,
        type=str,
        help='Path to backup',
    )
    options = arg_parser.parse_args()
    # Stream the bucket listing straight into the downloader.
    download_files(options.backup_dir, get_files())


if __name__ == '__main__':
    main()
|
|
|
@ -1,51 +0,0 @@
|
||||||
#!/usr/bin/env python
|
|
||||||
|
|
||||||
""" Update backup of MXE packages.
|
|
||||||
"""
|
|
||||||
|
|
||||||
import argparse
|
|
||||||
import hashlib
|
|
||||||
import os
|
|
||||||
import shutil
|
|
||||||
|
|
||||||
def make_checksum(filepath):
    """Return the SHA-256 hex digest of the file at *filepath*.

    The file is read in 1 MiB chunks so large packages do not need to
    fit in memory.
    """
    digest = hashlib.sha256()
    with open(filepath, 'rb') as stream:
        chunk = stream.read(1024 ** 2)
        while chunk:
            digest.update(chunk)
            chunk = stream.read(1024 ** 2)
    return digest.hexdigest()
|
|
||||||
|
|
||||||
def update_backup(mxe_pkg_dir, backup_dir):
    """Copy every package from *mxe_pkg_dir* into *backup_dir*.

    Each copy is named '<original>_<sha256>'. Files already present in
    the backup under that suffixed name are skipped.
    """
    for entry in os.listdir(mxe_pkg_dir):
        source = os.path.join(mxe_pkg_dir, entry)
        suffixed = '%s_%s' % (entry, make_checksum(source))
        target = os.path.join(backup_dir, suffixed)
        if os.path.exists(target):
            print("File %s is already backuped" % suffixed)
        else:
            shutil.copy(source, target)
            print("Backup file %s" % suffixed)
|
|
||||||
|
|
||||||
def main():
    """Parse command-line options and refresh the package backup."""
    arg_parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        description=__doc__,
    )
    arg_parser.add_argument(
        '--backup-dir',
        required=True,
        type=str,
        help='Path to backup',
    )
    options = arg_parser.parse_args()
    # The pkg/ directory lives one level above this tools/ script.
    tools_dir = os.path.dirname(os.path.realpath(__file__))
    pkg_dir = os.path.join(tools_dir, '..', 'pkg')
    update_backup(pkg_dir, options.backup_dir)


if __name__ == '__main__':
    main()
|
|
Loading…
Reference in New Issue