From 0f19c2e5b660da2c211b4f70faa46004fb23474e Mon Sep 17 00:00:00 2001 From: Kazuki Suzuki Przyborowski Date: Fri, 11 Oct 2024 12:26:02 -0500 Subject: [PATCH] Revert changes --- pkgbuild/archlinux/python2/basename.py | 20 +- pkgbuild/archlinux/python2/dirname.py | 20 +- pkgbuild/archlinux/python2/pypac-gen.py | 208 +- pkgbuild/archlinux/python2/realpath.py | 20 +- pkgbuild/archlinux/python2/which.py | 20 +- pkgbuild/archlinux/python3/basename.py | 20 +- pkgbuild/archlinux/python3/dirname.py | 20 +- pkgbuild/archlinux/python3/pypac-gen.py | 208 +- pkgbuild/archlinux/python3/realpath.py | 20 +- pkgbuild/archlinux/python3/which.py | 20 +- pkgbuild/debian/python2/basename.py | 20 +- pkgbuild/debian/python2/dirname.py | 20 +- pkgbuild/debian/python2/pydeb-gen.py | 321 +- pkgbuild/debian/python2/realpath.py | 20 +- pkgbuild/debian/python2/which.py | 20 +- pkgbuild/debian/python3/basename.py | 20 +- pkgbuild/debian/python3/dirname.py | 20 +- pkgbuild/debian/python3/pydeb-gen.py | 321 +- pkgbuild/debian/python3/realpath.py | 20 +- pkgbuild/debian/python3/which.py | 20 +- pkgbuild/linuxmint/python2/basename.py | 20 +- pkgbuild/linuxmint/python2/dirname.py | 20 +- pkgbuild/linuxmint/python2/realpath.py | 20 +- pkgbuild/linuxmint/python2/which.py | 20 +- pkgbuild/linuxmint/python3/basename.py | 20 +- pkgbuild/linuxmint/python3/dirname.py | 20 +- pkgbuild/linuxmint/python3/realpath.py | 20 +- pkgbuild/linuxmint/python3/which.py | 20 +- pkgbuild/ubuntu/python2/basename.py | 20 +- pkgbuild/ubuntu/python2/dirname.py | 20 +- pkgbuild/ubuntu/python2/pydeb-gen.py | 585 +- pkgbuild/ubuntu/python2/realpath.py | 20 +- pkgbuild/ubuntu/python2/which.py | 20 +- pkgbuild/ubuntu/python3/basename.py | 20 +- pkgbuild/ubuntu/python3/dirname.py | 20 +- pkgbuild/ubuntu/python3/pydeb-gen.py | 585 +- pkgbuild/ubuntu/python3/realpath.py | 20 +- pkgbuild/ubuntu/python3/which.py | 20 +- pyhttpserv.py | 78 +- pypkg-gen.py | 114 +- pyverinfo.py | 17 +- pywwwget-dl.py | 110 +- pywwwget.py | 3633 ++------ pywwwgetold-dl.py | 110 +- pywwwgetold.py | 10511 +++++----------------- setup.py | 72 +- 46 files changed, 3975 insertions(+), 13538 deletions(-) diff --git a/pkgbuild/archlinux/python2/basename.py b/pkgbuild/archlinux/python2/basename.py index cb03c1e..d0538a9 100755 --- a/pkgbuild/archlinux/python2/basename.py +++ b/pkgbuild/archlinux/python2/basename.py @@ -15,24 +15,22 @@ $FileInfo: basename.py - Last Update: 2/15/2016 Ver. 0.0.5 RC 3 - Author: cooldude2k $ ''' -from __future__ import (absolute_import, division, print_function, - unicode_literals) - -import argparse +from __future__ import absolute_import, division, print_function, unicode_literals import os import sys +import argparse __version_info__ = (0, 0, 5, "rc3") -if (__version_info__[3] is not None): - __version__ = str(__version_info__[0]) + "." + str(__version_info__[1]) + "." + str( - __version_info__[2]) + "+" + str(__version_info__[3]) -if (__version_info__[3] is None): - __version__ = str(__version_info__[ - 0]) + "." + str(__version_info__[1]) + "." 
+ str(__version_info__[2]) +if(__version_info__[3] != None): + __version__ = str(__version_info__[0])+"."+str(__version_info__[1])+"."+str( + __version_info__[2])+"+"+str(__version_info__[3]) +if(__version_info__[3] == None): + __version__ = str( + __version_info__[0])+"."+str(__version_info__[1])+"."+str(__version_info__[2]) proname = "basename" prover = __version__ -profullname = proname + " " + prover +profullname = proname+" "+prover parser = argparse.ArgumentParser(conflict_handler="resolve", add_help=True) parser.add_argument("-v", "--version", action="version", version=profullname) diff --git a/pkgbuild/archlinux/python2/dirname.py b/pkgbuild/archlinux/python2/dirname.py index 1225a5a..46d4997 100755 --- a/pkgbuild/archlinux/python2/dirname.py +++ b/pkgbuild/archlinux/python2/dirname.py @@ -15,24 +15,22 @@ $FileInfo: dirname.py - Last Update: 2/15/2016 Ver. 0.0.5 RC 3 - Author: cooldude2k $ ''' -from __future__ import (absolute_import, division, print_function, - unicode_literals) - -import argparse +from __future__ import absolute_import, division, print_function, unicode_literals import os import sys +import argparse __version_info__ = (0, 0, 5, "rc3") -if (__version_info__[3] is not None): - __version__ = str(__version_info__[0]) + "." + str(__version_info__[1]) + "." + str( - __version_info__[2]) + "+" + str(__version_info__[3]) -if (__version_info__[3] is None): - __version__ = str(__version_info__[ - 0]) + "." + str(__version_info__[1]) + "." + str(__version_info__[2]) +if(__version_info__[3] != None): + __version__ = str(__version_info__[0])+"."+str(__version_info__[1])+"."+str( + __version_info__[2])+"+"+str(__version_info__[3]) +if(__version_info__[3] == None): + __version__ = str( + __version_info__[0])+"."+str(__version_info__[1])+"."+str(__version_info__[2]) proname = "dirname" prover = __version__ -profullname = proname + " " + prover +profullname = proname+" "+prover parser = argparse.ArgumentParser(conflict_handler="resolve", add_help=True) parser.add_argument("-v", "--version", action="version", version=profullname) diff --git a/pkgbuild/archlinux/python2/pypac-gen.py b/pkgbuild/archlinux/python2/pypac-gen.py index 5a0bbd1..6104581 100644 --- a/pkgbuild/archlinux/python2/pypac-gen.py +++ b/pkgbuild/archlinux/python2/pypac-gen.py @@ -15,30 +15,28 @@ $FileInfo: pypac-gen.py - Last Update: 6/1/2016 Ver. 0.2.0 RC 1 - Author: cooldude2k $ ''' -from __future__ import (absolute_import, division, print_function, - unicode_literals) - -import argparse -import datetime -import hashlib -import json -import os +from __future__ import absolute_import, division, print_function, unicode_literals import re -import subprocess +import os import sys import time +import datetime +import argparse +import hashlib +import subprocess +import json __version_info__ = (0, 2, 0, "rc1") -if (__version_info__[3] is not None): - __version__ = str(__version_info__[0]) + "." + str(__version_info__[1]) + "." + str( - __version_info__[2]) + "+" + str(__version_info__[3]) -if (__version_info__[3] is None): - __version__ = str(__version_info__[ - 0]) + "." + str(__version_info__[1]) + "." 
+ str(__version_info__[2]) +if(__version_info__[3] != None): + __version__ = str(__version_info__[0])+"."+str(__version_info__[1])+"."+str( + __version_info__[2])+"+"+str(__version_info__[3]) +if(__version_info__[3] == None): + __version__ = str( + __version_info__[0])+"."+str(__version_info__[1])+"."+str(__version_info__[2]) proname = "pypac-gen" prover = __version__ -profullname = proname + " " + prover +profullname = proname+" "+prover def which_exec(execfile): @@ -63,20 +61,17 @@ def which_exec(execfile): action="store_true", help="get pkg source") getargs = parser.parse_args() getargs.source = os.path.realpath(getargs.source) -pkgsetuppy = os.path.realpath(getargs.source + os.path.sep + "setup.py") +pkgsetuppy = os.path.realpath(getargs.source+os.path.sep+"setup.py") pyexecpath = os.path.realpath(sys.executable) -if (not os.path.exists(getargs.source) or not os.path.isdir(getargs.source)): +if(not os.path.exists(getargs.source) or not os.path.isdir(getargs.source)): raise Exception("Could not find directory.") -if (not os.path.exists(pkgsetuppy) or not os.path.isfile(pkgsetuppy)): +if(not os.path.exists(pkgsetuppy) or not os.path.isfile(pkgsetuppy)): raise Exception("Could not find setup.py in directory.") -pypkgenlistp = subprocess.Popen([pyexecpath, - pkgsetuppy, - "getversioninfo"], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE) +pypkgenlistp = subprocess.Popen( + [pyexecpath, pkgsetuppy, "getversioninfo"], stdout=subprocess.PIPE, stderr=subprocess.PIPE) pypkgenout, pypkgenerr = pypkgenlistp.communicate() -if (sys.version[0] == "3"): +if(sys.version[0] == "3"): pypkgenout = pypkgenout.decode('utf-8') pymodule = json.loads(pypkgenout) setuppy_verinfo = pymodule['versionlist'] @@ -92,79 +87,78 @@ def which_exec(execfile): setuppy_longdescription = pymodule['longdescription'] setuppy_platforms = pymodule['platforms'] -if (sys.version[0] == "2"): +if(sys.version[0] == "2"): pkgsource = "py2www-get" -if (sys.version[0] == "3"): +if(sys.version[0] == "3"): pkgsource = "py3www-get" pkgupstreamname = "PyWWW-Get" -pkgveralt = str(setuppy_verinfo[0]) + "." + \ - str(setuppy_verinfo[1]) + "." + str(setuppy_verinfo[2]) +pkgveralt = str(setuppy_verinfo[0])+"." 
+ \ + str(setuppy_verinfo[1])+"."+str(setuppy_verinfo[2]) pkgveraltrel = str(setuppy_verinfo[4]) -pkgver = str(pkgveralt) + "-rc" + str(setuppy_verinfo[4]) +pkgver = str(pkgveralt)+"-rc"+str(setuppy_verinfo[4]) pkgurgency = "urgency=low" pkgauthorname = setuppy_author pkgauthoremail = setuppy_authoremail pkgauthoremailalt = setuppy_authoremail.replace( "@", "[at]").replace(".", "[dot]") -pkgauthor = pkgauthorname + " <" + pkgauthoremail + ">" -pkgauthoralt = pkgauthorname + " <" + pkgauthoremailalt + ">" +pkgauthor = pkgauthorname+" <"+pkgauthoremail+">" +pkgauthoralt = pkgauthorname+" <"+pkgauthoremailalt+">" pkgmaintainername = setuppy_maintainer pkgmaintaineremail = setuppy_maintaineremail pkgmaintaineremailalt = setuppy_maintaineremail.replace( "@", "[at]").replace(".", "[dot]") -pkgmaintainer = pkgmaintainername + " <" + pkgmaintaineremail + ">" -pkgmaintaineralt = pkgmaintainername + " <" + pkgmaintaineremailalt + ">" +pkgmaintainer = pkgmaintainername+" <"+pkgmaintaineremail+">" +pkgmaintaineralt = pkgmaintainername+" <"+pkgmaintaineremailalt+">" pkggiturl = "https://github.com/GameMaker2k/PyWWW-Get.git" pkghomepage = setuppy_url pkgsection = "python" pkgpriority = "optional" -if (sys.version[0] == "2"): +if(sys.version[0] == "2"): pkgbuilddepends = "'python2'" -if (sys.version[0] == "3"): +if(sys.version[0] == "3"): pkgbuilddepends = "'python'" pkgstandardsversion = "3.9.8" -if (sys.version[0] == "2"): +if(sys.version[0] == "2"): pkgpackage = "python2-pywww-get" pkgoldname = "python2-www-get" -if (sys.version[0] == "3"): +if(sys.version[0] == "3"): pkgpackage = "python-pywww-get" pkgoldname = "python-www-get" pkgarchitecture = "'any' 'i686' 'x86_64'" -if (sys.version[0] == "2"): +if(sys.version[0] == "2"): pkgdepends = "'python2-setuptools'" -if (sys.version[0] == "3"): +if(sys.version[0] == "3"): pkgdepends = "'python-setuptools'" -pkgdescription = setuppy_description + "\n " + setuppy_longdescription +pkgdescription = setuppy_description+"\n "+setuppy_longdescription pkgtzstr = time.strftime("%a, %d %b %Y %H:%M:%S %z") -if (getargs.getsource): +if(getargs.getsource == True): print(getargs.source) sys.exit() -if (getargs.getparent): +if(getargs.getparent == True): print(os.path.realpath(os.path.dirname(getargs.source))) sys.exit() -if (getargs.getdirname): - print(pkgsource + "_" + pkgveralt + ".orig") +if(getargs.getdirname == True): + print(pkgsource+"_"+pkgveralt+".orig") sys.exit() -if (getargs.gettarname): - print(pkgsource + "_" + pkgveralt + ".orig.tar.gz") +if(getargs.gettarname == True): + print(pkgsource+"_"+pkgveralt+".orig.tar.gz") sys.exit() -if (getargs.getpkgsource): +if(getargs.getpkgsource == True): print(pkgsource) sys.exit() print("generating arch linux package build directory") -pacpkg_pkgbuild_dir = os.path.realpath( - getargs.source + os.path.sep + pkgsource) -print("creating directory " + pacpkg_pkgbuild_dir) -if (not os.path.exists(pacpkg_pkgbuild_dir)): +pacpkg_pkgbuild_dir = os.path.realpath(getargs.source+os.path.sep+pkgsource) +print("creating directory "+pacpkg_pkgbuild_dir) +if(not os.path.exists(pacpkg_pkgbuild_dir)): os.makedirs(pacpkg_pkgbuild_dir) os.chmod(pacpkg_pkgbuild_dir, int("0755", 8)) gzparentdir = os.path.realpath(os.path.dirname(getargs.source)) -filetargz = open(os.path.realpath(gzparentdir + os.path.sep + - pkgsource + "_" + pkgveralt + ".orig.tar.gz"), "rb") +filetargz = open(os.path.realpath(gzparentdir+os.path.sep + + pkgsource+"_"+pkgveralt+".orig.tar.gz"), "rb") filetargzmd5 = hashlib.md5(filetargz.read()).hexdigest() 
filetargz.seek(0) filetargzsha1 = hashlib.sha1(filetargz.read()).hexdigest() @@ -179,85 +173,79 @@ def which_exec(execfile): filetargz.close() pacpkg_pkgbuild_file = os.path.realpath( - pacpkg_pkgbuild_dir + os.path.sep + "PKGBUILD") -print("generating file " + pacpkg_pkgbuild_file) -if (sys.version[0] == "2"): - pacpkg_string_temp = "# Maintainer: " + pkgmaintaineralt + "\n" - pacpkg_string_temp += "# This file was automatically generated by " + \ - profullname + " at\n" - pacpkg_string_temp += "# " + pkgtzstr + "\n\n" - pacpkg_string_temp += "pkgname=" + pkgpackage + "\n" - pacpkg_string_temp += "pkgver=" + pkgveralt + "\n" - pacpkg_string_temp += "pkgrel=" + pkgveraltrel + "\n" - pacpkg_string_temp += "pkgdesc='" + setuppy_description + "'\n" - pacpkg_string_temp += "url='" + setuppy_url + "'\n" - pacpkg_string_temp += "arch=(" + pkgarchitecture + ")\n" - pacpkg_string_temp += "license=('" + setuppy_license + "')\n" + pacpkg_pkgbuild_dir+os.path.sep+"PKGBUILD") +print("generating file "+pacpkg_pkgbuild_file) +if(sys.version[0] == "2"): + pacpkg_string_temp = "# Maintainer: "+pkgmaintaineralt+"\n" + pacpkg_string_temp += "# This file was automatically generated by "+profullname+" at\n" + pacpkg_string_temp += "# "+pkgtzstr+"\n\n" + pacpkg_string_temp += "pkgname="+pkgpackage+"\n" + pacpkg_string_temp += "pkgver="+pkgveralt+"\n" + pacpkg_string_temp += "pkgrel="+pkgveraltrel+"\n" + pacpkg_string_temp += "pkgdesc='"+setuppy_description+"'\n" + pacpkg_string_temp += "url='"+setuppy_url+"'\n" + pacpkg_string_temp += "arch=("+pkgarchitecture+")\n" + pacpkg_string_temp += "license=('"+setuppy_license+"')\n" pacpkg_string_temp += "groups=()\n" - pacpkg_string_temp += "depends=(" + pkgbuilddepends + ")\n" + pacpkg_string_temp += "depends=("+pkgbuilddepends+")\n" pacpkg_string_temp += "optdepends=()\n" - pacpkg_string_temp += "makedepends=(" + pkgdepends + ")\n" + pacpkg_string_temp += "makedepends=("+pkgdepends+")\n" pacpkg_string_temp += "conflicts=()\n" - pacpkg_string_temp += "replaces=('" + pkgoldname + "')\n" + pacpkg_string_temp += "replaces=('"+pkgoldname+"')\n" pacpkg_string_temp += "backup=()\n" pacpkg_string_temp += "options=(!strip !emptydirs)\n" pacpkg_string_temp += "install=''\n" - pacpkg_string_temp += "source=('." 
+ os.path.sep + \ - pkgsource + "_" + pkgveralt + ".orig.tar.gz')\n" - pacpkg_string_temp += "md5sums=('" + filetargzmd5 + "')\n" - pacpkg_string_temp += "sha1sums=('" + filetargzsha1 + "')\n" - pacpkg_string_temp += "sha224sums=('" + filetargzsha224 + "')\n" - pacpkg_string_temp += "sha256sums=('" + filetargzsha256 + "')\n" - pacpkg_string_temp += "sha384sums=('" + filetargzsha384 + "')\n" - pacpkg_string_temp += "sha512sums=('" + filetargzsha512 + "')\n\n" + pacpkg_string_temp += "source=('."+os.path.sep + \ + pkgsource+"_"+pkgveralt+".orig.tar.gz')\n" + pacpkg_string_temp += "md5sums=('"+filetargzmd5+"')\n" + pacpkg_string_temp += "sha1sums=('"+filetargzsha1+"')\n" + pacpkg_string_temp += "sha224sums=('"+filetargzsha224+"')\n" + pacpkg_string_temp += "sha256sums=('"+filetargzsha256+"')\n" + pacpkg_string_temp += "sha384sums=('"+filetargzsha384+"')\n" + pacpkg_string_temp += "sha512sums=('"+filetargzsha512+"')\n\n" pacpkg_string_temp += "build() {\n" - pacpkg_string_temp += " cd \"${srcdir}/" + \ - pkgsource + "_${pkgver}.orig\"\n" + pacpkg_string_temp += " cd \"${srcdir}/"+pkgsource+"_${pkgver}.orig\"\n" pacpkg_string_temp += " python2 ./setup.py build\n" pacpkg_string_temp += "}\n\n" pacpkg_string_temp += "package() {\n" - pacpkg_string_temp += " cd \"${srcdir}/" + \ - pkgsource + "_${pkgver}.orig\"\n" + pacpkg_string_temp += " cd \"${srcdir}/"+pkgsource+"_${pkgver}.orig\"\n" pacpkg_string_temp += " python2 ./setup.py install --root=\"${pkgdir}\" --optimize=1\n" pacpkg_string_temp += "}\n\n" pacpkg_string_temp += "# vim:set ts=2 sw=2 et:\n" -if (sys.version[0] == "3"): - pacpkg_string_temp = "# Maintainer: " + pkgmaintaineralt + "\n" - pacpkg_string_temp += "# This file was automatically generated by " + \ - profullname + " at\n" - pacpkg_string_temp += "# " + pkgtzstr + "\n\n" - pacpkg_string_temp += "pkgname=" + pkgpackage + "\n" - pacpkg_string_temp += "pkgver=" + pkgveralt + "\n" - pacpkg_string_temp += "pkgrel=" + pkgveraltrel + "\n" - pacpkg_string_temp += "pkgdesc='" + setuppy_description + "'\n" - pacpkg_string_temp += "url='" + setuppy_url + "'\n" - pacpkg_string_temp += "arch=(" + pkgarchitecture + ")\n" - pacpkg_string_temp += "license=('" + setuppy_license + "')\n" +if(sys.version[0] == "3"): + pacpkg_string_temp = "# Maintainer: "+pkgmaintaineralt+"\n" + pacpkg_string_temp += "# This file was automatically generated by "+profullname+" at\n" + pacpkg_string_temp += "# "+pkgtzstr+"\n\n" + pacpkg_string_temp += "pkgname="+pkgpackage+"\n" + pacpkg_string_temp += "pkgver="+pkgveralt+"\n" + pacpkg_string_temp += "pkgrel="+pkgveraltrel+"\n" + pacpkg_string_temp += "pkgdesc='"+setuppy_description+"'\n" + pacpkg_string_temp += "url='"+setuppy_url+"'\n" + pacpkg_string_temp += "arch=("+pkgarchitecture+")\n" + pacpkg_string_temp += "license=('"+setuppy_license+"')\n" pacpkg_string_temp += "groups=()\n" - pacpkg_string_temp += "depends=(" + pkgbuilddepends + ")\n" + pacpkg_string_temp += "depends=("+pkgbuilddepends+")\n" pacpkg_string_temp += "optdepends=()\n" - pacpkg_string_temp += "makedepends=(" + pkgdepends + ")\n" + pacpkg_string_temp += "makedepends=("+pkgdepends+")\n" pacpkg_string_temp += "conflicts=()\n" - pacpkg_string_temp += "replaces=('" + pkgoldname + "')\n" + pacpkg_string_temp += "replaces=('"+pkgoldname+"')\n" pacpkg_string_temp += "backup=()\n" pacpkg_string_temp += "options=(!strip !emptydirs)\n" pacpkg_string_temp += "install=''\n" - pacpkg_string_temp += "source=('." 
+ os.path.sep + \ - pkgsource + "_" + pkgveralt + ".orig.tar.gz')\n" - pacpkg_string_temp += "md5sums=('" + filetargzmd5 + "')\n" - pacpkg_string_temp += "sha1sums=('" + filetargzsha1 + "')\n" - pacpkg_string_temp += "sha224sums=('" + filetargzsha224 + "')\n" - pacpkg_string_temp += "sha256sums=('" + filetargzsha256 + "')\n" - pacpkg_string_temp += "sha384sums=('" + filetargzsha384 + "')\n" - pacpkg_string_temp += "sha512sums=('" + filetargzsha512 + "')\n\n" + pacpkg_string_temp += "source=('."+os.path.sep + \ + pkgsource+"_"+pkgveralt+".orig.tar.gz')\n" + pacpkg_string_temp += "md5sums=('"+filetargzmd5+"')\n" + pacpkg_string_temp += "sha1sums=('"+filetargzsha1+"')\n" + pacpkg_string_temp += "sha224sums=('"+filetargzsha224+"')\n" + pacpkg_string_temp += "sha256sums=('"+filetargzsha256+"')\n" + pacpkg_string_temp += "sha384sums=('"+filetargzsha384+"')\n" + pacpkg_string_temp += "sha512sums=('"+filetargzsha512+"')\n\n" pacpkg_string_temp += "build() {\n" - pacpkg_string_temp += " cd \"${srcdir}/" + \ - pkgsource + "_${pkgver}.orig\"\n" + pacpkg_string_temp += " cd \"${srcdir}/"+pkgsource+"_${pkgver}.orig\"\n" pacpkg_string_temp += " python3 ./setup.py build\n" pacpkg_string_temp += "}\n\n" pacpkg_string_temp += "package() {\n" - pacpkg_string_temp += " cd \"${srcdir}/" + \ - pkgsource + "_${pkgver}.orig\"\n" + pacpkg_string_temp += " cd \"${srcdir}/"+pkgsource+"_${pkgver}.orig\"\n" pacpkg_string_temp += " python3 ./setup.py install --root=\"${pkgdir}\" --optimize=1\n" pacpkg_string_temp += "}\n\n" pacpkg_string_temp += "# vim:set ts=2 sw=2 et:\n" diff --git a/pkgbuild/archlinux/python2/realpath.py b/pkgbuild/archlinux/python2/realpath.py index 5eaaa7a..b2e441f 100755 --- a/pkgbuild/archlinux/python2/realpath.py +++ b/pkgbuild/archlinux/python2/realpath.py @@ -15,24 +15,22 @@ $FileInfo: realpath.py - Last Update: 2/15/2016 Ver. 0.0.5 RC 3 - Author: cooldude2k $ ''' -from __future__ import (absolute_import, division, print_function, - unicode_literals) - -import argparse +from __future__ import absolute_import, division, print_function, unicode_literals import os import sys +import argparse __version_info__ = (0, 0, 5, "rc3") -if (__version_info__[3] is not None): - __version__ = str(__version_info__[0]) + "." + str(__version_info__[1]) + "." + str( - __version_info__[2]) + "+" + str(__version_info__[3]) -if (__version_info__[3] is None): - __version__ = str(__version_info__[ - 0]) + "." + str(__version_info__[1]) + "." + str(__version_info__[2]) +if(__version_info__[3] != None): + __version__ = str(__version_info__[0])+"."+str(__version_info__[1])+"."+str( + __version_info__[2])+"+"+str(__version_info__[3]) +if(__version_info__[3] == None): + __version__ = str( + __version_info__[0])+"."+str(__version_info__[1])+"."+str(__version_info__[2]) proname = "realpath" prover = __version__ -profullname = proname + " " + prover +profullname = proname+" "+prover parser = argparse.ArgumentParser(conflict_handler="resolve", add_help=True) parser.add_argument("-v", "--version", action="version", version=profullname) diff --git a/pkgbuild/archlinux/python2/which.py b/pkgbuild/archlinux/python2/which.py index 36ff5fb..e5cf271 100755 --- a/pkgbuild/archlinux/python2/which.py +++ b/pkgbuild/archlinux/python2/which.py @@ -15,24 +15,22 @@ $FileInfo: which.py - Last Update: 2/15/2016 Ver. 
0.0.5 RC 3 - Author: cooldude2k $ ''' -from __future__ import (absolute_import, division, print_function, - unicode_literals) - -import argparse +from __future__ import absolute_import, division, print_function, unicode_literals import os import sys +import argparse __version_info__ = (0, 0, 5, "rc3") -if (__version_info__[3] is not None): - __version__ = str(__version_info__[0]) + "." + str(__version_info__[1]) + "." + str( - __version_info__[2]) + "+" + str(__version_info__[3]) -if (__version_info__[3] is None): - __version__ = str(__version_info__[ - 0]) + "." + str(__version_info__[1]) + "." + str(__version_info__[2]) +if(__version_info__[3] != None): + __version__ = str(__version_info__[0])+"."+str(__version_info__[1])+"."+str( + __version_info__[2])+"+"+str(__version_info__[3]) +if(__version_info__[3] == None): + __version__ = str( + __version_info__[0])+"."+str(__version_info__[1])+"."+str(__version_info__[2]) proname = "which" prover = __version__ -profullname = proname + " " + prover +profullname = proname+" "+prover def which_exec(execfile): diff --git a/pkgbuild/archlinux/python3/basename.py b/pkgbuild/archlinux/python3/basename.py index 8b41db3..346121c 100755 --- a/pkgbuild/archlinux/python3/basename.py +++ b/pkgbuild/archlinux/python3/basename.py @@ -15,24 +15,22 @@ $FileInfo: basename.py - Last Update: 2/15/2016 Ver. 0.0.5 RC 3 - Author: cooldude2k $ ''' -from __future__ import (absolute_import, division, print_function, - unicode_literals) - -import argparse +from __future__ import absolute_import, division, print_function, unicode_literals import os import sys +import argparse __version_info__ = (0, 0, 5, "rc3") -if (__version_info__[3] is not None): - __version__ = str(__version_info__[0]) + "." + str(__version_info__[1]) + "." + str( - __version_info__[2]) + "+" + str(__version_info__[3]) -if (__version_info__[3] is None): - __version__ = str(__version_info__[ - 0]) + "." + str(__version_info__[1]) + "." + str(__version_info__[2]) +if(__version_info__[3] != None): + __version__ = str(__version_info__[0])+"."+str(__version_info__[1])+"."+str( + __version_info__[2])+"+"+str(__version_info__[3]) +if(__version_info__[3] == None): + __version__ = str( + __version_info__[0])+"."+str(__version_info__[1])+"."+str(__version_info__[2]) proname = "basename" prover = __version__ -profullname = proname + " " + prover +profullname = proname+" "+prover parser = argparse.ArgumentParser(conflict_handler="resolve", add_help=True) parser.add_argument("-v", "--version", action="version", version=profullname) diff --git a/pkgbuild/archlinux/python3/dirname.py b/pkgbuild/archlinux/python3/dirname.py index 5aea1fd..080502f 100755 --- a/pkgbuild/archlinux/python3/dirname.py +++ b/pkgbuild/archlinux/python3/dirname.py @@ -15,24 +15,22 @@ $FileInfo: dirname.py - Last Update: 2/15/2016 Ver. 0.0.5 RC 3 - Author: cooldude2k $ ''' -from __future__ import (absolute_import, division, print_function, - unicode_literals) - -import argparse +from __future__ import absolute_import, division, print_function, unicode_literals import os import sys +import argparse __version_info__ = (0, 0, 5, "rc3") -if (__version_info__[3] is not None): - __version__ = str(__version_info__[0]) + "." + str(__version_info__[1]) + "." + str( - __version_info__[2]) + "+" + str(__version_info__[3]) -if (__version_info__[3] is None): - __version__ = str(__version_info__[ - 0]) + "." + str(__version_info__[1]) + "." 
+ str(__version_info__[2]) +if(__version_info__[3] != None): + __version__ = str(__version_info__[0])+"."+str(__version_info__[1])+"."+str( + __version_info__[2])+"+"+str(__version_info__[3]) +if(__version_info__[3] == None): + __version__ = str( + __version_info__[0])+"."+str(__version_info__[1])+"."+str(__version_info__[2]) proname = "dirname" prover = __version__ -profullname = proname + " " + prover +profullname = proname+" "+prover parser = argparse.ArgumentParser(conflict_handler="resolve", add_help=True) parser.add_argument("-v", "--version", action="version", version=profullname) diff --git a/pkgbuild/archlinux/python3/pypac-gen.py b/pkgbuild/archlinux/python3/pypac-gen.py index 974a1ca..44ef6a0 100644 --- a/pkgbuild/archlinux/python3/pypac-gen.py +++ b/pkgbuild/archlinux/python3/pypac-gen.py @@ -15,30 +15,28 @@ $FileInfo: pypac-gen.py - Last Update: 6/1/2016 Ver. 0.2.0 RC 1 - Author: cooldude2k $ ''' -from __future__ import (absolute_import, division, print_function, - unicode_literals) - -import argparse -import datetime -import hashlib -import json -import os +from __future__ import absolute_import, division, print_function, unicode_literals import re -import subprocess +import os import sys import time +import datetime +import argparse +import hashlib +import subprocess +import json __version_info__ = (0, 2, 0, "rc1") -if (__version_info__[3] is not None): - __version__ = str(__version_info__[0]) + "." + str(__version_info__[1]) + "." + str( - __version_info__[2]) + "+" + str(__version_info__[3]) -if (__version_info__[3] is None): - __version__ = str(__version_info__[ - 0]) + "." + str(__version_info__[1]) + "." + str(__version_info__[2]) +if(__version_info__[3] != None): + __version__ = str(__version_info__[0])+"."+str(__version_info__[1])+"."+str( + __version_info__[2])+"+"+str(__version_info__[3]) +if(__version_info__[3] == None): + __version__ = str( + __version_info__[0])+"."+str(__version_info__[1])+"."+str(__version_info__[2]) proname = "pypac-gen" prover = __version__ -profullname = proname + " " + prover +profullname = proname+" "+prover def which_exec(execfile): @@ -63,20 +61,17 @@ def which_exec(execfile): action="store_true", help="get pkg source") getargs = parser.parse_args() getargs.source = os.path.realpath(getargs.source) -pkgsetuppy = os.path.realpath(getargs.source + os.path.sep + "setup.py") +pkgsetuppy = os.path.realpath(getargs.source+os.path.sep+"setup.py") pyexecpath = os.path.realpath(sys.executable) -if (not os.path.exists(getargs.source) or not os.path.isdir(getargs.source)): +if(not os.path.exists(getargs.source) or not os.path.isdir(getargs.source)): raise Exception("Could not find directory.") -if (not os.path.exists(pkgsetuppy) or not os.path.isfile(pkgsetuppy)): +if(not os.path.exists(pkgsetuppy) or not os.path.isfile(pkgsetuppy)): raise Exception("Could not find setup.py in directory.") -pypkgenlistp = subprocess.Popen([pyexecpath, - pkgsetuppy, - "getversioninfo"], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE) +pypkgenlistp = subprocess.Popen( + [pyexecpath, pkgsetuppy, "getversioninfo"], stdout=subprocess.PIPE, stderr=subprocess.PIPE) pypkgenout, pypkgenerr = pypkgenlistp.communicate() -if (sys.version[0] == "3"): +if(sys.version[0] == "3"): pypkgenout = pypkgenout.decode('utf-8') pymodule = json.loads(pypkgenout) setuppy_verinfo = pymodule['versionlist'] @@ -92,79 +87,78 @@ def which_exec(execfile): setuppy_longdescription = pymodule['longdescription'] setuppy_platforms = pymodule['platforms'] -if (sys.version[0] == "2"): 
+if(sys.version[0] == "2"): pkgsource = "py2www-get" -if (sys.version[0] == "3"): +if(sys.version[0] == "3"): pkgsource = "py3www-get" pkgupstreamname = "PyWWW-Get" -pkgveralt = str(setuppy_verinfo[0]) + "." + \ - str(setuppy_verinfo[1]) + "." + str(setuppy_verinfo[2]) +pkgveralt = str(setuppy_verinfo[0])+"." + \ + str(setuppy_verinfo[1])+"."+str(setuppy_verinfo[2]) pkgveraltrel = str(setuppy_verinfo[4]) -pkgver = str(pkgveralt) + "-rc" + str(setuppy_verinfo[4]) +pkgver = str(pkgveralt)+"-rc"+str(setuppy_verinfo[4]) pkgurgency = "urgency=low" pkgauthorname = setuppy_author pkgauthoremail = setuppy_authoremail pkgauthoremailalt = setuppy_authoremail.replace( "@", "[at]").replace(".", "[dot]") -pkgauthor = pkgauthorname + " <" + pkgauthoremail + ">" -pkgauthoralt = pkgauthorname + " <" + pkgauthoremailalt + ">" +pkgauthor = pkgauthorname+" <"+pkgauthoremail+">" +pkgauthoralt = pkgauthorname+" <"+pkgauthoremailalt+">" pkgmaintainername = setuppy_maintainer pkgmaintaineremail = setuppy_maintaineremail pkgmaintaineremailalt = setuppy_maintaineremail.replace( "@", "[at]").replace(".", "[dot]") -pkgmaintainer = pkgmaintainername + " <" + pkgmaintaineremail + ">" -pkgmaintaineralt = pkgmaintainername + " <" + pkgmaintaineremailalt + ">" +pkgmaintainer = pkgmaintainername+" <"+pkgmaintaineremail+">" +pkgmaintaineralt = pkgmaintainername+" <"+pkgmaintaineremailalt+">" pkggiturl = "https://github.com/GameMaker2k/PyWWW-Get.git" pkghomepage = setuppy_url pkgsection = "python" pkgpriority = "optional" -if (sys.version[0] == "2"): +if(sys.version[0] == "2"): pkgbuilddepends = "'python2'" -if (sys.version[0] == "3"): +if(sys.version[0] == "3"): pkgbuilddepends = "'python'" pkgstandardsversion = "3.9.8" -if (sys.version[0] == "2"): +if(sys.version[0] == "2"): pkgpackage = "python2-pywww-get" pkgoldname = "python2-www-get" -if (sys.version[0] == "3"): +if(sys.version[0] == "3"): pkgpackage = "python-pywww-get" pkgoldname = "python-www-get" pkgarchitecture = "'any' 'i686' 'x86_64'" -if (sys.version[0] == "2"): +if(sys.version[0] == "2"): pkgdepends = "'python2-setuptools'" -if (sys.version[0] == "3"): +if(sys.version[0] == "3"): pkgdepends = "'python-setuptools'" -pkgdescription = setuppy_description + "\n " + setuppy_longdescription +pkgdescription = setuppy_description+"\n "+setuppy_longdescription pkgtzstr = time.strftime("%a, %d %b %Y %H:%M:%S %z") -if (getargs.getsource): +if(getargs.getsource == True): print(getargs.source) sys.exit() -if (getargs.getparent): +if(getargs.getparent == True): print(os.path.realpath(os.path.dirname(getargs.source))) sys.exit() -if (getargs.getdirname): - print(pkgsource + "_" + pkgveralt + ".orig") +if(getargs.getdirname == True): + print(pkgsource+"_"+pkgveralt+".orig") sys.exit() -if (getargs.gettarname): - print(pkgsource + "_" + pkgveralt + ".orig.tar.gz") +if(getargs.gettarname == True): + print(pkgsource+"_"+pkgveralt+".orig.tar.gz") sys.exit() -if (getargs.getpkgsource): +if(getargs.getpkgsource == True): print(pkgsource) sys.exit() print("generating arch linux package build directory") -pacpkg_pkgbuild_dir = os.path.realpath( - getargs.source + os.path.sep + pkgsource) -print("creating directory " + pacpkg_pkgbuild_dir) -if (not os.path.exists(pacpkg_pkgbuild_dir)): +pacpkg_pkgbuild_dir = os.path.realpath(getargs.source+os.path.sep+pkgsource) +print("creating directory "+pacpkg_pkgbuild_dir) +if(not os.path.exists(pacpkg_pkgbuild_dir)): os.makedirs(pacpkg_pkgbuild_dir) os.chmod(pacpkg_pkgbuild_dir, int("0755", 8)) gzparentdir = 
os.path.realpath(os.path.dirname(getargs.source)) -filetargz = open(os.path.realpath(gzparentdir + os.path.sep + - pkgsource + "_" + pkgveralt + ".orig.tar.gz"), "rb") +filetargz = open(os.path.realpath(gzparentdir+os.path.sep + + pkgsource+"_"+pkgveralt+".orig.tar.gz"), "rb") filetargzmd5 = hashlib.md5(filetargz.read()).hexdigest() filetargz.seek(0) filetargzsha1 = hashlib.sha1(filetargz.read()).hexdigest() @@ -179,85 +173,79 @@ def which_exec(execfile): filetargz.close() pacpkg_pkgbuild_file = os.path.realpath( - pacpkg_pkgbuild_dir + os.path.sep + "PKGBUILD") -print("generating file " + pacpkg_pkgbuild_file) -if (sys.version[0] == "2"): - pacpkg_string_temp = "# Maintainer: " + pkgmaintaineralt + "\n" - pacpkg_string_temp += "# This file was automatically generated by " + \ - profullname + " at\n" - pacpkg_string_temp += "# " + pkgtzstr + "\n\n" - pacpkg_string_temp += "pkgname=" + pkgpackage + "\n" - pacpkg_string_temp += "pkgver=" + pkgveralt + "\n" - pacpkg_string_temp += "pkgrel=" + pkgveraltrel + "\n" - pacpkg_string_temp += "pkgdesc='" + setuppy_description + "'\n" - pacpkg_string_temp += "url='" + setuppy_url + "'\n" - pacpkg_string_temp += "arch=(" + pkgarchitecture + ")\n" - pacpkg_string_temp += "license=('" + setuppy_license + "')\n" + pacpkg_pkgbuild_dir+os.path.sep+"PKGBUILD") +print("generating file "+pacpkg_pkgbuild_file) +if(sys.version[0] == "2"): + pacpkg_string_temp = "# Maintainer: "+pkgmaintaineralt+"\n" + pacpkg_string_temp += "# This file was automatically generated by "+profullname+" at\n" + pacpkg_string_temp += "# "+pkgtzstr+"\n\n" + pacpkg_string_temp += "pkgname="+pkgpackage+"\n" + pacpkg_string_temp += "pkgver="+pkgveralt+"\n" + pacpkg_string_temp += "pkgrel="+pkgveraltrel+"\n" + pacpkg_string_temp += "pkgdesc='"+setuppy_description+"'\n" + pacpkg_string_temp += "url='"+setuppy_url+"'\n" + pacpkg_string_temp += "arch=("+pkgarchitecture+")\n" + pacpkg_string_temp += "license=('"+setuppy_license+"')\n" pacpkg_string_temp += "groups=()\n" - pacpkg_string_temp += "depends=(" + pkgbuilddepends + ")\n" + pacpkg_string_temp += "depends=("+pkgbuilddepends+")\n" pacpkg_string_temp += "optdepends=()\n" - pacpkg_string_temp += "makedepends=(" + pkgdepends + ")\n" + pacpkg_string_temp += "makedepends=("+pkgdepends+")\n" pacpkg_string_temp += "conflicts=()\n" - pacpkg_string_temp += "replaces=('" + pkgoldname + "')\n" + pacpkg_string_temp += "replaces=('"+pkgoldname+"')\n" pacpkg_string_temp += "backup=()\n" pacpkg_string_temp += "options=(!strip !emptydirs)\n" pacpkg_string_temp += "install=''\n" - pacpkg_string_temp += "source=('." 
+ os.path.sep + \ - pkgsource + "_" + pkgveralt + ".orig.tar.gz')\n" - pacpkg_string_temp += "md5sums=('" + filetargzmd5 + "')\n" - pacpkg_string_temp += "sha1sums=('" + filetargzsha1 + "')\n" - pacpkg_string_temp += "sha224sums=('" + filetargzsha224 + "')\n" - pacpkg_string_temp += "sha256sums=('" + filetargzsha256 + "')\n" - pacpkg_string_temp += "sha384sums=('" + filetargzsha384 + "')\n" - pacpkg_string_temp += "sha512sums=('" + filetargzsha512 + "')\n\n" + pacpkg_string_temp += "source=('."+os.path.sep + \ + pkgsource+"_"+pkgveralt+".orig.tar.gz')\n" + pacpkg_string_temp += "md5sums=('"+filetargzmd5+"')\n" + pacpkg_string_temp += "sha1sums=('"+filetargzsha1+"')\n" + pacpkg_string_temp += "sha224sums=('"+filetargzsha224+"')\n" + pacpkg_string_temp += "sha256sums=('"+filetargzsha256+"')\n" + pacpkg_string_temp += "sha384sums=('"+filetargzsha384+"')\n" + pacpkg_string_temp += "sha512sums=('"+filetargzsha512+"')\n\n" pacpkg_string_temp += "build() {\n" - pacpkg_string_temp += " cd \"${srcdir}/" + \ - pkgsource + "_${pkgver}.orig\"\n" + pacpkg_string_temp += " cd \"${srcdir}/"+pkgsource+"_${pkgver}.orig\"\n" pacpkg_string_temp += " python2 ./setup.py build\n" pacpkg_string_temp += "}\n\n" pacpkg_string_temp += "package() {\n" - pacpkg_string_temp += " cd \"${srcdir}/" + \ - pkgsource + "_${pkgver}.orig\"\n" + pacpkg_string_temp += " cd \"${srcdir}/"+pkgsource+"_${pkgver}.orig\"\n" pacpkg_string_temp += " python2 ./setup.py install --root=\"${pkgdir}\" --optimize=1\n" pacpkg_string_temp += "}\n\n" pacpkg_string_temp += "# vim:set ts=2 sw=2 et:\n" -if (sys.version[0] == "3"): - pacpkg_string_temp = "# Maintainer: " + pkgmaintaineralt + "\n" - pacpkg_string_temp += "# This file was automatically generated by " + \ - profullname + " at\n" - pacpkg_string_temp += "# " + pkgtzstr + "\n\n" - pacpkg_string_temp += "pkgname=" + pkgpackage + "\n" - pacpkg_string_temp += "pkgver=" + pkgveralt + "\n" - pacpkg_string_temp += "pkgrel=" + pkgveraltrel + "\n" - pacpkg_string_temp += "pkgdesc='" + setuppy_description + "'\n" - pacpkg_string_temp += "url='" + setuppy_url + "'\n" - pacpkg_string_temp += "arch=(" + pkgarchitecture + ")\n" - pacpkg_string_temp += "license=('" + setuppy_license + "')\n" +if(sys.version[0] == "3"): + pacpkg_string_temp = "# Maintainer: "+pkgmaintaineralt+"\n" + pacpkg_string_temp += "# This file was automatically generated by "+profullname+" at\n" + pacpkg_string_temp += "# "+pkgtzstr+"\n\n" + pacpkg_string_temp += "pkgname="+pkgpackage+"\n" + pacpkg_string_temp += "pkgver="+pkgveralt+"\n" + pacpkg_string_temp += "pkgrel="+pkgveraltrel+"\n" + pacpkg_string_temp += "pkgdesc='"+setuppy_description+"'\n" + pacpkg_string_temp += "url='"+setuppy_url+"'\n" + pacpkg_string_temp += "arch=("+pkgarchitecture+")\n" + pacpkg_string_temp += "license=('"+setuppy_license+"')\n" pacpkg_string_temp += "groups=()\n" - pacpkg_string_temp += "depends=(" + pkgbuilddepends + ")\n" + pacpkg_string_temp += "depends=("+pkgbuilddepends+")\n" pacpkg_string_temp += "optdepends=()\n" - pacpkg_string_temp += "makedepends=(" + pkgdepends + ")\n" + pacpkg_string_temp += "makedepends=("+pkgdepends+")\n" pacpkg_string_temp += "conflicts=()\n" - pacpkg_string_temp += "replaces=('" + pkgoldname + "')\n" + pacpkg_string_temp += "replaces=('"+pkgoldname+"')\n" pacpkg_string_temp += "backup=()\n" pacpkg_string_temp += "options=(!strip !emptydirs)\n" pacpkg_string_temp += "install=''\n" - pacpkg_string_temp += "source=('." 
+ os.path.sep + \ - pkgsource + "_" + pkgveralt + ".orig.tar.gz')\n" - pacpkg_string_temp += "md5sums=('" + filetargzmd5 + "')\n" - pacpkg_string_temp += "sha1sums=('" + filetargzsha1 + "')\n" - pacpkg_string_temp += "sha224sums=('" + filetargzsha224 + "')\n" - pacpkg_string_temp += "sha256sums=('" + filetargzsha256 + "')\n" - pacpkg_string_temp += "sha384sums=('" + filetargzsha384 + "')\n" - pacpkg_string_temp += "sha512sums=('" + filetargzsha512 + "')\n\n" + pacpkg_string_temp += "source=('."+os.path.sep + \ + pkgsource+"_"+pkgveralt+".orig.tar.gz')\n" + pacpkg_string_temp += "md5sums=('"+filetargzmd5+"')\n" + pacpkg_string_temp += "sha1sums=('"+filetargzsha1+"')\n" + pacpkg_string_temp += "sha224sums=('"+filetargzsha224+"')\n" + pacpkg_string_temp += "sha256sums=('"+filetargzsha256+"')\n" + pacpkg_string_temp += "sha384sums=('"+filetargzsha384+"')\n" + pacpkg_string_temp += "sha512sums=('"+filetargzsha512+"')\n\n" pacpkg_string_temp += "build() {\n" - pacpkg_string_temp += " cd \"${srcdir}/" + \ - pkgsource + "_${pkgver}.orig\"\n" + pacpkg_string_temp += " cd \"${srcdir}/"+pkgsource+"_${pkgver}.orig\"\n" pacpkg_string_temp += " python3 ./setup.py build\n" pacpkg_string_temp += "}\n\n" pacpkg_string_temp += "package() {\n" - pacpkg_string_temp += " cd \"${srcdir}/" + \ - pkgsource + "_${pkgver}.orig\"\n" + pacpkg_string_temp += " cd \"${srcdir}/"+pkgsource+"_${pkgver}.orig\"\n" pacpkg_string_temp += " python3 ./setup.py install --root=\"${pkgdir}\" --optimize=1\n" pacpkg_string_temp += "}\n\n" pacpkg_string_temp += "# vim:set ts=2 sw=2 et:\n" diff --git a/pkgbuild/archlinux/python3/realpath.py b/pkgbuild/archlinux/python3/realpath.py index 9837108..7350f44 100755 --- a/pkgbuild/archlinux/python3/realpath.py +++ b/pkgbuild/archlinux/python3/realpath.py @@ -15,24 +15,22 @@ $FileInfo: realpath.py - Last Update: 2/15/2016 Ver. 0.0.5 RC 3 - Author: cooldude2k $ ''' -from __future__ import (absolute_import, division, print_function, - unicode_literals) - -import argparse +from __future__ import absolute_import, division, print_function, unicode_literals import os import sys +import argparse __version_info__ = (0, 0, 5, "rc3") -if (__version_info__[3] is not None): - __version__ = str(__version_info__[0]) + "." + str(__version_info__[1]) + "." + str( - __version_info__[2]) + "+" + str(__version_info__[3]) -if (__version_info__[3] is None): - __version__ = str(__version_info__[ - 0]) + "." + str(__version_info__[1]) + "." + str(__version_info__[2]) +if(__version_info__[3] != None): + __version__ = str(__version_info__[0])+"."+str(__version_info__[1])+"."+str( + __version_info__[2])+"+"+str(__version_info__[3]) +if(__version_info__[3] == None): + __version__ = str( + __version_info__[0])+"."+str(__version_info__[1])+"."+str(__version_info__[2]) proname = "realpath" prover = __version__ -profullname = proname + " " + prover +profullname = proname+" "+prover parser = argparse.ArgumentParser(conflict_handler="resolve", add_help=True) parser.add_argument("-v", "--version", action="version", version=profullname) diff --git a/pkgbuild/archlinux/python3/which.py b/pkgbuild/archlinux/python3/which.py index fde6e3d..22cccbd 100755 --- a/pkgbuild/archlinux/python3/which.py +++ b/pkgbuild/archlinux/python3/which.py @@ -15,24 +15,22 @@ $FileInfo: which.py - Last Update: 2/15/2016 Ver. 
0.0.5 RC 3 - Author: cooldude2k $ ''' -from __future__ import (absolute_import, division, print_function, - unicode_literals) - -import argparse +from __future__ import absolute_import, division, print_function, unicode_literals import os import sys +import argparse __version_info__ = (0, 0, 5, "rc3") -if (__version_info__[3] is not None): - __version__ = str(__version_info__[0]) + "." + str(__version_info__[1]) + "." + str( - __version_info__[2]) + "+" + str(__version_info__[3]) -if (__version_info__[3] is None): - __version__ = str(__version_info__[ - 0]) + "." + str(__version_info__[1]) + "." + str(__version_info__[2]) +if(__version_info__[3] != None): + __version__ = str(__version_info__[0])+"."+str(__version_info__[1])+"."+str( + __version_info__[2])+"+"+str(__version_info__[3]) +if(__version_info__[3] == None): + __version__ = str( + __version_info__[0])+"."+str(__version_info__[1])+"."+str(__version_info__[2]) proname = "which" prover = __version__ -profullname = proname + " " + prover +profullname = proname+" "+prover def which_exec(execfile): diff --git a/pkgbuild/debian/python2/basename.py b/pkgbuild/debian/python2/basename.py index cb03c1e..d0538a9 100755 --- a/pkgbuild/debian/python2/basename.py +++ b/pkgbuild/debian/python2/basename.py @@ -15,24 +15,22 @@ $FileInfo: basename.py - Last Update: 2/15/2016 Ver. 0.0.5 RC 3 - Author: cooldude2k $ ''' -from __future__ import (absolute_import, division, print_function, - unicode_literals) - -import argparse +from __future__ import absolute_import, division, print_function, unicode_literals import os import sys +import argparse __version_info__ = (0, 0, 5, "rc3") -if (__version_info__[3] is not None): - __version__ = str(__version_info__[0]) + "." + str(__version_info__[1]) + "." + str( - __version_info__[2]) + "+" + str(__version_info__[3]) -if (__version_info__[3] is None): - __version__ = str(__version_info__[ - 0]) + "." + str(__version_info__[1]) + "." + str(__version_info__[2]) +if(__version_info__[3] != None): + __version__ = str(__version_info__[0])+"."+str(__version_info__[1])+"."+str( + __version_info__[2])+"+"+str(__version_info__[3]) +if(__version_info__[3] == None): + __version__ = str( + __version_info__[0])+"."+str(__version_info__[1])+"."+str(__version_info__[2]) proname = "basename" prover = __version__ -profullname = proname + " " + prover +profullname = proname+" "+prover parser = argparse.ArgumentParser(conflict_handler="resolve", add_help=True) parser.add_argument("-v", "--version", action="version", version=profullname) diff --git a/pkgbuild/debian/python2/dirname.py b/pkgbuild/debian/python2/dirname.py index 1225a5a..46d4997 100755 --- a/pkgbuild/debian/python2/dirname.py +++ b/pkgbuild/debian/python2/dirname.py @@ -15,24 +15,22 @@ $FileInfo: dirname.py - Last Update: 2/15/2016 Ver. 0.0.5 RC 3 - Author: cooldude2k $ ''' -from __future__ import (absolute_import, division, print_function, - unicode_literals) - -import argparse +from __future__ import absolute_import, division, print_function, unicode_literals import os import sys +import argparse __version_info__ = (0, 0, 5, "rc3") -if (__version_info__[3] is not None): - __version__ = str(__version_info__[0]) + "." + str(__version_info__[1]) + "." + str( - __version_info__[2]) + "+" + str(__version_info__[3]) -if (__version_info__[3] is None): - __version__ = str(__version_info__[ - 0]) + "." + str(__version_info__[1]) + "." 
+ str(__version_info__[2]) +if(__version_info__[3] != None): + __version__ = str(__version_info__[0])+"."+str(__version_info__[1])+"."+str( + __version_info__[2])+"+"+str(__version_info__[3]) +if(__version_info__[3] == None): + __version__ = str( + __version_info__[0])+"."+str(__version_info__[1])+"."+str(__version_info__[2]) proname = "dirname" prover = __version__ -profullname = proname + " " + prover +profullname = proname+" "+prover parser = argparse.ArgumentParser(conflict_handler="resolve", add_help=True) parser.add_argument("-v", "--version", action="version", version=profullname) diff --git a/pkgbuild/debian/python2/pydeb-gen.py b/pkgbuild/debian/python2/pydeb-gen.py index 3635a71..eac839b 100755 --- a/pkgbuild/debian/python2/pydeb-gen.py +++ b/pkgbuild/debian/python2/pydeb-gen.py @@ -15,29 +15,27 @@ $FileInfo: pydeb-gen.py - Last Update: 6/1/2016 Ver. 0.2.0 RC 1 - Author: cooldude2k $ ''' -from __future__ import (absolute_import, division, print_function, - unicode_literals) - -import argparse -import datetime -import json -import os +from __future__ import absolute_import, division, print_function, unicode_literals import re -import subprocess +import os import sys import time +import datetime +import argparse +import subprocess +import json __version_info__ = (0, 2, 0, "rc1") -if (__version_info__[3] is not None): - __version__ = str(__version_info__[0]) + "." + str(__version_info__[1]) + "." + str( - __version_info__[2]) + "+" + str(__version_info__[3]) -if (__version_info__[3] is None): - __version__ = str(__version_info__[ - 0]) + "." + str(__version_info__[1]) + "." + str(__version_info__[2]) +if(__version_info__[3] != None): + __version__ = str(__version_info__[0])+"."+str(__version_info__[1])+"."+str( + __version_info__[2])+"+"+str(__version_info__[3]) +if(__version_info__[3] == None): + __version__ = str( + __version_info__[0])+"."+str(__version_info__[1])+"."+str(__version_info__[2]) proname = "pydeb-gen" prover = __version__ -profullname = proname + " " + prover +profullname = proname+" "+prover buildsystem = "pybuild" @@ -47,75 +45,15 @@ def which_exec(execfile): return path + "/" + execfile -distvertoupname = { - '10.0': "Buster", - '9.0': "Stretch", - '8.0': "Jessie", - '7.0': "Wheezy", - '6.0': "Squeeze", - '5.0': "Lenny", - '4.0': "Etch", - '3.1': "Sarge", - '3.0': "Woody", - '2.2': "Potato", - '2.1': "Slink", - '2.0': "Hamm", - '1.3': "Bo", - '1.2': "Rex", - '1.1': "Buzz", - '0.0': "Sid"} -distvertoname = { - '10.0': "buster", - '9.0': "stretch", - '8.0': "jessie", - '7.0': "wheezy", - '6.0': "squeeze", - '5.0': "lenny", - '4.0': "etch", - '3.1': "sarge", - '3.0': "woody", - '2.2': "potato", - '2.1': "slink", - '2.0': "hamm", - '1.3': "bo", - '1.2': "rex", - '1.1': "buzz", - '0.0': "sid"} +distvertoupname = {'10.0': "Buster", '9.0': "Stretch", '8.0': "Jessie", '7.0': "Wheezy", '6.0': "Squeeze", '5.0': "Lenny", '4.0': "Etch", + '3.1': "Sarge", '3.0': "Woody", '2.2': "Potato", '2.1': "Slink", '2.0': "Hamm", '1.3': "Bo", '1.2': "Rex", '1.1': "Buzz", '0.0': "Sid"} +distvertoname = {'10.0': "buster", '9.0': "stretch", '8.0': "jessie", '7.0': "wheezy", '6.0': "squeeze", '5.0': "lenny", '4.0': "etch", + '3.1': "sarge", '3.0': "woody", '2.2': "potato", '2.1': "slink", '2.0': "hamm", '1.3': "bo", '1.2': "rex", '1.1': "buzz", '0.0': "sid"} distnamelist = distvertoname.values() -distnametover = { - 'buster': "10.0", - 'stretch': "9.0", - 'jessie': "8.0", - 'wheezy': "7.0", - 'squeeze': "6.0", - 'lenny': "5.0", - 'etch': "4.0", - 'sarge': "3.1", - 'woody': "3.0", - 'potato': 
"2.2", - 'slink': "2.1", - 'hamm': "2.0", - 'bo': "1.3", - 'rex': "1.2", - 'buzz': "1.1", - 'sid': "0.0"} -distupnametover = { - 'Buster': "10.0", - 'Stretch': "9.0", - 'Jessie': "8.0", - 'Wheezy': "7.0", - 'Squeeze': "6.0", - 'Lenny': "5.0", - 'Etch': "4.0", - 'Sarge': "3.1", - 'Woody': "3.0", - 'Potato': "2.2", - 'Slink': "2.1", - 'Hamm': "2.0", - 'Bo': "1.3", - 'Rex': "1.2", - 'Buzz': "1.1", - 'Sid': "0.0"} +distnametover = {'buster': "10.0", 'stretch': "9.0", 'jessie': "8.0", 'wheezy': "7.0", 'squeeze': "6.0", 'lenny': "5.0", 'etch': "4.0", + 'sarge': "3.1", 'woody': "3.0", 'potato': "2.2", 'slink': "2.1", 'hamm': "2.0", 'bo': "1.3", 'rex': "1.2", 'buzz': "1.1", 'sid': "0.0"} +distupnametover = {'Buster': "10.0", 'Stretch': "9.0", 'Jessie': "8.0", 'Wheezy': "7.0", 'Squeeze': "6.0", 'Lenny': "5.0", 'Etch': "4.0", + 'Sarge': "3.1", 'Woody': "3.0", 'Potato': "2.2", 'Slink': "2.1", 'Hamm': "2.0", 'Bo': "1.3", 'Rex': "1.2", 'Buzz': "1.1", 'Sid': "0.0"} distnamelistalt = distnametover.keys() debian_oldstable = "wheezy" @@ -142,25 +80,22 @@ def which_exec(execfile): action="store_true", help="get pkg source") getargs = parser.parse_args() getargs.source = os.path.realpath(getargs.source) -pkgsetuppy = os.path.realpath(getargs.source + os.path.sep + "setup.py") +pkgsetuppy = os.path.realpath(getargs.source+os.path.sep+"setup.py") pyexecpath = os.path.realpath(sys.executable) -if (not os.path.exists(getargs.source) or not os.path.isdir(getargs.source)): +if(not os.path.exists(getargs.source) or not os.path.isdir(getargs.source)): raise Exception("Could not find directory.") -if (not os.path.exists(pkgsetuppy) or not os.path.isfile(pkgsetuppy)): +if(not os.path.exists(pkgsetuppy) or not os.path.isfile(pkgsetuppy)): raise Exception("Could not find setup.py in directory.") getargs.codename = getargs.codename.lower() -if (getargs.codename not in distnamelist): - print("Could not build for debian " + getargs.codename + " codename.") +if(not getargs.codename in distnamelist): + print("Could not build for debian "+getargs.codename+" codename.") sys.exit() -pypkgenlistp = subprocess.Popen([pyexecpath, - pkgsetuppy, - "getversioninfo"], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE) +pypkgenlistp = subprocess.Popen( + [pyexecpath, pkgsetuppy, "getversioninfo"], stdout=subprocess.PIPE, stderr=subprocess.PIPE) pypkgenout, pypkgenerr = pypkgenlistp.communicate() -if (sys.version[0] == "3"): +if(sys.version[0] == "3"): pypkgenout = pypkgenout.decode('utf-8') pymodule = json.loads(pypkgenout) setuppy_verinfo = pymodule['versionlist'] @@ -175,153 +110,140 @@ def which_exec(execfile): setuppy_downloadurl = pymodule['downloadurl'] setuppy_longdescription = pymodule['longdescription'] setuppy_platforms = pymodule['platforms'] -standverfilename = os.path.realpath( - os.path.sep + - "usr" + - os.path.sep + - "share" + - os.path.sep + - "lintian" + - os.path.sep + - "data" + - os.path.sep + - "standards-version" + - os.path.sep + - "release-dates") +standverfilename = os.path.realpath(os.path.sep+"usr"+os.path.sep+"share"+os.path.sep + + "lintian"+os.path.sep+"data"+os.path.sep+"standards-version"+os.path.sep+"release-dates") standverfile = open(standverfilename, "r") standverdata = standverfile.read() standverfile.close() -getstandver = re.findall("([0-9]\\.[0-9]\\.[0-9])\\s+([0-9]+)", standverdata) +getstandver = re.findall("([0-9]\.[0-9]\.[0-9])\s+([0-9]+)", standverdata) getcurstandver = getstandver[0][0] dpkglocatout = which_exec("dpkg") pydpkglistp = subprocess.Popen( [dpkglocatout, "-s", "debhelper"], 
stdout=subprocess.PIPE, stderr=subprocess.PIPE) pydpkgout, pydpkgerr = pydpkglistp.communicate() -if (sys.version[0] == "3"): +if(sys.version[0] == "3"): pydpkgout = pydpkgout.decode("utf-8") -pydpkg_esc = re.escape("Version:") + '\\s+([0-9])' + re.escape(".") +pydpkg_esc = re.escape("Version:")+'\s+([0-9])'+re.escape(".") pydpkg_val = re.findall(pydpkg_esc, pydpkgout)[0] -if (sys.version[0] == "2"): +if(sys.version[0] == "2"): pkgsource = "py2www-get" -if (sys.version[0] == "3"): +if(sys.version[0] == "3"): pkgsource = "py3www-get" pkgupstreamname = "PyWWW-Get" -pkgveralt = str(setuppy_verinfo[0]) + "." + \ - str(setuppy_verinfo[1]) + "." + str(setuppy_verinfo[2]) -pkgver = str(pkgveralt) + "~rc" + str(setuppy_verinfo[4]) + "~" + getargs.codename + str( +pkgveralt = str(setuppy_verinfo[0])+"." + \ + str(setuppy_verinfo[1])+"."+str(setuppy_verinfo[2]) +pkgver = str(pkgveralt)+"~rc"+str(setuppy_verinfo[4])+"~"+getargs.codename+str( distnametover.get(getargs.codename, "1").replace(".", "")) pkgdistname = getargs.codename pkgurgency = "urgency=low" pkgauthorname = setuppy_author pkgauthoremail = setuppy_authoremail -pkgauthor = pkgauthorname + " <" + pkgauthoremail + ">" +pkgauthor = pkgauthorname+" <"+pkgauthoremail+">" pkgmaintainername = setuppy_maintainer pkgmaintaineremail = setuppy_maintaineremail -pkgmaintainer = pkgmaintainername + " <" + pkgmaintaineremail + ">" +pkgmaintainer = pkgmaintainername+" <"+pkgmaintaineremail+">" pkggiturl = "https://github.com/GameMaker2k/PyWWW-Get.git" pkghomepage = setuppy_url pkgsection = "python" pkgpriority = "optional" -if (sys.version[0] == "2"): +if(sys.version[0] == "2"): pkgbuilddepends = "python-setuptools, python-all, debhelper, dh-python, devscripts" -if (sys.version[0] == "3"): +if(sys.version[0] == "3"): pkgbuilddepends = "python3-setuptools, python3-all, debhelper, dh-python, devscripts" -if (getargs.codename == "squeeze" or getargs.codename == "wheezy"): - if (sys.version[0] == "2"): +if(getargs.codename == "squeeze" or getargs.codename == "wheezy"): + if(sys.version[0] == "2"): pkgbuilddepends = "python-setuptools, python-all, debhelper, dh-python, devscripts" - if (sys.version[0] == "3"): + if(sys.version[0] == "3"): pkgbuilddepends = "python3-setuptools, python3-all, debhelper, dh-python, devscripts" pkgstandardsversion = getcurstandver -if (sys.version[0] == "2"): +if(sys.version[0] == "2"): pkgpackage = "python-pywww-get" pkgoldname = "python-www-get" -if (sys.version[0] == "3"): +if(sys.version[0] == "3"): pkgpackage = "python3-pywww-get" pkgoldname = "python3-www-get" pkgarchitecture = "all" -if (sys.version[0] == "2"): +if(sys.version[0] == "2"): pkgdepends = "${misc:Depends}, ${python:Depends}" -if (sys.version[0] == "3"): +if(sys.version[0] == "3"): pkgdepends = "${misc:Depends}, ${python3:Depends}" -pkgdescription = setuppy_description + "\n " + setuppy_longdescription +pkgdescription = setuppy_description+"\n "+setuppy_longdescription pkgtzstr = time.strftime("%a, %d %b %Y %H:%M:%S %z") -if (getargs.getsource): +if(getargs.getsource == True): print(getargs.source) sys.exit() -if (getargs.getparent): +if(getargs.getparent == True): print(os.path.realpath(os.path.dirname(getargs.source))) sys.exit() -if (getargs.getdirname): - print(pkgsource + "_" + pkgveralt + ".orig") +if(getargs.getdirname == True): + print(pkgsource+"_"+pkgveralt+".orig") sys.exit() -if (getargs.gettarname): - print(pkgsource + "_" + pkgveralt + ".orig.tar.gz") +if(getargs.gettarname == True): + print(pkgsource+"_"+pkgveralt+".orig.tar.gz") sys.exit() -if 
(getargs.getpkgsource): +if(getargs.getpkgsource == True): print(pkgsource) sys.exit() print("generating debian package build directory") -debpkg_debian_dir = os.path.realpath(getargs.source + os.path.sep + "debian") -print("creating directory " + debpkg_debian_dir) -if (not os.path.exists(debpkg_debian_dir)): +debpkg_debian_dir = os.path.realpath(getargs.source+os.path.sep+"debian") +print("creating directory "+debpkg_debian_dir) +if(not os.path.exists(debpkg_debian_dir)): os.makedirs(debpkg_debian_dir) os.chmod(debpkg_debian_dir, int("0755", 8)) debpkg_changelog_file = os.path.realpath( - debpkg_debian_dir + os.path.sep + "changelog") -print("generating file " + debpkg_changelog_file) + debpkg_debian_dir+os.path.sep+"changelog") +print("generating file "+debpkg_changelog_file) debpkg_string_temp = pkgsource + \ - " (" + pkgver + ") " + pkgdistname + "; " + pkgurgency + "\n\n" -debpkg_string_temp += " * source package automatically created by " + profullname + "\n\n" -debpkg_string_temp += " -- " + pkgmaintainer + " " + pkgtzstr + "\n" + " ("+pkgver+") "+pkgdistname+"; "+pkgurgency+"\n\n" +debpkg_string_temp += " * source package automatically created by "+profullname+"\n\n" +debpkg_string_temp += " -- "+pkgmaintainer+" "+pkgtzstr+"\n" debpkg_file_temp = open(debpkg_changelog_file, "w") debpkg_file_temp.write(debpkg_string_temp) debpkg_file_temp.close() os.chmod(debpkg_changelog_file, int("0644", 8)) -debpkg_compat_file = os.path.realpath( - debpkg_debian_dir + os.path.sep + "compat") -print("generating file " + debpkg_compat_file) -debpkg_string_temp = str(pydpkg_val) + "\n" +debpkg_compat_file = os.path.realpath(debpkg_debian_dir+os.path.sep+"compat") +print("generating file "+debpkg_compat_file) +debpkg_string_temp = str(pydpkg_val)+"\n" debpkg_file_temp = open(debpkg_compat_file, "w") debpkg_file_temp.write(debpkg_string_temp) debpkg_file_temp.close() os.chmod(debpkg_compat_file, int("0644", 8)) -debpkg_control_file = os.path.realpath( - debpkg_debian_dir + os.path.sep + "control") -print("generating file " + debpkg_control_file) -debpkg_string_temp = "Source: " + pkgsource + "\n" -debpkg_string_temp += "Maintainer: " + pkgmaintainer + "\n" -debpkg_string_temp += "Homepage: " + pkghomepage + "\n" -debpkg_string_temp += "Vcs-Git: " + pkggiturl + "\n" -debpkg_string_temp += "Vcs-Browser: " + pkghomepage + "\n" -debpkg_string_temp += "Section: " + pkgsection + "\n" -debpkg_string_temp += "Priority: " + pkgpriority + "\n" -debpkg_string_temp += "Build-Depends: " + pkgbuilddepends + "\n" -debpkg_string_temp += "Standards-Version: " + pkgstandardsversion + "\n\n" -debpkg_string_temp += "Package: " + pkgpackage + "\n" -debpkg_string_temp += "Architecture: " + pkgarchitecture + "\n" -debpkg_string_temp += "Depends: " + pkgdepends + "\n" -debpkg_string_temp += "Replaces: " + pkgoldname + "\n" -debpkg_string_temp += "Description: " + pkgdescription + "\n" +debpkg_control_file = os.path.realpath(debpkg_debian_dir+os.path.sep+"control") +print("generating file "+debpkg_control_file) +debpkg_string_temp = "Source: "+pkgsource+"\n" +debpkg_string_temp += "Maintainer: "+pkgmaintainer+"\n" +debpkg_string_temp += "Homepage: "+pkghomepage+"\n" +debpkg_string_temp += "Vcs-Git: "+pkggiturl+"\n" +debpkg_string_temp += "Vcs-Browser: "+pkghomepage+"\n" +debpkg_string_temp += "Section: "+pkgsection+"\n" +debpkg_string_temp += "Priority: "+pkgpriority+"\n" +debpkg_string_temp += "Build-Depends: "+pkgbuilddepends+"\n" +debpkg_string_temp += "Standards-Version: "+pkgstandardsversion+"\n\n" +debpkg_string_temp 
+= "Package: "+pkgpackage+"\n" +debpkg_string_temp += "Architecture: "+pkgarchitecture+"\n" +debpkg_string_temp += "Depends: "+pkgdepends+"\n" +debpkg_string_temp += "Replaces: "+pkgoldname+"\n" +debpkg_string_temp += "Description: "+pkgdescription+"\n" debpkg_file_temp = open(debpkg_control_file, "w") debpkg_file_temp.write(debpkg_string_temp) debpkg_file_temp.close() os.chmod(debpkg_control_file, int("0644", 8)) debpkg_copyright_file = os.path.realpath( - debpkg_debian_dir + os.path.sep + "copyright") -print("generating file " + debpkg_copyright_file) + debpkg_debian_dir+os.path.sep+"copyright") +print("generating file "+debpkg_copyright_file) debpkg_string_temp = "Format: http://www.debian.org/doc/packaging-manuals/copyright-format/1.0/\n" -debpkg_string_temp += "Upstream-Name: " + pkgupstreamname + "\n" -debpkg_string_temp += "Source: " + pkghomepage + "\n\n" +debpkg_string_temp += "Upstream-Name: "+pkgupstreamname+"\n" +debpkg_string_temp += "Source: "+pkghomepage+"\n\n" debpkg_string_temp += "Files: *\n" -debpkg_string_temp += "Copyright: Copyright 2011-2016 " + pkgauthor + "\n" +debpkg_string_temp += "Copyright: Copyright 2011-2016 "+pkgauthor+"\n" debpkg_string_temp += "License: BSD\n\n" debpkg_string_temp += "License: BSD\n" debpkg_string_temp += " Revised BSD License\n\n" @@ -356,23 +278,19 @@ def which_exec(execfile): debpkg_file_temp.close() os.chmod(debpkg_copyright_file, int("0644", 8)) -debpkg_rules_file = os.path.realpath(debpkg_debian_dir + os.path.sep + "rules") -print("generating file " + debpkg_rules_file) -if (sys.version[0] == "2" and (buildsystem == - "python" or buildsystem == "python_distutils")): +debpkg_rules_file = os.path.realpath(debpkg_debian_dir+os.path.sep+"rules") +print("generating file "+debpkg_rules_file) +if(sys.version[0] == "2" and (buildsystem == "python" or buildsystem == "python_distutils")): debpkg_string_temp = "#!/usr/bin/make -f\n\n" - debpkg_string_temp += "# This file was automatically generated by " + \ - profullname + " at\n" - debpkg_string_temp += "# " + pkgtzstr + "\n\n" + debpkg_string_temp += "# This file was automatically generated by "+profullname+" at\n" + debpkg_string_temp += "# "+pkgtzstr+"\n\n" debpkg_string_temp += "export DH_VERBOSE=1\n\n" debpkg_string_temp += "%:\n" debpkg_string_temp += " dh $@ --with python2 --buildsystem=python_distutils\n" -if (sys.version[0] == "3" and (buildsystem == - "python" or buildsystem == "python_distutils")): +if(sys.version[0] == "3" and (buildsystem == "python" or buildsystem == "python_distutils")): debpkg_string_temp = "#!/usr/bin/make -f\n\n" - debpkg_string_temp += "# This file was automatically generated by " + \ - profullname + " at\n" - debpkg_string_temp += "# " + pkgtzstr + "\n\n" + debpkg_string_temp += "# This file was automatically generated by "+profullname+" at\n" + debpkg_string_temp += "# "+pkgtzstr+"\n\n" debpkg_string_temp += "export DH_VERBOSE=1\n\n" debpkg_string_temp += "%:\n" debpkg_string_temp += " dh $@ --with python3\n" @@ -383,36 +301,30 @@ def which_exec(execfile): debpkg_string_temp += "override_dh_auto_install:\n" debpkg_string_temp += " python3 setup.py install \\\n" debpkg_string_temp += " --force --root=$(CURDIR)/debian/" + \ - pkgpackage + " \\\n" + pkgpackage+" \\\n" debpkg_string_temp += " --no-compile -O0 --install-layout=deb\n\n" debpkg_string_temp += "override_dh_auto_clean:\n" debpkg_string_temp += " python3 setup.py clean\n" -if (sys.version[0] == "2" and (buildsystem == - "pybuild" or buildsystem == "python_build")): +if(sys.version[0] == "2" and 
(buildsystem == "pybuild" or buildsystem == "python_build")): debpkg_string_temp = "#!/usr/bin/make -f\n\n" - debpkg_string_temp += "# This file was automatically generated by " + \ - profullname + " at\n" - debpkg_string_temp += "# " + pkgtzstr + "\n\n" + debpkg_string_temp += "# This file was automatically generated by "+profullname+" at\n" + debpkg_string_temp += "# "+pkgtzstr+"\n\n" debpkg_string_temp += "export DH_VERBOSE=1\n" debpkg_string_temp += "export PYBUILD_NAME=pywww-get\n\n" debpkg_string_temp += "%:\n" debpkg_string_temp += " dh $@ --with python2 --buildsystem=pybuild\n" -if (sys.version[0] == "3" and (buildsystem == - "pybuild" or buildsystem == "python_build")): +if(sys.version[0] == "3" and (buildsystem == "pybuild" or buildsystem == "python_build")): debpkg_string_temp = "#!/usr/bin/make -f\n\n" - debpkg_string_temp += "# This file was automatically generated by " + \ - profullname + " at\n" - debpkg_string_temp += "# " + pkgtzstr + "\n\n" + debpkg_string_temp += "# This file was automatically generated by "+profullname+" at\n" + debpkg_string_temp += "# "+pkgtzstr+"\n\n" debpkg_string_temp += "export DH_VERBOSE=1\n" debpkg_string_temp += "export PYBUILD_NAME=pywww-get\n\n" debpkg_string_temp += "%:\n" debpkg_string_temp += " dh $@ --with python3 --buildsystem=pybuild\n" -if ((sys.version[0] == "2" or sys.version[0] == "3") - and buildsystem == "cmake"): +if((sys.version[0] == "2" or sys.version[0] == "3") and buildsystem == "cmake"): debpkg_string_temp = "#!/usr/bin/make -f\n\n" - debpkg_string_temp += "# This file was automatically generated by " + \ - profullname + " at\n" - debpkg_string_temp += "# " + pkgtzstr + "\n\n" + debpkg_string_temp += "# This file was automatically generated by "+profullname+" at\n" + debpkg_string_temp += "# "+pkgtzstr+"\n\n" debpkg_string_temp += "export DH_VERBOSE=1\n\n" debpkg_string_temp += "%:\n" debpkg_string_temp += " dh $@ --buildsystem=cmake --parallel\n" @@ -421,25 +333,22 @@ def which_exec(execfile): debpkg_file_temp.close() os.chmod(debpkg_rules_file, int("0755", 8)) -debpkg_source_dir = os.path.realpath( - debpkg_debian_dir + os.path.sep + "source") -print("creating directory " + debpkg_source_dir) -if (not os.path.exists(debpkg_source_dir)): +debpkg_source_dir = os.path.realpath(debpkg_debian_dir+os.path.sep+"source") +print("creating directory "+debpkg_source_dir) +if(not os.path.exists(debpkg_source_dir)): os.makedirs(debpkg_source_dir) os.chmod(debpkg_source_dir, int("0755", 8)) -debpkg_format_file = os.path.realpath( - debpkg_source_dir + os.path.sep + "format") -print("generating file " + debpkg_format_file) +debpkg_format_file = os.path.realpath(debpkg_source_dir+os.path.sep+"format") +print("generating file "+debpkg_format_file) debpkg_string_temp = "3.0 (native)\n" debpkg_file_temp = open(debpkg_format_file, "w") debpkg_file_temp.write(debpkg_string_temp) debpkg_file_temp.close() os.chmod(debpkg_format_file, int("0644", 8)) -debpkg_options_file = os.path.realpath( - debpkg_source_dir + os.path.sep + "options") -print("generating file " + debpkg_options_file) +debpkg_options_file = os.path.realpath(debpkg_source_dir+os.path.sep+"options") +print("generating file "+debpkg_options_file) debpkg_string_temp = "extend-diff-ignore=\"\\.egg-info\"\n" debpkg_file_temp = open(debpkg_options_file, "w") debpkg_file_temp.write(debpkg_string_temp) diff --git a/pkgbuild/debian/python2/realpath.py b/pkgbuild/debian/python2/realpath.py index 5eaaa7a..b2e441f 100755 --- a/pkgbuild/debian/python2/realpath.py +++ 
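
For reference, every generator in this patch derives its Debian version string the same way: from setup.py's version tuple plus the target codename. A minimal sketch of that construction (values illustrative, not taken from this patch):

    # Sketch of the pkgver construction seen in the hunks above (illustrative values).
    setuppy_verinfo = [0, 2, 0, "RC 1", 1]   # assumed shape: major, minor, patch, level, rc number
    codename = "jessie"
    distnametover = {'jessie': "8.0"}
    pkgveralt = "%d.%d.%d" % tuple(setuppy_verinfo[0:3])
    pkgver = pkgveralt + "~rc" + str(setuppy_verinfo[4]) + "~" + codename + \
        distnametover.get(codename, "1").replace(".", "")
    print(pkgver)   # -> 0.2.0~rc1~jessie80
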
diff --git a/pkgbuild/debian/python2/realpath.py b/pkgbuild/debian/python2/realpath.py
index 5eaaa7a..b2e441f 100755
--- a/pkgbuild/debian/python2/realpath.py
+++ b/pkgbuild/debian/python2/realpath.py
@@ -15,24 +15,22 @@
 $FileInfo: realpath.py - Last Update: 2/15/2016 Ver. 0.0.5 RC 3 - Author: cooldude2k $
 '''

-from __future__ import (absolute_import, division, print_function,
-                        unicode_literals)
-
-import argparse
+from __future__ import absolute_import, division, print_function, unicode_literals
 import os
 import sys
+import argparse

 __version_info__ = (0, 0, 5, "rc3")
-if (__version_info__[3] is not None):
-    __version__ = str(__version_info__[0]) + "." + str(__version_info__[1]) + "." + str(
-        __version_info__[2]) + "+" + str(__version_info__[3])
-if (__version_info__[3] is None):
-    __version__ = str(__version_info__[
-        0]) + "." + str(__version_info__[1]) + "." + str(__version_info__[2])
+if(__version_info__[3] != None):
+    __version__ = str(__version_info__[0])+"."+str(__version_info__[1])+"."+str(
+        __version_info__[2])+"+"+str(__version_info__[3])
+if(__version_info__[3] == None):
+    __version__ = str(
+        __version_info__[0])+"."+str(__version_info__[1])+"."+str(__version_info__[2])

 proname = "realpath"
 prover = __version__
-profullname = proname + " " + prover
+profullname = proname+" "+prover

 parser = argparse.ArgumentParser(conflict_handler="resolve", add_help=True)
 parser.add_argument("-v", "--version", action="version", version=profullname)
diff --git a/pkgbuild/debian/python2/which.py b/pkgbuild/debian/python2/which.py
index 36ff5fb..e5cf271 100755
--- a/pkgbuild/debian/python2/which.py
+++ b/pkgbuild/debian/python2/which.py
@@ -15,24 +15,22 @@
 $FileInfo: which.py - Last Update: 2/15/2016 Ver. 0.0.5 RC 3 - Author: cooldude2k $
 '''

-from __future__ import (absolute_import, division, print_function,
-                        unicode_literals)
-
-import argparse
+from __future__ import absolute_import, division, print_function, unicode_literals
 import os
 import sys
+import argparse

 __version_info__ = (0, 0, 5, "rc3")
-if (__version_info__[3] is not None):
-    __version__ = str(__version_info__[0]) + "." + str(__version_info__[1]) + "." + str(
-        __version_info__[2]) + "+" + str(__version_info__[3])
-if (__version_info__[3] is None):
-    __version__ = str(__version_info__[
-        0]) + "." + str(__version_info__[1]) + "." + str(__version_info__[2])
+if(__version_info__[3] != None):
+    __version__ = str(__version_info__[0])+"."+str(__version_info__[1])+"."+str(
+        __version_info__[2])+"+"+str(__version_info__[3])
+if(__version_info__[3] == None):
+    __version__ = str(
+        __version_info__[0])+"."+str(__version_info__[1])+"."+str(__version_info__[2])

 proname = "which"
 prover = __version__
-profullname = proname + " " + prover
+profullname = proname+" "+prover


 def which_exec(execfile):
diff --git a/pkgbuild/debian/python3/basename.py b/pkgbuild/debian/python3/basename.py
index 8b41db3..346121c 100755
--- a/pkgbuild/debian/python3/basename.py
+++ b/pkgbuild/debian/python3/basename.py
@@ -15,24 +15,22 @@
 $FileInfo: basename.py - Last Update: 2/15/2016 Ver. 0.0.5 RC 3 - Author: cooldude2k $
 '''

-from __future__ import (absolute_import, division, print_function,
-                        unicode_literals)
-
-import argparse
+from __future__ import absolute_import, division, print_function, unicode_literals
 import os
 import sys
+import argparse

 __version_info__ = (0, 0, 5, "rc3")
-if (__version_info__[3] is not None):
-    __version__ = str(__version_info__[0]) + "." + str(__version_info__[1]) + "." + str(
-        __version_info__[2]) + "+" + str(__version_info__[3])
-if (__version_info__[3] is None):
-    __version__ = str(__version_info__[
-        0]) + "." + str(__version_info__[1]) + "." + str(__version_info__[2])
+if(__version_info__[3] != None):
+    __version__ = str(__version_info__[0])+"."+str(__version_info__[1])+"."+str(
+        __version_info__[2])+"+"+str(__version_info__[3])
+if(__version_info__[3] == None):
+    __version__ = str(
+        __version_info__[0])+"."+str(__version_info__[1])+"."+str(__version_info__[2])

 proname = "basename"
 prover = __version__
-profullname = proname + " " + prover
+profullname = proname+" "+prover

 parser = argparse.ArgumentParser(conflict_handler="resolve", add_help=True)
 parser.add_argument("-v", "--version", action="version", version=profullname)
diff --git a/pkgbuild/debian/python3/dirname.py b/pkgbuild/debian/python3/dirname.py
index 5aea1fd..080502f 100755
--- a/pkgbuild/debian/python3/dirname.py
+++ b/pkgbuild/debian/python3/dirname.py
@@ -15,24 +15,22 @@
 $FileInfo: dirname.py - Last Update: 2/15/2016 Ver. 0.0.5 RC 3 - Author: cooldude2k $
 '''

-from __future__ import (absolute_import, division, print_function,
-                        unicode_literals)
-
-import argparse
+from __future__ import absolute_import, division, print_function, unicode_literals
 import os
 import sys
+import argparse

 __version_info__ = (0, 0, 5, "rc3")
-if (__version_info__[3] is not None):
-    __version__ = str(__version_info__[0]) + "." + str(__version_info__[1]) + "." + str(
-        __version_info__[2]) + "+" + str(__version_info__[3])
-if (__version_info__[3] is None):
-    __version__ = str(__version_info__[
-        0]) + "." + str(__version_info__[1]) + "." + str(__version_info__[2])
+if(__version_info__[3] != None):
+    __version__ = str(__version_info__[0])+"."+str(__version_info__[1])+"."+str(
+        __version_info__[2])+"+"+str(__version_info__[3])
+if(__version_info__[3] == None):
+    __version__ = str(
+        __version_info__[0])+"."+str(__version_info__[1])+"."+str(__version_info__[2])

 proname = "dirname"
 prover = __version__
-profullname = proname + " " + prover
+profullname = proname+" "+prover

 parser = argparse.ArgumentParser(conflict_handler="resolve", add_help=True)
 parser.add_argument("-v", "--version", action="version", version=profullname)
diff --git a/pkgbuild/debian/python3/pydeb-gen.py b/pkgbuild/debian/python3/pydeb-gen.py
index 62f4216..d042064 100755
--- a/pkgbuild/debian/python3/pydeb-gen.py
+++ b/pkgbuild/debian/python3/pydeb-gen.py
@@ -15,29 +15,27 @@
 $FileInfo: pydeb-gen.py - Last Update: 6/1/2016 Ver. 0.2.0 RC 1 - Author: cooldude2k $
 '''

-from __future__ import (absolute_import, division, print_function,
-                        unicode_literals)
-
-import argparse
-import datetime
-import json
-import os
+from __future__ import absolute_import, division, print_function, unicode_literals
 import re
-import subprocess
+import os
 import sys
 import time
+import datetime
+import argparse
+import subprocess
+import json

 __version_info__ = (0, 2, 0, "rc1")
-if (__version_info__[3] is not None):
-    __version__ = str(__version_info__[0]) + "." + str(__version_info__[1]) + "." + str(
-        __version_info__[2]) + "+" + str(__version_info__[3])
-if (__version_info__[3] is None):
-    __version__ = str(__version_info__[
-        0]) + "." + str(__version_info__[1]) + "." + str(__version_info__[2])
+if(__version_info__[3] != None):
+    __version__ = str(__version_info__[0])+"."+str(__version_info__[1])+"."+str(
+        __version_info__[2])+"+"+str(__version_info__[3])
+if(__version_info__[3] == None):
+    __version__ = str(
+        __version_info__[0])+"."+str(__version_info__[1])+"."+str(__version_info__[2])

 proname = "pydeb-gen"
 prover = __version__
-profullname = proname + " " + prover
+profullname = proname+" "+prover

 buildsystem = "pybuild"

@@ -47,75 +45,15 @@ def which_exec(execfile):
             return path + "/" + execfile


-distvertoupname = {
-    '10.0': "Buster",
-    '9.0': "Stretch",
-    '8.0': "Jessie",
-    '7.0': "Wheezy",
-    '6.0': "Squeeze",
-    '5.0': "Lenny",
-    '4.0': "Etch",
-    '3.1': "Sarge",
-    '3.0': "Woody",
-    '2.2': "Potato",
-    '2.1': "Slink",
-    '2.0': "Hamm",
-    '1.3': "Bo",
-    '1.2': "Rex",
-    '1.1': "Buzz",
-    '0.0': "Sid"}
-distvertoname = {
-    '10.0': "buster",
-    '9.0': "stretch",
-    '8.0': "jessie",
-    '7.0': "wheezy",
-    '6.0': "squeeze",
-    '5.0': "lenny",
-    '4.0': "etch",
-    '3.1': "sarge",
-    '3.0': "woody",
-    '2.2': "potato",
-    '2.1': "slink",
-    '2.0': "hamm",
-    '1.3': "bo",
-    '1.2': "rex",
-    '1.1': "buzz",
-    '0.0': "sid"}
+distvertoupname = {'10.0': "Buster", '9.0': "Stretch", '8.0': "Jessie", '7.0': "Wheezy", '6.0': "Squeeze", '5.0': "Lenny", '4.0': "Etch",
+                   '3.1': "Sarge", '3.0': "Woody", '2.2': "Potato", '2.1': "Slink", '2.0': "Hamm", '1.3': "Bo", '1.2': "Rex", '1.1': "Buzz", '0.0': "Sid"}
+distvertoname = {'10.0': "buster", '9.0': "stretch", '8.0': "jessie", '7.0': "wheezy", '6.0': "squeeze", '5.0': "lenny", '4.0': "etch",
+                 '3.1': "sarge", '3.0': "woody", '2.2': "potato", '2.1': "slink", '2.0': "hamm", '1.3': "bo", '1.2': "rex", '1.1': "buzz", '0.0': "sid"}
 distnamelist = distvertoname.values()
-distnametover = {
-    'buster': "10.0",
-    'stretch': "9.0",
-    'jessie': "8.0",
-    'wheezy': "7.0",
-    'squeeze': "6.0",
-    'lenny': "5.0",
-    'etch': "4.0",
-    'sarge': "3.1",
-    'woody': "3.0",
-    'potato': "2.2",
-    'slink': "2.1",
-    'hamm': "2.0",
-    'bo': "1.3",
-    'rex': "1.2",
-    'buzz': "1.1",
-    'sid': "0.0"}
-distupnametover = {
-    'Buster': "10.0",
-    'Stretch': "9.0",
-    'Jessie': "8.0",
-    'Wheezy': "7.0",
-    'Squeeze': "6.0",
-    'Lenny': "5.0",
-    'Etch': "4.0",
-    'Sarge': "3.1",
-    'Woody': "3.0",
-    'Potato': "2.2",
-    'Slink': "2.1",
-    'Hamm': "2.0",
-    'Bo': "1.3",
-    'Rex': "1.2",
-    'Buzz': "1.1",
-    'Sid': "0.0"}
+distnametover = {'buster': "10.0", 'stretch': "9.0", 'jessie': "8.0", 'wheezy': "7.0", 'squeeze': "6.0", 'lenny': "5.0", 'etch': "4.0",
+                 'sarge': "3.1", 'woody': "3.0", 'potato': "2.2", 'slink': "2.1", 'hamm': "2.0", 'bo': "1.3", 'rex': "1.2", 'buzz': "1.1", 'sid': "0.0"}
+distupnametover = {'Buster': "10.0", 'Stretch': "9.0", 'Jessie': "8.0", 'Wheezy': "7.0", 'Squeeze': "6.0", 'Lenny': "5.0", 'Etch': "4.0",
+                   'Sarge': "3.1", 'Woody': "3.0", 'Potato': "2.2", 'Slink': "2.1", 'Hamm': "2.0", 'Bo': "1.3", 'Rex': "1.2", 'Buzz': "1.1", 'Sid': "0.0"}
 distnamelistalt = distnametover.keys()

 debian_oldstable = "wheezy"
@@ -142,25 +80,22 @@ def which_exec(execfile):
                     action="store_true", help="get pkg source")
 getargs = parser.parse_args()
 getargs.source = os.path.realpath(getargs.source)
-pkgsetuppy = os.path.realpath(getargs.source + os.path.sep + "setup.py")
+pkgsetuppy = os.path.realpath(getargs.source+os.path.sep+"setup.py")
 pyexecpath = os.path.realpath(sys.executable)
-if (not os.path.exists(getargs.source) or not os.path.isdir(getargs.source)):
+if(not os.path.exists(getargs.source) or not os.path.isdir(getargs.source)):
     raise Exception("Could not find directory.")
-if (not os.path.exists(pkgsetuppy) or not os.path.isfile(pkgsetuppy)):
+if(not os.path.exists(pkgsetuppy) or not os.path.isfile(pkgsetuppy)):
     raise Exception("Could not find setup.py in directory.")

 getargs.codename = getargs.codename.lower()
-if (getargs.codename not in distnamelist):
-    print("Could not build for debian " + getargs.codename + " codename.")
+if(not getargs.codename in distnamelist):
+    print("Could not build for debian "+getargs.codename+" codename.")
     sys.exit()

-pypkgenlistp = subprocess.Popen([pyexecpath,
-                                 pkgsetuppy,
-                                 "getversioninfo"],
-                                stdout=subprocess.PIPE,
-                                stderr=subprocess.PIPE)
+pypkgenlistp = subprocess.Popen(
+    [pyexecpath, pkgsetuppy, "getversioninfo"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
 pypkgenout, pypkgenerr = pypkgenlistp.communicate()
-if (sys.version[0] == "3"):
+if(sys.version[0] == "3"):
     pypkgenout = pypkgenout.decode('utf-8')
 pymodule = json.loads(pypkgenout)
 setuppy_verinfo = pymodule['versionlist']
@@ -175,153 +110,140 @@ def which_exec(execfile):
 setuppy_downloadurl = pymodule['downloadurl']
 setuppy_longdescription = pymodule['longdescription']
 setuppy_platforms = pymodule['platforms']
-standverfilename = os.path.realpath(
-    os.path.sep +
-    "usr" +
-    os.path.sep +
-    "share" +
-    os.path.sep +
-    "lintian" +
-    os.path.sep +
-    "data" +
-    os.path.sep +
-    "standards-version" +
-    os.path.sep +
-    "release-dates")
+standverfilename = os.path.realpath(os.path.sep+"usr"+os.path.sep+"share"+os.path.sep +
                                    "lintian"+os.path.sep+"data"+os.path.sep+"standards-version"+os.path.sep+"release-dates")
 standverfile = open(standverfilename, "r")
 standverdata = standverfile.read()
 standverfile.close()
-getstandver = re.findall("([0-9]\\.[0-9]\\.[0-9])\\s+([0-9]+)", standverdata)
+getstandver = re.findall("([0-9]\.[0-9]\.[0-9])\s+([0-9]+)", standverdata)
 getcurstandver = getstandver[0][0]
 dpkglocatout = which_exec("dpkg")
 pydpkglistp = subprocess.Popen(
     [dpkglocatout, "-s", "debhelper"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
 pydpkgout, pydpkgerr = pydpkglistp.communicate()
-if (sys.version[0] == "3"):
+if(sys.version[0] == "3"):
     pydpkgout = pydpkgout.decode("utf-8")
-pydpkg_esc = re.escape("Version:") + '\\s+([0-9])' + re.escape(".")
+pydpkg_esc = re.escape("Version:")+'\s+([0-9])'+re.escape(".")
 pydpkg_val = re.findall(pydpkg_esc, pydpkgout)[0]
-if (sys.version[0] == "2"):
+if(sys.version[0] == "2"):
     pkgsource = "py2www-get"
-if (sys.version[0] == "3"):
+if(sys.version[0] == "3"):
     pkgsource = "py3www-get"
 pkgupstreamname = "PyWWW-Get"
-pkgveralt = str(setuppy_verinfo[0]) + "." + \
-    str(setuppy_verinfo[1]) + "." + str(setuppy_verinfo[2])
-pkgver = str(pkgveralt) + "~rc" + str(setuppy_verinfo[4]) + "~" + getargs.codename + str(
+pkgveralt = str(setuppy_verinfo[0])+"." + \
+    str(setuppy_verinfo[1])+"."+str(setuppy_verinfo[2])
+pkgver = str(pkgveralt)+"~rc"+str(setuppy_verinfo[4])+"~"+getargs.codename+str(
     distnametover.get(getargs.codename, "1").replace(".", ""))
 pkgdistname = getargs.codename
 pkgurgency = "urgency=low"
 pkgauthorname = setuppy_author
 pkgauthoremail = setuppy_authoremail
-pkgauthor = pkgauthorname + " <" + pkgauthoremail + ">"
+pkgauthor = pkgauthorname+" <"+pkgauthoremail+">"
 pkgmaintainername = setuppy_maintainer
 pkgmaintaineremail = setuppy_maintaineremail
-pkgmaintainer = pkgmaintainername + " <" + pkgmaintaineremail + ">"
+pkgmaintainer = pkgmaintainername+" <"+pkgmaintaineremail+">"
 pkggiturl = "https://github.com/GameMaker2k/PyWWW-Get.git"
 pkghomepage = setuppy_url
 pkgsection = "python"
 pkgpriority = "optional"
-if (sys.version[0] == "2"):
+if(sys.version[0] == "2"):
     pkgbuilddepends = "python-setuptools, python-all, debhelper, dh-python, devscripts"
-if (sys.version[0] == "3"):
+if(sys.version[0] == "3"):
     pkgbuilddepends = "python3-setuptools, python3-all, debhelper, dh-python, devscripts"
-if (getargs.codename == "squeeze" or getargs.codename == "wheezy"):
-    if (sys.version[0] == "2"):
+if(getargs.codename == "squeeze" or getargs.codename == "wheezy"):
+    if(sys.version[0] == "2"):
         pkgbuilddepends = "python-setuptools, python-all, debhelper, dh-python, devscripts"
-    if (sys.version[0] == "3"):
+    if(sys.version[0] == "3"):
         pkgbuilddepends = "python3-setuptools, python3-all, debhelper, dh-python, devscripts"
 pkgstandardsversion = getcurstandver
-if (sys.version[0] == "2"):
+if(sys.version[0] == "2"):
     pkgpackage = "python-pywww-get"
     pkgoldname = "python-www-get"
-if (sys.version[0] == "3"):
+if(sys.version[0] == "3"):
     pkgpackage = "python3-pywww-get"
     pkgoldname = "python3-www-get"
 pkgarchitecture = "all"
-if (sys.version[0] == "2"):
+if(sys.version[0] == "2"):
     pkgdepends = "${misc:Depends}, ${python:Depends}"
-if (sys.version[0] == "3"):
+if(sys.version[0] == "3"):
     pkgdepends = "${misc:Depends}, ${python3:Depends}"
-pkgdescription = setuppy_description + "\n " + setuppy_longdescription
+pkgdescription = setuppy_description+"\n "+setuppy_longdescription
 pkgtzstr = time.strftime("%a, %d %b %Y %H:%M:%S %z")

-if (getargs.getsource):
+if(getargs.getsource == True):
     print(getargs.source)
     sys.exit()
-if (getargs.getparent):
+if(getargs.getparent == True):
     print(os.path.realpath(os.path.dirname(getargs.source)))
     sys.exit()
-if (getargs.getdirname):
-    print(pkgsource + "_" + pkgveralt + ".orig")
+if(getargs.getdirname == True):
+    print(pkgsource+"_"+pkgveralt+".orig")
     sys.exit()
-if (getargs.gettarname):
-    print(pkgsource + "_" + pkgveralt + ".orig.tar.gz")
+if(getargs.gettarname == True):
+    print(pkgsource+"_"+pkgveralt+".orig.tar.gz")
     sys.exit()
-if (getargs.getpkgsource):
+if(getargs.getpkgsource == True):
     print(pkgsource)
     sys.exit()

 print("generating debian package build directory")

-debpkg_debian_dir = os.path.realpath(getargs.source + os.path.sep + "debian")
-print("creating directory " + debpkg_debian_dir)
-if (not os.path.exists(debpkg_debian_dir)):
+debpkg_debian_dir = os.path.realpath(getargs.source+os.path.sep+"debian")
+print("creating directory "+debpkg_debian_dir)
+if(not os.path.exists(debpkg_debian_dir)):
     os.makedirs(debpkg_debian_dir)
     os.chmod(debpkg_debian_dir, int("0755", 8))

 debpkg_changelog_file = os.path.realpath(
-    debpkg_debian_dir + os.path.sep + "changelog")
-print("generating file " + debpkg_changelog_file)
+    debpkg_debian_dir+os.path.sep+"changelog")
+print("generating file "+debpkg_changelog_file)
 debpkg_string_temp = pkgsource + \
-    " (" + pkgver + ") " + pkgdistname + "; " + pkgurgency + "\n\n"
-debpkg_string_temp += " * source package automatically created by " + profullname + "\n\n"
-debpkg_string_temp += " -- " + pkgmaintainer + " " + pkgtzstr + "\n"
+    " ("+pkgver+") "+pkgdistname+"; "+pkgurgency+"\n\n"
+debpkg_string_temp += " * source package automatically created by "+profullname+"\n\n"
+debpkg_string_temp += " -- "+pkgmaintainer+" "+pkgtzstr+"\n"
 debpkg_file_temp = open(debpkg_changelog_file, "w")
 debpkg_file_temp.write(debpkg_string_temp)
 debpkg_file_temp.close()
 os.chmod(debpkg_changelog_file, int("0644", 8))

-debpkg_compat_file = os.path.realpath(
-    debpkg_debian_dir + os.path.sep + "compat")
-print("generating file " + debpkg_compat_file)
-debpkg_string_temp = str(pydpkg_val) + "\n"
+debpkg_compat_file = os.path.realpath(debpkg_debian_dir+os.path.sep+"compat")
+print("generating file "+debpkg_compat_file)
+debpkg_string_temp = str(pydpkg_val)+"\n"
 debpkg_file_temp = open(debpkg_compat_file, "w")
 debpkg_file_temp.write(debpkg_string_temp)
 debpkg_file_temp.close()
 os.chmod(debpkg_compat_file, int("0644", 8))

-debpkg_control_file = os.path.realpath(
-    debpkg_debian_dir + os.path.sep + "control")
-print("generating file " + debpkg_control_file)
-debpkg_string_temp = "Source: " + pkgsource + "\n"
-debpkg_string_temp += "Maintainer: " + pkgmaintainer + "\n"
-debpkg_string_temp += "Homepage: " + pkghomepage + "\n"
-debpkg_string_temp += "Vcs-Git: " + pkggiturl + "\n"
-debpkg_string_temp += "Vcs-Browser: " + pkghomepage + "\n"
-debpkg_string_temp += "Section: " + pkgsection + "\n"
-debpkg_string_temp += "Priority: " + pkgpriority + "\n"
-debpkg_string_temp += "Build-Depends: " + pkgbuilddepends + "\n"
-debpkg_string_temp += "Standards-Version: " + pkgstandardsversion + "\n\n"
-debpkg_string_temp += "Package: " + pkgpackage + "\n"
-debpkg_string_temp += "Architecture: " + pkgarchitecture + "\n"
-debpkg_string_temp += "Depends: " + pkgdepends + "\n"
-debpkg_string_temp += "Replaces: " + pkgoldname + "\n"
-debpkg_string_temp += "Description: " + pkgdescription + "\n"
+debpkg_control_file = os.path.realpath(debpkg_debian_dir+os.path.sep+"control")
+print("generating file "+debpkg_control_file)
+debpkg_string_temp = "Source: "+pkgsource+"\n"
+debpkg_string_temp += "Maintainer: "+pkgmaintainer+"\n"
+debpkg_string_temp += "Homepage: "+pkghomepage+"\n"
+debpkg_string_temp += "Vcs-Git: "+pkggiturl+"\n"
+debpkg_string_temp += "Vcs-Browser: "+pkghomepage+"\n"
+debpkg_string_temp += "Section: "+pkgsection+"\n"
+debpkg_string_temp += "Priority: "+pkgpriority+"\n"
+debpkg_string_temp += "Build-Depends: "+pkgbuilddepends+"\n"
+debpkg_string_temp += "Standards-Version: "+pkgstandardsversion+"\n\n"
+debpkg_string_temp += "Package: "+pkgpackage+"\n"
+debpkg_string_temp += "Architecture: "+pkgarchitecture+"\n"
+debpkg_string_temp += "Depends: "+pkgdepends+"\n"
+debpkg_string_temp += "Replaces: "+pkgoldname+"\n"
+debpkg_string_temp += "Description: "+pkgdescription+"\n"
 debpkg_file_temp = open(debpkg_control_file, "w")
 debpkg_file_temp.write(debpkg_string_temp)
 debpkg_file_temp.close()
 os.chmod(debpkg_control_file, int("0644", 8))
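
The control file assembled above is a set of plain RFC-822-style field stanzas; a minimal sketch of the same idea, with the field names taken from the hunk and the values purely illustrative:

    # Sketch: build a debian/control source stanza from a field list (values illustrative).
    fields = [("Source", "py3www-get"), ("Section", "python"),
              ("Priority", "optional"), ("Standards-Version", "3.9.8")]
    stanza = "".join("%s: %s\n" % (name, value) for name, value in fields)
    print(stanza)   # the script writes this to debian/control and chmods it 0644
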
" + pkgupstreamname + "\n" -debpkg_string_temp += "Source: " + pkghomepage + "\n\n" +debpkg_string_temp += "Upstream-Name: "+pkgupstreamname+"\n" +debpkg_string_temp += "Source: "+pkghomepage+"\n\n" debpkg_string_temp += "Files: *\n" -debpkg_string_temp += "Copyright: Copyright 2011-2016 " + pkgauthor + "\n" +debpkg_string_temp += "Copyright: Copyright 2011-2016 "+pkgauthor+"\n" debpkg_string_temp += "License: BSD\n\n" debpkg_string_temp += "License: BSD\n" debpkg_string_temp += " Revised BSD License\n\n" @@ -356,23 +278,19 @@ def which_exec(execfile): debpkg_file_temp.close() os.chmod(debpkg_copyright_file, int("0644", 8)) -debpkg_rules_file = os.path.realpath(debpkg_debian_dir + os.path.sep + "rules") -print("generating file " + debpkg_rules_file) -if (sys.version[0] == "2" and (buildsystem == - "python" or buildsystem == "python_distutils")): +debpkg_rules_file = os.path.realpath(debpkg_debian_dir+os.path.sep+"rules") +print("generating file "+debpkg_rules_file) +if(sys.version[0] == "2" and (buildsystem == "python" or buildsystem == "python_distutils")): debpkg_string_temp = "#!/usr/bin/make -f\n\n" - debpkg_string_temp += "# This file was automatically generated by " + \ - profullname + " at\n" - debpkg_string_temp += "# " + pkgtzstr + "\n\n" + debpkg_string_temp += "# This file was automatically generated by "+profullname+" at\n" + debpkg_string_temp += "# "+pkgtzstr+"\n\n" debpkg_string_temp += "export DH_VERBOSE=1\n\n" debpkg_string_temp += "%:\n" debpkg_string_temp += " dh $@ --with python2 --buildsystem=python_distutils\n" -if (sys.version[0] == "3" and (buildsystem == - "python" or buildsystem == "python_distutils")): +if(sys.version[0] == "3" and (buildsystem == "python" or buildsystem == "python_distutils")): debpkg_string_temp = "#!/usr/bin/make -f\n\n" - debpkg_string_temp += "# This file was automatically generated by " + \ - profullname + " at\n" - debpkg_string_temp += "# " + pkgtzstr + "\n\n" + debpkg_string_temp += "# This file was automatically generated by "+profullname+" at\n" + debpkg_string_temp += "# "+pkgtzstr+"\n\n" debpkg_string_temp += "export DH_VERBOSE=1\n\n" debpkg_string_temp += "%:\n" debpkg_string_temp += " dh $@ --with python3\n" @@ -383,36 +301,30 @@ def which_exec(execfile): debpkg_string_temp += "override_dh_auto_install:\n" debpkg_string_temp += " python3 setup.py install \\\n" debpkg_string_temp += " --force --root=$(CURDIR)/debian/" + \ - pkgpackage + " \\\n" + pkgpackage+" \\\n" debpkg_string_temp += " --no-compile -O0 --install-layout=deb\n\n" debpkg_string_temp += "override_dh_auto_clean:\n" debpkg_string_temp += " python3 setup.py clean\n" -if (sys.version[0] == "2" and (buildsystem == - "pybuild" or buildsystem == "python_build")): +if(sys.version[0] == "2" and (buildsystem == "pybuild" or buildsystem == "python_build")): debpkg_string_temp = "#!/usr/bin/make -f\n\n" - debpkg_string_temp += "# This file was automatically generated by " + \ - profullname + " at\n" - debpkg_string_temp += "# " + pkgtzstr + "\n\n" + debpkg_string_temp += "# This file was automatically generated by "+profullname+" at\n" + debpkg_string_temp += "# "+pkgtzstr+"\n\n" debpkg_string_temp += "export DH_VERBOSE=1\n" debpkg_string_temp += "export PYBUILD_NAME=pywww-get\n\n" debpkg_string_temp += "%:\n" debpkg_string_temp += " dh $@ --with python2 --buildsystem=pybuild\n" -if (sys.version[0] == "3" and (buildsystem == - "pybuild" or buildsystem == "python_build")): +if(sys.version[0] == "3" and (buildsystem == "pybuild" or buildsystem == "python_build")): 
debpkg_string_temp = "#!/usr/bin/make -f\n\n" - debpkg_string_temp += "# This file was automatically generated by " + \ - profullname + " at\n" - debpkg_string_temp += "# " + pkgtzstr + "\n\n" + debpkg_string_temp += "# This file was automatically generated by "+profullname+" at\n" + debpkg_string_temp += "# "+pkgtzstr+"\n\n" debpkg_string_temp += "export DH_VERBOSE=1\n" debpkg_string_temp += "export PYBUILD_NAME=pywww-get\n\n" debpkg_string_temp += "%:\n" debpkg_string_temp += " dh $@ --with python3 --buildsystem=pybuild\n" -if ((sys.version[0] == "2" or sys.version[0] == "3") - and buildsystem == "cmake"): +if((sys.version[0] == "2" or sys.version[0] == "3") and buildsystem == "cmake"): debpkg_string_temp = "#!/usr/bin/make -f\n\n" - debpkg_string_temp += "# This file was automatically generated by " + \ - profullname + " at\n" - debpkg_string_temp += "# " + pkgtzstr + "\n\n" + debpkg_string_temp += "# This file was automatically generated by "+profullname+" at\n" + debpkg_string_temp += "# "+pkgtzstr+"\n\n" debpkg_string_temp += "export DH_VERBOSE=1\n\n" debpkg_string_temp += "%:\n" debpkg_string_temp += " dh $@ --buildsystem=cmake --parallel\n" @@ -421,25 +333,22 @@ def which_exec(execfile): debpkg_file_temp.close() os.chmod(debpkg_rules_file, int("0755", 8)) -debpkg_source_dir = os.path.realpath( - debpkg_debian_dir + os.path.sep + "source") -print("creating directory " + debpkg_source_dir) -if (not os.path.exists(debpkg_source_dir)): +debpkg_source_dir = os.path.realpath(debpkg_debian_dir+os.path.sep+"source") +print("creating directory "+debpkg_source_dir) +if(not os.path.exists(debpkg_source_dir)): os.makedirs(debpkg_source_dir) os.chmod(debpkg_source_dir, int("0755", 8)) -debpkg_format_file = os.path.realpath( - debpkg_source_dir + os.path.sep + "format") -print("generating file " + debpkg_format_file) +debpkg_format_file = os.path.realpath(debpkg_source_dir+os.path.sep+"format") +print("generating file "+debpkg_format_file) debpkg_string_temp = "3.0 (native)\n" debpkg_file_temp = open(debpkg_format_file, "w") debpkg_file_temp.write(debpkg_string_temp) debpkg_file_temp.close() os.chmod(debpkg_format_file, int("0644", 8)) -debpkg_options_file = os.path.realpath( - debpkg_source_dir + os.path.sep + "options") -print("generating file " + debpkg_options_file) +debpkg_options_file = os.path.realpath(debpkg_source_dir+os.path.sep+"options") +print("generating file "+debpkg_options_file) debpkg_string_temp = "extend-diff-ignore=\"\\.egg-info\"\n" debpkg_file_temp = open(debpkg_options_file, "w") debpkg_file_temp.write(debpkg_string_temp) diff --git a/pkgbuild/debian/python3/realpath.py b/pkgbuild/debian/python3/realpath.py index 9837108..7350f44 100755 --- a/pkgbuild/debian/python3/realpath.py +++ b/pkgbuild/debian/python3/realpath.py @@ -15,24 +15,22 @@ $FileInfo: realpath.py - Last Update: 2/15/2016 Ver. 0.0.5 RC 3 - Author: cooldude2k $ ''' -from __future__ import (absolute_import, division, print_function, - unicode_literals) - -import argparse +from __future__ import absolute_import, division, print_function, unicode_literals import os import sys +import argparse __version_info__ = (0, 0, 5, "rc3") -if (__version_info__[3] is not None): - __version__ = str(__version_info__[0]) + "." + str(__version_info__[1]) + "." + str( - __version_info__[2]) + "+" + str(__version_info__[3]) -if (__version_info__[3] is None): - __version__ = str(__version_info__[ - 0]) + "." + str(__version_info__[1]) + "." 
diff --git a/pkgbuild/debian/python3/realpath.py b/pkgbuild/debian/python3/realpath.py
index 9837108..7350f44 100755
--- a/pkgbuild/debian/python3/realpath.py
+++ b/pkgbuild/debian/python3/realpath.py
@@ -15,24 +15,22 @@
 $FileInfo: realpath.py - Last Update: 2/15/2016 Ver. 0.0.5 RC 3 - Author: cooldude2k $
 '''

-from __future__ import (absolute_import, division, print_function,
-                        unicode_literals)
-
-import argparse
+from __future__ import absolute_import, division, print_function, unicode_literals
 import os
 import sys
+import argparse

 __version_info__ = (0, 0, 5, "rc3")
-if (__version_info__[3] is not None):
-    __version__ = str(__version_info__[0]) + "." + str(__version_info__[1]) + "." + str(
-        __version_info__[2]) + "+" + str(__version_info__[3])
-if (__version_info__[3] is None):
-    __version__ = str(__version_info__[
-        0]) + "." + str(__version_info__[1]) + "." + str(__version_info__[2])
+if(__version_info__[3] != None):
+    __version__ = str(__version_info__[0])+"."+str(__version_info__[1])+"."+str(
+        __version_info__[2])+"+"+str(__version_info__[3])
+if(__version_info__[3] == None):
+    __version__ = str(
+        __version_info__[0])+"."+str(__version_info__[1])+"."+str(__version_info__[2])

 proname = "realpath"
 prover = __version__
-profullname = proname + " " + prover
+profullname = proname+" "+prover

 parser = argparse.ArgumentParser(conflict_handler="resolve", add_help=True)
 parser.add_argument("-v", "--version", action="version", version=profullname)
diff --git a/pkgbuild/debian/python3/which.py b/pkgbuild/debian/python3/which.py
index fde6e3d..22cccbd 100755
--- a/pkgbuild/debian/python3/which.py
+++ b/pkgbuild/debian/python3/which.py
@@ -15,24 +15,22 @@
 $FileInfo: which.py - Last Update: 2/15/2016 Ver. 0.0.5 RC 3 - Author: cooldude2k $
 '''

-from __future__ import (absolute_import, division, print_function,
-                        unicode_literals)
-
-import argparse
+from __future__ import absolute_import, division, print_function, unicode_literals
 import os
 import sys
+import argparse

 __version_info__ = (0, 0, 5, "rc3")
-if (__version_info__[3] is not None):
-    __version__ = str(__version_info__[0]) + "." + str(__version_info__[1]) + "." + str(
-        __version_info__[2]) + "+" + str(__version_info__[3])
-if (__version_info__[3] is None):
-    __version__ = str(__version_info__[
-        0]) + "." + str(__version_info__[1]) + "." + str(__version_info__[2])
+if(__version_info__[3] != None):
+    __version__ = str(__version_info__[0])+"."+str(__version_info__[1])+"."+str(
+        __version_info__[2])+"+"+str(__version_info__[3])
+if(__version_info__[3] == None):
+    __version__ = str(
+        __version_info__[0])+"."+str(__version_info__[1])+"."+str(__version_info__[2])

 proname = "which"
 prover = __version__
-profullname = proname + " " + prover
+profullname = proname+" "+prover


 def which_exec(execfile):
diff --git a/pkgbuild/linuxmint/python2/basename.py b/pkgbuild/linuxmint/python2/basename.py
index cb03c1e..d0538a9 100755
--- a/pkgbuild/linuxmint/python2/basename.py
+++ b/pkgbuild/linuxmint/python2/basename.py
@@ -15,24 +15,22 @@
 $FileInfo: basename.py - Last Update: 2/15/2016 Ver. 0.0.5 RC 3 - Author: cooldude2k $
 '''

-from __future__ import (absolute_import, division, print_function,
-                        unicode_literals)
-
-import argparse
+from __future__ import absolute_import, division, print_function, unicode_literals
 import os
 import sys
+import argparse

 __version_info__ = (0, 0, 5, "rc3")
-if (__version_info__[3] is not None):
-    __version__ = str(__version_info__[0]) + "." + str(__version_info__[1]) + "." + str(
-        __version_info__[2]) + "+" + str(__version_info__[3])
-if (__version_info__[3] is None):
-    __version__ = str(__version_info__[
-        0]) + "." + str(__version_info__[1]) + "." + str(__version_info__[2])
+if(__version_info__[3] != None):
+    __version__ = str(__version_info__[0])+"."+str(__version_info__[1])+"."+str(
+        __version_info__[2])+"+"+str(__version_info__[3])
+if(__version_info__[3] == None):
+    __version__ = str(
+        __version_info__[0])+"."+str(__version_info__[1])+"."+str(__version_info__[2])

 proname = "basename"
 prover = __version__
-profullname = proname + " " + prover
+profullname = proname+" "+prover

 parser = argparse.ArgumentParser(conflict_handler="resolve", add_help=True)
 parser.add_argument("-v", "--version", action="version", version=profullname)
diff --git a/pkgbuild/linuxmint/python2/dirname.py b/pkgbuild/linuxmint/python2/dirname.py
index 1225a5a..46d4997 100755
--- a/pkgbuild/linuxmint/python2/dirname.py
+++ b/pkgbuild/linuxmint/python2/dirname.py
@@ -15,24 +15,22 @@
 $FileInfo: dirname.py - Last Update: 2/15/2016 Ver. 0.0.5 RC 3 - Author: cooldude2k $
 '''

-from __future__ import (absolute_import, division, print_function,
-                        unicode_literals)
-
-import argparse
+from __future__ import absolute_import, division, print_function, unicode_literals
 import os
 import sys
+import argparse

 __version_info__ = (0, 0, 5, "rc3")
-if (__version_info__[3] is not None):
-    __version__ = str(__version_info__[0]) + "." + str(__version_info__[1]) + "." + str(
-        __version_info__[2]) + "+" + str(__version_info__[3])
-if (__version_info__[3] is None):
-    __version__ = str(__version_info__[
-        0]) + "." + str(__version_info__[1]) + "." + str(__version_info__[2])
+if(__version_info__[3] != None):
+    __version__ = str(__version_info__[0])+"."+str(__version_info__[1])+"."+str(
+        __version_info__[2])+"+"+str(__version_info__[3])
+if(__version_info__[3] == None):
+    __version__ = str(
+        __version_info__[0])+"."+str(__version_info__[1])+"."+str(__version_info__[2])

 proname = "dirname"
 prover = __version__
-profullname = proname + " " + prover
+profullname = proname+" "+prover

 parser = argparse.ArgumentParser(conflict_handler="resolve", add_help=True)
 parser.add_argument("-v", "--version", action="version", version=profullname)
diff --git a/pkgbuild/linuxmint/python2/realpath.py b/pkgbuild/linuxmint/python2/realpath.py
index 5eaaa7a..b2e441f 100755
--- a/pkgbuild/linuxmint/python2/realpath.py
+++ b/pkgbuild/linuxmint/python2/realpath.py
@@ -15,24 +15,22 @@
 $FileInfo: realpath.py - Last Update: 2/15/2016 Ver. 0.0.5 RC 3 - Author: cooldude2k $
 '''

-from __future__ import (absolute_import, division, print_function,
-                        unicode_literals)
-
-import argparse
+from __future__ import absolute_import, division, print_function, unicode_literals
 import os
 import sys
+import argparse

 __version_info__ = (0, 0, 5, "rc3")
-if (__version_info__[3] is not None):
-    __version__ = str(__version_info__[0]) + "." + str(__version_info__[1]) + "." + str(
-        __version_info__[2]) + "+" + str(__version_info__[3])
-if (__version_info__[3] is None):
-    __version__ = str(__version_info__[
-        0]) + "." + str(__version_info__[1]) + "." + str(__version_info__[2])
+if(__version_info__[3] != None):
+    __version__ = str(__version_info__[0])+"."+str(__version_info__[1])+"."+str(
+        __version_info__[2])+"+"+str(__version_info__[3])
+if(__version_info__[3] == None):
+    __version__ = str(
+        __version_info__[0])+"."+str(__version_info__[1])+"."+str(__version_info__[2])

 proname = "realpath"
 prover = __version__
-profullname = proname + " " + prover
+profullname = proname+" "+prover

 parser = argparse.ArgumentParser(conflict_handler="resolve", add_help=True)
 parser.add_argument("-v", "--version", action="version", version=profullname)
diff --git a/pkgbuild/linuxmint/python2/which.py b/pkgbuild/linuxmint/python2/which.py
index 36ff5fb..e5cf271 100755
--- a/pkgbuild/linuxmint/python2/which.py
+++ b/pkgbuild/linuxmint/python2/which.py
@@ -15,24 +15,22 @@
 $FileInfo: which.py - Last Update: 2/15/2016 Ver. 0.0.5 RC 3 - Author: cooldude2k $
 '''

-from __future__ import (absolute_import, division, print_function,
-                        unicode_literals)
-
-import argparse
+from __future__ import absolute_import, division, print_function, unicode_literals
 import os
 import sys
+import argparse

 __version_info__ = (0, 0, 5, "rc3")
-if (__version_info__[3] is not None):
-    __version__ = str(__version_info__[0]) + "." + str(__version_info__[1]) + "." + str(
-        __version_info__[2]) + "+" + str(__version_info__[3])
-if (__version_info__[3] is None):
-    __version__ = str(__version_info__[
-        0]) + "." + str(__version_info__[1]) + "." + str(__version_info__[2])
+if(__version_info__[3] != None):
+    __version__ = str(__version_info__[0])+"."+str(__version_info__[1])+"."+str(
+        __version_info__[2])+"+"+str(__version_info__[3])
+if(__version_info__[3] == None):
+    __version__ = str(
+        __version_info__[0])+"."+str(__version_info__[1])+"."+str(__version_info__[2])

 proname = "which"
 prover = __version__
-profullname = proname + " " + prover
+profullname = proname+" "+prover


 def which_exec(execfile):
diff --git a/pkgbuild/linuxmint/python3/basename.py b/pkgbuild/linuxmint/python3/basename.py
index 8b41db3..346121c 100755
--- a/pkgbuild/linuxmint/python3/basename.py
+++ b/pkgbuild/linuxmint/python3/basename.py
@@ -15,24 +15,22 @@
 $FileInfo: basename.py - Last Update: 2/15/2016 Ver. 0.0.5 RC 3 - Author: cooldude2k $
 '''

-from __future__ import (absolute_import, division, print_function,
-                        unicode_literals)
-
-import argparse
+from __future__ import absolute_import, division, print_function, unicode_literals
 import os
 import sys
+import argparse

 __version_info__ = (0, 0, 5, "rc3")
-if (__version_info__[3] is not None):
-    __version__ = str(__version_info__[0]) + "." + str(__version_info__[1]) + "." + str(
-        __version_info__[2]) + "+" + str(__version_info__[3])
-if (__version_info__[3] is None):
-    __version__ = str(__version_info__[
-        0]) + "." + str(__version_info__[1]) + "." + str(__version_info__[2])
+if(__version_info__[3] != None):
+    __version__ = str(__version_info__[0])+"."+str(__version_info__[1])+"."+str(
+        __version_info__[2])+"+"+str(__version_info__[3])
+if(__version_info__[3] == None):
+    __version__ = str(
+        __version_info__[0])+"."+str(__version_info__[1])+"."+str(__version_info__[2])

 proname = "basename"
 prover = __version__
-profullname = proname + " " + prover
+profullname = proname+" "+prover

 parser = argparse.ArgumentParser(conflict_handler="resolve", add_help=True)
 parser.add_argument("-v", "--version", action="version", version=profullname)
diff --git a/pkgbuild/linuxmint/python3/dirname.py b/pkgbuild/linuxmint/python3/dirname.py
index 5aea1fd..080502f 100755
--- a/pkgbuild/linuxmint/python3/dirname.py
+++ b/pkgbuild/linuxmint/python3/dirname.py
@@ -15,24 +15,22 @@
 $FileInfo: dirname.py - Last Update: 2/15/2016 Ver. 0.0.5 RC 3 - Author: cooldude2k $
 '''

-from __future__ import (absolute_import, division, print_function,
-                        unicode_literals)
-
-import argparse
+from __future__ import absolute_import, division, print_function, unicode_literals
 import os
 import sys
+import argparse

 __version_info__ = (0, 0, 5, "rc3")
-if (__version_info__[3] is not None):
-    __version__ = str(__version_info__[0]) + "." + str(__version_info__[1]) + "." + str(
-        __version_info__[2]) + "+" + str(__version_info__[3])
-if (__version_info__[3] is None):
-    __version__ = str(__version_info__[
-        0]) + "." + str(__version_info__[1]) + "." + str(__version_info__[2])
+if(__version_info__[3] != None):
+    __version__ = str(__version_info__[0])+"."+str(__version_info__[1])+"."+str(
+        __version_info__[2])+"+"+str(__version_info__[3])
+if(__version_info__[3] == None):
+    __version__ = str(
+        __version_info__[0])+"."+str(__version_info__[1])+"."+str(__version_info__[2])

 proname = "dirname"
 prover = __version__
-profullname = proname + " " + prover
+profullname = proname+" "+prover

 parser = argparse.ArgumentParser(conflict_handler="resolve", add_help=True)
 parser.add_argument("-v", "--version", action="version", version=profullname)
diff --git a/pkgbuild/linuxmint/python3/realpath.py b/pkgbuild/linuxmint/python3/realpath.py
index 9837108..7350f44 100755
--- a/pkgbuild/linuxmint/python3/realpath.py
+++ b/pkgbuild/linuxmint/python3/realpath.py
@@ -15,24 +15,22 @@
 $FileInfo: realpath.py - Last Update: 2/15/2016 Ver. 0.0.5 RC 3 - Author: cooldude2k $
 '''

-from __future__ import (absolute_import, division, print_function,
-                        unicode_literals)
-
-import argparse
+from __future__ import absolute_import, division, print_function, unicode_literals
 import os
 import sys
+import argparse

 __version_info__ = (0, 0, 5, "rc3")
-if (__version_info__[3] is not None):
-    __version__ = str(__version_info__[0]) + "." + str(__version_info__[1]) + "." + str(
-        __version_info__[2]) + "+" + str(__version_info__[3])
-if (__version_info__[3] is None):
-    __version__ = str(__version_info__[
-        0]) + "." + str(__version_info__[1]) + "." + str(__version_info__[2])
+if(__version_info__[3] != None):
+    __version__ = str(__version_info__[0])+"."+str(__version_info__[1])+"."+str(
+        __version_info__[2])+"+"+str(__version_info__[3])
+if(__version_info__[3] == None):
+    __version__ = str(
+        __version_info__[0])+"."+str(__version_info__[1])+"."+str(__version_info__[2])

 proname = "realpath"
 prover = __version__
-profullname = proname + " " + prover
+profullname = proname+" "+prover

 parser = argparse.ArgumentParser(conflict_handler="resolve", add_help=True)
 parser.add_argument("-v", "--version", action="version", version=profullname)
diff --git a/pkgbuild/linuxmint/python3/which.py b/pkgbuild/linuxmint/python3/which.py
index fde6e3d..22cccbd 100755
--- a/pkgbuild/linuxmint/python3/which.py
+++ b/pkgbuild/linuxmint/python3/which.py
@@ -15,24 +15,22 @@
 $FileInfo: which.py - Last Update: 2/15/2016 Ver. 0.0.5 RC 3 - Author: cooldude2k $
 '''

-from __future__ import (absolute_import, division, print_function,
-                        unicode_literals)
-
-import argparse
+from __future__ import absolute_import, division, print_function, unicode_literals
 import os
 import sys
+import argparse

 __version_info__ = (0, 0, 5, "rc3")
-if (__version_info__[3] is not None):
-    __version__ = str(__version_info__[0]) + "." + str(__version_info__[1]) + "." + str(
-        __version_info__[2]) + "+" + str(__version_info__[3])
-if (__version_info__[3] is None):
-    __version__ = str(__version_info__[
-        0]) + "." + str(__version_info__[1]) + "." + str(__version_info__[2])
+if(__version_info__[3] != None):
+    __version__ = str(__version_info__[0])+"."+str(__version_info__[1])+"."+str(
+        __version_info__[2])+"+"+str(__version_info__[3])
+if(__version_info__[3] == None):
+    __version__ = str(
+        __version_info__[0])+"."+str(__version_info__[1])+"."+str(__version_info__[2])

 proname = "which"
 prover = __version__
-profullname = proname + " " + prover
+profullname = proname+" "+prover


 def which_exec(execfile):
diff --git a/pkgbuild/ubuntu/python2/basename.py b/pkgbuild/ubuntu/python2/basename.py
index 017ae45..36f255a 100755
--- a/pkgbuild/ubuntu/python2/basename.py
+++ b/pkgbuild/ubuntu/python2/basename.py
@@ -15,24 +15,22 @@
 $FileInfo: basename.py - Last Update: 4/23/2016 Ver. 0.0.5 RC 3 - Author: cooldude2k $
 '''

-from __future__ import (absolute_import, division, print_function,
-                        unicode_literals)
-
-import argparse
+from __future__ import absolute_import, division, print_function, unicode_literals
 import os
 import sys
+import argparse

 __version_info__ = (0, 0, 5, "rc3")
-if (__version_info__[3] is not None):
-    __version__ = str(__version_info__[0]) + "." + str(__version_info__[1]) + "." + str(
-        __version_info__[2]) + "+" + str(__version_info__[3])
-if (__version_info__[3] is None):
-    __version__ = str(__version_info__[
-        0]) + "." + str(__version_info__[1]) + "." + str(__version_info__[2])
+if(__version_info__[3] != None):
+    __version__ = str(__version_info__[0])+"."+str(__version_info__[1])+"."+str(
+        __version_info__[2])+"+"+str(__version_info__[3])
+if(__version_info__[3] == None):
+    __version__ = str(
+        __version_info__[0])+"."+str(__version_info__[1])+"."+str(__version_info__[2])

 proname = "basename"
 prover = __version__
-profullname = proname + " " + prover
+profullname = proname+" "+prover

 parser = argparse.ArgumentParser(conflict_handler="resolve", add_help=True)
 parser.add_argument("-v", "--version", action="version", version=profullname)
diff --git a/pkgbuild/ubuntu/python2/dirname.py b/pkgbuild/ubuntu/python2/dirname.py
index 364beee..7cf14da 100755
--- a/pkgbuild/ubuntu/python2/dirname.py
+++ b/pkgbuild/ubuntu/python2/dirname.py
@@ -15,24 +15,22 @@
 $FileInfo: dirname.py - Last Update: 4/23/2016 Ver. 0.0.5 RC 3 - Author: cooldude2k $
 '''

-from __future__ import (absolute_import, division, print_function,
-                        unicode_literals)
-
-import argparse
+from __future__ import absolute_import, division, print_function, unicode_literals
 import os
 import sys
+import argparse

 __version_info__ = (0, 0, 5, "rc3")
-if (__version_info__[3] is not None):
-    __version__ = str(__version_info__[0]) + "." + str(__version_info__[1]) + "." + str(
-        __version_info__[2]) + "+" + str(__version_info__[3])
-if (__version_info__[3] is None):
-    __version__ = str(__version_info__[
-        0]) + "." + str(__version_info__[1]) + "." + str(__version_info__[2])
+if(__version_info__[3] != None):
+    __version__ = str(__version_info__[0])+"."+str(__version_info__[1])+"."+str(
+        __version_info__[2])+"+"+str(__version_info__[3])
+if(__version_info__[3] == None):
+    __version__ = str(
+        __version_info__[0])+"."+str(__version_info__[1])+"."+str(__version_info__[2])

 proname = "dirname"
 prover = __version__
-profullname = proname + " " + prover
+profullname = proname+" "+prover

 parser = argparse.ArgumentParser(conflict_handler="resolve", add_help=True)
 parser.add_argument("-v", "--version", action="version", version=profullname)
diff --git a/pkgbuild/ubuntu/python2/pydeb-gen.py b/pkgbuild/ubuntu/python2/pydeb-gen.py
index cfe9e83..11d7877 100755
--- a/pkgbuild/ubuntu/python2/pydeb-gen.py
+++ b/pkgbuild/ubuntu/python2/pydeb-gen.py
@@ -15,29 +15,27 @@
 $FileInfo: pydeb-gen.py - Last Update: 6/1/2016 Ver. 0.2.0 RC 1 - Author: cooldude2k $
 '''

-from __future__ import (absolute_import, division, print_function,
-                        unicode_literals)
-
-import argparse
-import datetime
-import json
-import os
+from __future__ import absolute_import, division, print_function, unicode_literals
 import re
-import subprocess
+import os
 import sys
 import time
+import datetime
+import argparse
+import subprocess
+import json

 __version_info__ = (0, 2, 0, "rc1")
-if (__version_info__[3] is not None):
-    __version__ = str(__version_info__[0]) + "." + str(__version_info__[1]) + "." + str(
-        __version_info__[2]) + "+" + str(__version_info__[3])
-if (__version_info__[3] is None):
-    __version__ = str(__version_info__[
-        0]) + "." + str(__version_info__[1]) + "." + str(__version_info__[2])
+if(__version_info__[3] != None):
+    __version__ = str(__version_info__[0])+"."+str(__version_info__[1])+"."+str(
+        __version_info__[2])+"+"+str(__version_info__[3])
+if(__version_info__[3] == None):
+    __version__ = str(
+        __version_info__[0])+"."+str(__version_info__[1])+"."+str(__version_info__[2])

 proname = "pydeb-gen"
 prover = __version__
-profullname = proname + " " + prover
+profullname = proname+" "+prover

 buildsystem = "pybuild"

@@ -47,327 +45,35 @@ def which_exec(execfile):
             return path + "/" + execfile


-distupnametover = {
-    'Warty': "4.10",
-    'Hoary': "5.04",
-    'Breezy': "5.10",
-    'Dapper': "6.06",
-    'Edgy': "6.10",
-    'Feisty': "7.04",
-    'Gutsy': "7.10",
-    'Hardy': "8.04",
-    'Intrepid': "8.10",
-    'Jaunty': "9.04",
-    'Karmic': "9.10",
-    'Lucid': "10.04",
-    'Maverick': "10.10",
-    'Natty': "11.04",
-    'Oneiric': "11.10",
-    'Precise': "12.04",
-    'Quantal': "12.10",
-    'Raring': "13.04",
-    'Saucy': "13.10",
-    'Trusty': "14.04",
-    'Utopic': "14.10",
-    'Vivid': "15.04",
-    'Wily': "15.10",
-    'Xenial': "16.04",
-    'Yakkety': "16.10",
-    'Zesty': "16.10"}
-distnametover = {
-    'warty': "4.10",
-    'hoary': "5.04",
-    'breezy': "5.10",
-    'dapper': "6.06",
-    'edgy': "6.10",
-    'feisty': "7.04",
-    'gutsy': "7.10",
-    'hardy': "8.04",
-    'intrepid': "8.10",
-    'jaunty': "9.04",
-    'karmic': "9.10",
-    'lucid': "10.04",
-    'maverick': "10.10",
-    'natty': "11.04",
-    'oneiric': "11.10",
-    'precise': "12.04",
-    'quantal': "12.10",
-    'raring': "13.04",
-    'saucy': "13.10",
-    'trusty': "14.04",
-    'utopic': "14.10",
-    'vivid': "15.04",
-    'wily': "15.10",
-    'xenial': "16.04",
-    'yakkety': "16.10",
-    'zesty': "17.04"}
+distupnametover = {'Warty': "4.10", 'Hoary': "5.04", 'Breezy': "5.10", 'Dapper': "6.06", 'Edgy': "6.10", 'Feisty': "7.04", 'Gutsy': "7.10", 'Hardy': "8.04", 'Intrepid': "8.10", 'Jaunty': "9.04", 'Karmic': "9.10", 'Lucid': "10.04", 'Maverick': "10.10",
+                   'Natty': "11.04", 'Oneiric': "11.10", 'Precise': "12.04", 'Quantal': "12.10", 'Raring': "13.04", 'Saucy': "13.10", 'Trusty': "14.04", 'Utopic': "14.10", 'Vivid': "15.04", 'Wily': "15.10", 'Xenial': "16.04", 'Yakkety': "16.10", 'Zesty': "16.10"}
+distnametover = {'warty': "4.10", 'hoary': "5.04", 'breezy': "5.10", 'dapper': "6.06", 'edgy': "6.10", 'feisty': "7.04", 'gutsy': "7.10", 'hardy': "8.04", 'intrepid': "8.10", 'jaunty': "9.04", 'karmic': "9.10", 'lucid': "10.04", 'maverick': "10.10",
+                 'natty': "11.04", 'oneiric': "11.10", 'precise': "12.04", 'quantal': "12.10", 'raring': "13.04", 'saucy': "13.10", 'trusty': "14.04", 'utopic': "14.10", 'vivid': "15.04", 'wily': "15.10", 'xenial': "16.04", 'yakkety': "16.10", 'zesty': "17.04"}
 distnamelist = distnametover.keys()
-distvertoname = {
-    '4.10': "warty",
-    '5.04': "hoary",
-    '5.10': "breezy",
-    '6.06': "dapper",
-    '6.10': "edgy",
-    '7.04': "feisty",
-    '7.10': "gutsy",
-    '8.04': "hardy",
-    '8.10': "intrepid",
-    '9.04': "jaunty",
-    '9.10': "karmic",
-    '10.04': "lucid",
-    '10.10': "maverick",
-    '11.04': "natty",
-    '11.10': "oneiric",
-    '12.04': "precise",
-    '12.10': "quantal",
-    '13.04': "raring",
-    '13.10': "saucy",
-    '14.04': "trusty",
-    '14.10': "utopic",
-    '15.04': "vivid",
-    '15.10': "wily",
-    '16.04': "xenial",
-    '17.04': "zesty"}
-distvertoupname = {
-    '4.10': "Warty",
-    '5.04': "Hoary",
-    '5.10': "Breezy",
-    '6.06': "Dapper",
-    '6.10': "Edgy",
-    '7.04': "Feisty",
-    '7.10': "Gutsy",
-    '8.04': "Hardy",
-    '8.10': "Intrepid",
-    '9.04': "Jaunty",
-    '9.10': "Karmic",
-    '10.04': "Lucid",
-    '10.10': "Maverick",
-    '11.04': "Natty",
-    '11.10': "Oneiric",
-    '12.04': "Precise",
-    '12.10': "Quantal",
-    '13.04': "Raring",
-    '13.10': "Saucy",
-    '14.04': "Trusty",
-    '14.10': "Utopic",
-    '15.04': "Vivid",
-    '15.10': "Wily",
-    '16.04': "Xenial",
-    '17.04': "Zesty"}
+distvertoname = {'4.10': "warty", '5.04': "hoary", '5.10': "breezy", '6.06': "dapper", '6.10': "edgy", '7.04': "feisty", '7.10': "gutsy", '8.04': "hardy", '8.10': "intrepid", '9.04': "jaunty", '9.10': "karmic", '10.04': "lucid",
+                 '10.10': "maverick", '11.04': "natty", '11.10': "oneiric", '12.04': "precise", '12.10': "quantal", '13.04': "raring", '13.10': "saucy", '14.04': "trusty", '14.10': "utopic", '15.04': "vivid", '15.10': "wily", '16.04': "xenial", '17.04': "zesty"}
+distvertoupname = {'4.10': "Warty", '5.04': "Hoary", '5.10': "Breezy", '6.06': "Dapper", '6.10': "Edgy", '7.04': "Feisty", '7.10': "Gutsy", '8.04': "Hardy", '8.10': "Intrepid", '9.04': "Jaunty", '9.10': "Karmic", '10.04': "Lucid",
+                   '10.10': "Maverick", '11.04': "Natty", '11.10': "Oneiric", '12.04': "Precise", '12.10': "Quantal", '13.04': "Raring", '13.10': "Saucy", '14.04': "Trusty", '14.10': "Utopic", '15.04': "Vivid", '15.10': "Wily", '16.04': "Xenial", '17.04': "Zesty"}
 distnamelistalt = distvertoname.values()
-distnametoveralt = {
-    'Warty Warthog': "4.10",
-    'Hoary Hedgehog': "5.04",
-    'Breezy Badger': "5.10",
-    'Dapper Drake': "6.06",
-    'Edgy Eft': "6.10",
-    'Feisty Fawn': "7.04",
-    'Gutsy Gibbon': "7.10",
-    'Hardy Heron': "8.04",
-    'Intrepid Ibex': "8.10",
-    'Jaunty Jackalope': "9.04",
-    'Karmic Koala': "9.10",
-    'Lucid Lynx': "10.04",
-    'Maverick Meerkat': "10.10",
-    'Natty Narwhal': "11.04",
-    'Oneiric Ocelot': "11.10",
-    'Precise Pangolin': "12.04",
-    'Quantal Quetzal': "12.10",
-    'Raring Ringtail': "13.04",
-    'Saucy Salamander': "13.10",
-    'Trusty Tahr': "14.04",
-    'Utopic Unicorn': "14.10",
-    'Vivid Vervet': "15.04",
-    'Wily Werewolf': "15.10",
-    'Xenial Xerus': "16.04",
-    'Yakkety Yak': "16.10",
-    'Zesty Zapus': "17.04"}
-distvertonamealt = {
-    '4.10': "Warty Warthog",
-    '5.04': "Hoary Hedgehog",
-    '5.10': "Breezy Badger",
-    '6.06': "Dapper Drake",
-    '6.10': "Edgy Eft",
-    '7.04': "Feisty Fawn",
-    '7.10': "Gutsy Gibbon",
-    '8.04': "Hardy Heron",
-    '8.10': "Intrepid Ibex",
-    '9.04': "Jaunty Jackalope",
-    '9.10': "Karmic Koala",
-    '10.04': "Lucid Lynx",
-    '10.10': "Maverick Meerkat",
-    '11.04': "Natty Narwhal",
-    '11.10': "Oneiric Ocelot",
-    '12.04': "Precise Pangolin",
-    '12.10': "Quantal Quetzal",
-    '13.04': "Raring Ringtail",
-    '13.10': "Saucy Salamander",
-    '14.04': "Trusty Tahr",
-    '14.10': "Utopic Unicorn",
-    '15.04': "Vivid Vervet",
-    '15.10': "Wily Werewolf",
-    '16.04': "Xenial Xerus",
-    '16.10': "Yakkety Yak",
-    '17.04': "Zesty Zapus"}
+distnametoveralt = {'Warty Warthog': "4.10", 'Hoary Hedgehog': "5.04", 'Breezy Badger': "5.10", 'Dapper Drake': "6.06", 'Edgy Eft': "6.10",
+                    'Feisty Fawn': "7.04", 'Gutsy Gibbon': "7.10", 'Hardy Heron': "8.04", 'Intrepid Ibex': "8.10", 'Jaunty Jackalope': "9.04", 'Karmic Koala': "9.10", 'Lucid Lynx': "10.04", 'Maverick Meerkat': "10.10", 'Natty Narwhal': "11.04", 'Oneiric Ocelot': "11.10", 'Precise Pangolin': "12.04", 'Quantal Quetzal': "12.10", 'Raring Ringtail': "13.04", 'Saucy Salamander': "13.10", 'Trusty Tahr': "14.04", 'Utopic Unicorn': "14.10", 'Vivid Vervet': "15.04", 'Wily Werewolf': "15.10", 'Xenial Xerus': "16.04", 'Yakkety Yak': "16.10", 'Zesty Zapus': "17.04"}
+distvertonamealt = {'4.10': "Warty Warthog", '5.04': "Hoary Hedgehog", '5.10': "Breezy Badger", '6.06': "Dapper Drake", '6.10': "Edgy Eft",
+                    '7.04': "Feisty Fawn", '7.10': "Gutsy Gibbon", '8.04': "Hardy Heron", '8.10': "Intrepid Ibex", '9.04': "Jaunty Jackalope", '9.10': "Karmic Koala", '10.04': "Lucid Lynx", '10.10': "Maverick Meerkat", '11.04': "Natty Narwhal", '11.10': "Oneiric Ocelot", '12.04': "Precise Pangolin", '12.10': "Quantal Quetzal", '13.04': "Raring Ringtail", '13.10': "Saucy Salamander", '14.04': "Trusty Tahr", '14.10': "Utopic Unicorn", '15.04': "Vivid Vervet", '15.10': "Wily Werewolf", '16.04': "Xenial Xerus", '16.10': "Yakkety Yak", '17.04': "Zesty Zapus"}
'8.10': "Intrepid Ibex", '9.04': "Jaunty Jackalope", '9.10': "Karmic Koala", '10.04': "Lucid Lynx", '10.10': "Maverick Meerkat", '11.04': "Natty Narwhal", '11.10': "Oneiric Ocelot", '12.04': "Precise Pangolin", '12.10': "Quantal Quetzal", '13.04': "Raring Ringtail", '13.10': "Saucy Salamander", '14.04': "Trusty Tahr", '14.10': "Utopic Unicorn", '15.04': "Vivid Vervet", '15.10': "Wily Werewolf", '16.04': "Xenial Xerus", '16.10': "Yakkety Yak", '17.04': "Zesty Zapus"} -lmdistvertoname = { - '1.0': "ada", - '2.0': "barbara", - '2.1': "bea", - '2.2': "bianca", - '3.0': "cassandra", - '3.1': "celena", - '4.0': "daryna", - '5': "elyssa", - '6': "felicia", - '7': "gloria", - '8': "helena", - '9': "isadora", - '10': "julia", - '11': "katya", - '12': "lisa", - '13': "maya", - '14': "nadia", - '15': "olivia", - '16': "petra", - '17': "qiana", - '17.1': "rebecca", - '17.2': "rafaela", - '17.3': "rosa", - '18': "sarah", - '18.1': "serena"} -lmdistvertonamealt = { - '1.0': "Ada", - '2.0': "Barbara", - '2.1': "Bea", - '2.2': "Bianca", - '3.0': "Cassandra", - '3.1': "Celena", - '4.0': "Daryna", - '5': "Elyssa", - '6': "Felicia", - '7': "Gloria", - '8': "Helena", - '9': "Isadora", - '10': "Julia", - '11': "Katya", - '12': "Lisa", - '13': "Maya", - '14': "Nadia", - '15': "Olivia", - '16': "Petra", - '17': "Qiana", - '17.1': "Rebecca", - '17.2': "Rafaela", - '17.3': "Rosa", - '18': "Sarah", - '18.1': "Serena"} +lmdistvertoname = {'1.0': "ada", '2.0': "barbara", '2.1': "bea", '2.2': "bianca", '3.0': "cassandra", '3.1': "celena", '4.0': "daryna", '5': "elyssa", '6': "felicia", '7': "gloria", '8': "helena", '9': "isadora", + '10': "julia", '11': "katya", '12': "lisa", '13': "maya", '14': "nadia", '15': "olivia", '16': "petra", '17': "qiana", '17.1': "rebecca", '17.2': "rafaela", '17.3': "rosa", '18': "sarah", '18.1': "serena"} +lmdistvertonamealt = {'1.0': "Ada", '2.0': "Barbara", '2.1': "Bea", '2.2': "Bianca", '3.0': "Cassandra", '3.1': "Celena", '4.0': "Daryna", '5': "Elyssa", '6': "Felicia", '7': "Gloria", '8': "Helena", '9': "Isadora", + '10': "Julia", '11': "Katya", '12': "Lisa", '13': "Maya", '14': "Nadia", '15': "Olivia", '16': "Petra", '17': "Qiana", '17.1': "Rebecca", '17.2': "Rafaela", '17.3': "Rosa", '18': "Sarah", '18.1': "Serena"} lmdistnamelistalt = lmdistvertoname.values() -lmdistnametover = { - 'ada': "1.0", - 'barbara': "2.0", - 'bea': "2.1", - 'bianca': "2.2", - 'cassandra': "3.0", - 'celena': "3.1", - 'daryna': "4.0", - 'elyssa': "5", - 'felicia': "6", - 'gloria': "7", - 'helena': "8", - 'isadora': "9", - 'julia': "10", - 'katya': "11", - 'lisa': "12", - 'maya': "13", - 'nadia': "14", - 'olivia': "15", - 'petra': "16", - 'qiana': "17", - 'rebecca': "17.1", - 'rafaela': "17.2", - 'rosa': "17.3", - 'sarah': "18", - 'serena': "18.1"} -lmdistnametoveralt = { - 'Ada': "1.0", - 'Barbara': "2.0", - 'Bea': "2.1", - 'Bianca': "2.2", - 'Cassandra': "3.0", - 'Celena': "3.1", - 'Daryna': "4.0", - 'Elyssa': "5", - 'Felicia': "6", - 'Gloria': "7", - 'Helena': "8", - 'Isadora': "9", - 'Julia': "10", - 'Katya': "11", - 'Lisa': "12", - 'Maya': "13", - 'Nadia': "14", - 'Olivia': "15", - 'Petra': "16", - 'Qiana': "17", - 'Rebecca': "17.1", - 'Rafaela': "17.2", - 'Rosa': "17.3", - 'Sarah': "18", - 'Serena': "18.1"} +lmdistnametover = {'ada': "1.0", 'barbara': "2.0", 'bea': "2.1", 'bianca': "2.2", 'cassandra': "3.0", 'celena': "3.1", 'daryna': "4.0", 'elyssa': "5", 'felicia': "6", 'gloria': "7", 'helena': "8", 'isadora': "9", + 'julia': "10", 'katya': "11", 'lisa': "12", 'maya': "13", 'nadia': "14", 'olivia': 
"15", 'petra': "16", 'qiana': "17", 'rebecca': "17.1", 'rafaela': "17.2", 'rosa': "17.3", 'sarah': "18", 'serena': "18.1"} +lmdistnametoveralt = {'Ada': "1.0", 'Barbara': "2.0", 'Bea': "2.1", 'Bianca': "2.2", 'Cassandra': "3.0", 'Celena': "3.1", 'Daryna': "4.0", 'Elyssa': "5", 'Felicia': "6", 'Gloria': "7", 'Helena': "8", 'Isadora': "9", + 'Julia': "10", 'Katya': "11", 'Lisa': "12", 'Maya': "13", 'Nadia': "14", 'Olivia': "15", 'Petra': "16", 'Qiana': "17", 'Rebecca': "17.1", 'Rafaela': "17.2", 'Rosa': "17.3", 'Sarah': "18", 'Serena': "18.1"} lmdistnamelist = lmdistnametover.keys() -distlmnametouname = { - "ada": "dapper", - "barbara": "edgy", - "bea": "edgy", - "bianca": "edgy", - "cassandra": "feisty", - "celena": "feisty", - "daryna": "gutsy", - "elyssa": "hardy", - "felicia": "intrepid", - "gloria": "jaunty", - "helena": "karmic", - "isadora": "lucid", - "julia": "maverick", - "katya": "natty", - "lisa": "oneiric", - "maya": "precise", - "nadia": "quantal", - "olivia": "raring", - "petra": "saucy", - "qiana": "trusty", - "rebecca": "trusty", - "rafaela": "trusty", - "rosa": "trusty", - "sarah": "xenial", - "serena": "xenial"} -distlmnametounamealt = { - "Ada": "Dapper", - "Barbara": "Edgy", - "Bea": "Edgy", - "Bianca": "Edgy", - "Cassandra": "Feisty", - "Celena": "Feisty", - "Daryna": "Gutsy", - "Elyssa": "Hardy", - "Felicia": "Intrepid", - "Gloria": "Jaunty", - "Helena": "Karmic", - "Isadora": "Lucid", - "Julia": "Maverick", - "Katya": "Natty", - "Lisa": "Oneiric", - "Maya": "Precise", - "Nadia": "Quantal", - "Olivia": "Raring", - "Petra": "Saucy", - "Qiana": "Trusty", - "Rebecca": "Trusty", - "Rafaela": "Trusty", - "Rosa": "Trusty", - "Sarah": "Xenial", - "Serena": "Xenial"} +distlmnametouname = {"ada": "dapper", "barbara": "edgy", "bea": "edgy", "bianca": "edgy", "cassandra": "feisty", "celena": "feisty", "daryna": "gutsy", "elyssa": "hardy", "felicia": "intrepid", "gloria": "jaunty", "helena": "karmic", "isadora": "lucid", + "julia": "maverick", "katya": "natty", "lisa": "oneiric", "maya": "precise", "nadia": "quantal", "olivia": "raring", "petra": "saucy", "qiana": "trusty", "rebecca": "trusty", "rafaela": "trusty", "rosa": "trusty", "sarah": "xenial", "serena": "xenial"} +distlmnametounamealt = {"Ada": "Dapper", "Barbara": "Edgy", "Bea": "Edgy", "Bianca": "Edgy", "Cassandra": "Feisty", "Celena": "Feisty", "Daryna": "Gutsy", "Elyssa": "Hardy", "Felicia": "Intrepid", "Gloria": "Jaunty", "Helena": "Karmic", "Isadora": "Lucid", + "Julia": "Maverick", "Katya": "Natty", "Lisa": "Oneiric", "Maya": "Precise", "Nadia": "Quantal", "Olivia": "Raring", "Petra": "Saucy", "Qiana": "Trusty", "Rebecca": "Trusty", "Rafaela": "Trusty", "Rosa": "Trusty", "Sarah": "Xenial", "Serena": "Xenial"} ubuntu_oldstable = "wily" ubuntu_stable = "xenial" @@ -393,27 +99,24 @@ def which_exec(execfile): action="store_true", help="get pkg source") getargs = parser.parse_args() getargs.source = os.path.realpath(getargs.source) -pkgsetuppy = os.path.realpath(getargs.source + os.path.sep + "setup.py") +pkgsetuppy = os.path.realpath(getargs.source+os.path.sep+"setup.py") pyexecpath = os.path.realpath(sys.executable) -if (not os.path.exists(getargs.source) or not os.path.isdir(getargs.source)): +if(not os.path.exists(getargs.source) or not os.path.isdir(getargs.source)): raise Exception("Could not find directory.") -if (not os.path.exists(pkgsetuppy) or not os.path.isfile(pkgsetuppy)): +if(not os.path.exists(pkgsetuppy) or not os.path.isfile(pkgsetuppy)): raise Exception("Could not find setup.py in directory.") 
getargs.codename = distlmnametouname.get(getargs.codename, getargs.codename) getargs.codename = distlmnametounamealt.get(getargs.codename, getargs.codename) getargs.codename = getargs.codename.lower() -if (getargs.codename not in distnamelist): - print("Could not build for ubuntu " + getargs.codename + " codename.") +if(not getargs.codename in distnamelist): + print("Could not build for ubuntu "+getargs.codename+" codename.") sys.exit() -pypkgenlistp = subprocess.Popen([pyexecpath, - pkgsetuppy, - "getversioninfo"], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE) +pypkgenlistp = subprocess.Popen( + [pyexecpath, pkgsetuppy, "getversioninfo"], stdout=subprocess.PIPE, stderr=subprocess.PIPE) pypkgenout, pypkgenerr = pypkgenlistp.communicate() -if (sys.version[0] == "3"): +if(sys.version[0] == "3"): pypkgenout = pypkgenout.decode('utf-8') pymodule = json.loads(pypkgenout) setuppy_verinfo = pymodule['versionlist'] @@ -428,153 +131,140 @@ def which_exec(execfile): setuppy_downloadurl = pymodule['downloadurl'] setuppy_longdescription = pymodule['longdescription'] setuppy_platforms = pymodule['platforms'] -standverfilename = os.path.realpath( - os.path.sep + - "usr" + - os.path.sep + - "share" + - os.path.sep + - "lintian" + - os.path.sep + - "data" + - os.path.sep + - "standards-version" + - os.path.sep + - "release-dates") +standverfilename = os.path.realpath(os.path.sep+"usr"+os.path.sep+"share"+os.path.sep + + "lintian"+os.path.sep+"data"+os.path.sep+"standards-version"+os.path.sep+"release-dates") standverfile = open(standverfilename, "r") standverdata = standverfile.read() standverfile.close() -getstandver = re.findall("([0-9]\\.[0-9]\\.[0-9])\\s+([0-9]+)", standverdata) +getstandver = re.findall("([0-9]\.[0-9]\.[0-9])\s+([0-9]+)", standverdata) getcurstandver = getstandver[0][0] dpkglocatout = which_exec("dpkg") pydpkglistp = subprocess.Popen( [dpkglocatout, "-s", "debhelper"], stdout=subprocess.PIPE, stderr=subprocess.PIPE) pydpkgout, pydpkgerr = pydpkglistp.communicate() -if (sys.version[0] == "3"): +if(sys.version[0] == "3"): pydpkgout = pydpkgout.decode("utf-8") -pydpkg_esc = re.escape("Version:") + '\\s+([0-9]+)' + re.escape(".") +pydpkg_esc = re.escape("Version:")+'\s+([0-9]+)'+re.escape(".") pydpkg_val = re.findall(pydpkg_esc, pydpkgout.replace("ubuntu", "."))[0] -if (sys.version[0] == "2"): +if(sys.version[0] == "2"): pkgsource = "py2www-get" -if (sys.version[0] == "3"): +if(sys.version[0] == "3"): pkgsource = "py3www-get" pkgupstreamname = "PyWWW-Get" -pkgveralt = str(setuppy_verinfo[0]) + "." + \ - str(setuppy_verinfo[1]) + "." + str(setuppy_verinfo[2]) -pkgver = str(pkgveralt) + "~rc" + str(setuppy_verinfo[4]) + "~" + getargs.codename + str( +pkgveralt = str(setuppy_verinfo[0])+"." 
+ \ + str(setuppy_verinfo[1])+"."+str(setuppy_verinfo[2]) +pkgver = str(pkgveralt)+"~rc"+str(setuppy_verinfo[4])+"~"+getargs.codename+str( distnametover.get(getargs.codename, "1").replace(".", "")) pkgdistname = getargs.codename pkgurgency = "urgency=low" pkgauthorname = setuppy_author pkgauthoremail = setuppy_authoremail -pkgauthor = pkgauthorname + " <" + pkgauthoremail + ">" +pkgauthor = pkgauthorname+" <"+pkgauthoremail+">" pkgmaintainername = setuppy_maintainer pkgmaintaineremail = setuppy_maintaineremail -pkgmaintainer = pkgmaintainername + " <" + pkgmaintaineremail + ">" +pkgmaintainer = pkgmaintainername+" <"+pkgmaintaineremail+">" pkggiturl = "https://github.com/GameMaker2k/PyWWW-Get.git" pkghomepage = setuppy_url pkgsection = "python" pkgpriority = "optional" -if (sys.version[0] == "2"): +if(sys.version[0] == "2"): pkgbuilddepends = "python-setuptools, python-all, debhelper, dh-python, devscripts" -if (sys.version[0] == "3"): +if(sys.version[0] == "3"): pkgbuilddepends = "python3-setuptools, python3-all, debhelper, dh-python, devscripts" -if (getargs.codename == "lucid" or getargs.codename == "precise"): - if (sys.version[0] == "2"): +if(getargs.codename == "lucid" or getargs.codename == "precise"): + if(sys.version[0] == "2"): pkgbuilddepends = "python-setuptools, python-all, debhelper, dh-python, devscripts" - if (sys.version[0] == "3"): + if(sys.version[0] == "3"): pkgbuilddepends = "python3-setuptools, python3-all, debhelper, dh-python, devscripts" pkgstandardsversion = getcurstandver -if (sys.version[0] == "2"): +if(sys.version[0] == "2"): pkgpackage = "python-pywww-get" pkgoldname = "python-www-get" -if (sys.version[0] == "3"): +if(sys.version[0] == "3"): pkgpackage = "python3-pywww-get" pkgoldname = "python3-www-get" pkgarchitecture = "all" -if (sys.version[0] == "2"): +if(sys.version[0] == "2"): pkgdepends = "${misc:Depends}, ${python:Depends}" -if (sys.version[0] == "3"): +if(sys.version[0] == "3"): pkgdepends = "${misc:Depends}, ${python3:Depends}" -pkgdescription = setuppy_description + "\n " + setuppy_longdescription +pkgdescription = setuppy_description+"\n "+setuppy_longdescription pkgtzstr = time.strftime("%a, %d %b %Y %H:%M:%S %z") -if (getargs.getsource): +if(getargs.getsource == True): print(getargs.source) sys.exit() -if (getargs.getparent): +if(getargs.getparent == True): print(os.path.realpath(os.path.dirname(getargs.source))) sys.exit() -if (getargs.getdirname): - print(pkgsource + "_" + pkgveralt + ".orig") +if(getargs.getdirname == True): + print(pkgsource+"_"+pkgveralt+".orig") sys.exit() -if (getargs.gettarname): - print(pkgsource + "_" + pkgveralt + ".orig.tar.gz") +if(getargs.gettarname == True): + print(pkgsource+"_"+pkgveralt+".orig.tar.gz") sys.exit() -if (getargs.getpkgsource): +if(getargs.getpkgsource == True): print(pkgsource) sys.exit() print("generating debian package build directory") -debpkg_debian_dir = os.path.realpath(getargs.source + os.path.sep + "debian") -print("creating directory " + debpkg_debian_dir) -if (not os.path.exists(debpkg_debian_dir)): +debpkg_debian_dir = os.path.realpath(getargs.source+os.path.sep+"debian") +print("creating directory "+debpkg_debian_dir) +if(not os.path.exists(debpkg_debian_dir)): os.makedirs(debpkg_debian_dir) os.chmod(debpkg_debian_dir, int("0755", 8)) debpkg_changelog_file = os.path.realpath( - debpkg_debian_dir + os.path.sep + "changelog") -print("generating file " + debpkg_changelog_file) + debpkg_debian_dir+os.path.sep+"changelog") +print("generating file "+debpkg_changelog_file) debpkg_string_temp 
= pkgsource + \ - " (" + pkgver + ") " + pkgdistname + "; " + pkgurgency + "\n\n" -debpkg_string_temp += " * source package automatically created by " + profullname + "\n\n" -debpkg_string_temp += " -- " + pkgmaintainer + " " + pkgtzstr + "\n" + " ("+pkgver+") "+pkgdistname+"; "+pkgurgency+"\n\n" +debpkg_string_temp += " * source package automatically created by "+profullname+"\n\n" +debpkg_string_temp += " -- "+pkgmaintainer+" "+pkgtzstr+"\n" debpkg_file_temp = open(debpkg_changelog_file, "w") debpkg_file_temp.write(debpkg_string_temp) debpkg_file_temp.close() os.chmod(debpkg_changelog_file, int("0644", 8)) -debpkg_compat_file = os.path.realpath( - debpkg_debian_dir + os.path.sep + "compat") -print("generating file " + debpkg_compat_file) -debpkg_string_temp = str(pydpkg_val) + "\n" +debpkg_compat_file = os.path.realpath(debpkg_debian_dir+os.path.sep+"compat") +print("generating file "+debpkg_compat_file) +debpkg_string_temp = str(pydpkg_val)+"\n" debpkg_file_temp = open(debpkg_compat_file, "w") debpkg_file_temp.write(debpkg_string_temp) debpkg_file_temp.close() os.chmod(debpkg_compat_file, int("0644", 8)) -debpkg_control_file = os.path.realpath( - debpkg_debian_dir + os.path.sep + "control") -print("generating file " + debpkg_control_file) -debpkg_string_temp = "Source: " + pkgsource + "\n" -debpkg_string_temp += "Maintainer: " + pkgmaintainer + "\n" -debpkg_string_temp += "Homepage: " + pkghomepage + "\n" -debpkg_string_temp += "Vcs-Git: " + pkggiturl + "\n" -debpkg_string_temp += "Vcs-Browser: " + pkghomepage + "\n" -debpkg_string_temp += "Section: " + pkgsection + "\n" -debpkg_string_temp += "Priority: " + pkgpriority + "\n" -debpkg_string_temp += "Build-Depends: " + pkgbuilddepends + "\n" -debpkg_string_temp += "Standards-Version: " + pkgstandardsversion + "\n\n" -debpkg_string_temp += "Package: " + pkgpackage + "\n" -debpkg_string_temp += "Architecture: " + pkgarchitecture + "\n" -debpkg_string_temp += "Depends: " + pkgdepends + "\n" -debpkg_string_temp += "Replaces: " + pkgoldname + "\n" -debpkg_string_temp += "Description: " + pkgdescription + "\n" +debpkg_control_file = os.path.realpath(debpkg_debian_dir+os.path.sep+"control") +print("generating file "+debpkg_control_file) +debpkg_string_temp = "Source: "+pkgsource+"\n" +debpkg_string_temp += "Maintainer: "+pkgmaintainer+"\n" +debpkg_string_temp += "Homepage: "+pkghomepage+"\n" +debpkg_string_temp += "Vcs-Git: "+pkggiturl+"\n" +debpkg_string_temp += "Vcs-Browser: "+pkghomepage+"\n" +debpkg_string_temp += "Section: "+pkgsection+"\n" +debpkg_string_temp += "Priority: "+pkgpriority+"\n" +debpkg_string_temp += "Build-Depends: "+pkgbuilddepends+"\n" +debpkg_string_temp += "Standards-Version: "+pkgstandardsversion+"\n\n" +debpkg_string_temp += "Package: "+pkgpackage+"\n" +debpkg_string_temp += "Architecture: "+pkgarchitecture+"\n" +debpkg_string_temp += "Depends: "+pkgdepends+"\n" +debpkg_string_temp += "Replaces: "+pkgoldname+"\n" +debpkg_string_temp += "Description: "+pkgdescription+"\n" debpkg_file_temp = open(debpkg_control_file, "w") debpkg_file_temp.write(debpkg_string_temp) debpkg_file_temp.close() os.chmod(debpkg_control_file, int("0644", 8)) debpkg_copyright_file = os.path.realpath( - debpkg_debian_dir + os.path.sep + "copyright") -print("generating file " + debpkg_copyright_file) + debpkg_debian_dir+os.path.sep+"copyright") +print("generating file "+debpkg_copyright_file) debpkg_string_temp = "Format: http://www.debian.org/doc/packaging-manuals/copyright-format/1.0/\n" -debpkg_string_temp += "Upstream-Name: " + 
pkgupstreamname + "\n" -debpkg_string_temp += "Source: " + pkghomepage + "\n\n" +debpkg_string_temp += "Upstream-Name: "+pkgupstreamname+"\n" +debpkg_string_temp += "Source: "+pkghomepage+"\n\n" debpkg_string_temp += "Files: *\n" -debpkg_string_temp += "Copyright: Copyright 2011-2016 " + pkgauthor + "\n" +debpkg_string_temp += "Copyright: Copyright 2011-2016 "+pkgauthor+"\n" debpkg_string_temp += "License: BSD\n\n" debpkg_string_temp += "License: BSD\n" debpkg_string_temp += " Revised BSD License\n\n" @@ -609,23 +299,19 @@ def which_exec(execfile): debpkg_file_temp.close() os.chmod(debpkg_copyright_file, int("0644", 8)) -debpkg_rules_file = os.path.realpath(debpkg_debian_dir + os.path.sep + "rules") -print("generating file " + debpkg_rules_file) -if (sys.version[0] == "2" and (buildsystem == - "python" or buildsystem == "python_distutils")): +debpkg_rules_file = os.path.realpath(debpkg_debian_dir+os.path.sep+"rules") +print("generating file "+debpkg_rules_file) +if(sys.version[0] == "2" and (buildsystem == "python" or buildsystem == "python_distutils")): debpkg_string_temp = "#!/usr/bin/make -f\n\n" - debpkg_string_temp += "# This file was automatically generated by " + \ - profullname + " at\n" - debpkg_string_temp += "# " + pkgtzstr + "\n\n" + debpkg_string_temp += "# This file was automatically generated by "+profullname+" at\n" + debpkg_string_temp += "# "+pkgtzstr+"\n\n" debpkg_string_temp += "export DH_VERBOSE=1\n\n" debpkg_string_temp += "%:\n" debpkg_string_temp += " dh $@ --with python2 --buildsystem=python_distutils\n" -if (sys.version[0] == "3" and (buildsystem == - "python" or buildsystem == "python_distutils")): +if(sys.version[0] == "3" and (buildsystem == "python" or buildsystem == "python_distutils")): debpkg_string_temp = "#!/usr/bin/make -f\n\n" - debpkg_string_temp += "# This file was automatically generated by " + \ - profullname + " at\n" - debpkg_string_temp += "# " + pkgtzstr + "\n\n" + debpkg_string_temp += "# This file was automatically generated by "+profullname+" at\n" + debpkg_string_temp += "# "+pkgtzstr+"\n\n" debpkg_string_temp += "export DH_VERBOSE=1\n\n" debpkg_string_temp += "%:\n" debpkg_string_temp += " dh $@ --with python3\n" @@ -636,36 +322,30 @@ def which_exec(execfile): debpkg_string_temp += "override_dh_auto_install:\n" debpkg_string_temp += " python3 setup.py install \\\n" debpkg_string_temp += " --force --root=$(CURDIR)/debian/" + \ - pkgpackage + " \\\n" + pkgpackage+" \\\n" debpkg_string_temp += " --no-compile -O0 --install-layout=deb\n\n" debpkg_string_temp += "override_dh_auto_clean:\n" debpkg_string_temp += " python3 setup.py clean\n" -if (sys.version[0] == "2" and (buildsystem == - "pybuild" or buildsystem == "python_build")): +if(sys.version[0] == "2" and (buildsystem == "pybuild" or buildsystem == "python_build")): debpkg_string_temp = "#!/usr/bin/make -f\n\n" - debpkg_string_temp += "# This file was automatically generated by " + \ - profullname + " at\n" - debpkg_string_temp += "# " + pkgtzstr + "\n\n" + debpkg_string_temp += "# This file was automatically generated by "+profullname+" at\n" + debpkg_string_temp += "# "+pkgtzstr+"\n\n" debpkg_string_temp += "export DH_VERBOSE=1\n" debpkg_string_temp += "export PYBUILD_NAME=pywww-get\n\n" debpkg_string_temp += "%:\n" debpkg_string_temp += " dh $@ --with python2 --buildsystem=pybuild\n" -if (sys.version[0] == "3" and (buildsystem == - "pybuild" or buildsystem == "python_build")): +if(sys.version[0] == "3" and (buildsystem == "pybuild" or buildsystem == "python_build")): debpkg_string_temp = 
"#!/usr/bin/make -f\n\n" - debpkg_string_temp += "# This file was automatically generated by " + \ - profullname + " at\n" - debpkg_string_temp += "# " + pkgtzstr + "\n\n" + debpkg_string_temp += "# This file was automatically generated by "+profullname+" at\n" + debpkg_string_temp += "# "+pkgtzstr+"\n\n" debpkg_string_temp += "export DH_VERBOSE=1\n" debpkg_string_temp += "export PYBUILD_NAME=pywww-get\n\n" debpkg_string_temp += "%:\n" debpkg_string_temp += " dh $@ --with python3 --buildsystem=pybuild\n" -if ((sys.version[0] == "2" or sys.version[0] == "3") - and buildsystem == "cmake"): +if((sys.version[0] == "2" or sys.version[0] == "3") and buildsystem == "cmake"): debpkg_string_temp = "#!/usr/bin/make -f\n\n" - debpkg_string_temp += "# This file was automatically generated by " + \ - profullname + " at\n" - debpkg_string_temp += "# " + pkgtzstr + "\n\n" + debpkg_string_temp += "# This file was automatically generated by "+profullname+" at\n" + debpkg_string_temp += "# "+pkgtzstr+"\n\n" debpkg_string_temp += "export DH_VERBOSE=1\n\n" debpkg_string_temp += "%:\n" debpkg_string_temp += " dh $@ --buildsystem=cmake --parallel\n" @@ -674,25 +354,22 @@ def which_exec(execfile): debpkg_file_temp.close() os.chmod(debpkg_rules_file, int("0755", 8)) -debpkg_source_dir = os.path.realpath( - debpkg_debian_dir + os.path.sep + "source") -print("creating directory " + debpkg_source_dir) -if (not os.path.exists(debpkg_source_dir)): +debpkg_source_dir = os.path.realpath(debpkg_debian_dir+os.path.sep+"source") +print("creating directory "+debpkg_source_dir) +if(not os.path.exists(debpkg_source_dir)): os.makedirs(debpkg_source_dir) os.chmod(debpkg_source_dir, int("0755", 8)) -debpkg_format_file = os.path.realpath( - debpkg_source_dir + os.path.sep + "format") -print("generating file " + debpkg_format_file) +debpkg_format_file = os.path.realpath(debpkg_source_dir+os.path.sep+"format") +print("generating file "+debpkg_format_file) debpkg_string_temp = "3.0 (native)\n" debpkg_file_temp = open(debpkg_format_file, "w") debpkg_file_temp.write(debpkg_string_temp) debpkg_file_temp.close() os.chmod(debpkg_format_file, int("0644", 8)) -debpkg_options_file = os.path.realpath( - debpkg_source_dir + os.path.sep + "options") -print("generating file " + debpkg_options_file) +debpkg_options_file = os.path.realpath(debpkg_source_dir+os.path.sep+"options") +print("generating file "+debpkg_options_file) debpkg_string_temp = "extend-diff-ignore=\"\\.egg-info\"\n" debpkg_file_temp = open(debpkg_options_file, "w") debpkg_file_temp.write(debpkg_string_temp) diff --git a/pkgbuild/ubuntu/python2/realpath.py b/pkgbuild/ubuntu/python2/realpath.py index 7294436..9e40990 100755 --- a/pkgbuild/ubuntu/python2/realpath.py +++ b/pkgbuild/ubuntu/python2/realpath.py @@ -15,24 +15,22 @@ $FileInfo: realpath.py - Last Update: 4/23/2016 Ver. 0.0.5 RC 3 - Author: cooldude2k $ ''' -from __future__ import (absolute_import, division, print_function, - unicode_literals) - -import argparse +from __future__ import absolute_import, division, print_function, unicode_literals import os import sys +import argparse __version_info__ = (0, 0, 5, "rc3") -if (__version_info__[3] is not None): - __version__ = str(__version_info__[0]) + "." + str(__version_info__[1]) + "." + str( - __version_info__[2]) + "+" + str(__version_info__[3]) -if (__version_info__[3] is None): - __version__ = str(__version_info__[ - 0]) + "." + str(__version_info__[1]) + "." 
+ str(__version_info__[2]) +if(__version_info__[3] != None): + __version__ = str(__version_info__[0])+"."+str(__version_info__[1])+"."+str( + __version_info__[2])+"+"+str(__version_info__[3]) +if(__version_info__[3] == None): + __version__ = str( + __version_info__[0])+"."+str(__version_info__[1])+"."+str(__version_info__[2]) proname = "realpath" prover = __version__ -profullname = proname + " " + prover +profullname = proname+" "+prover parser = argparse.ArgumentParser(conflict_handler="resolve", add_help=True) parser.add_argument("-v", "--version", action="version", version=profullname) diff --git a/pkgbuild/ubuntu/python2/which.py b/pkgbuild/ubuntu/python2/which.py index 09a214f..af5429a 100755 --- a/pkgbuild/ubuntu/python2/which.py +++ b/pkgbuild/ubuntu/python2/which.py @@ -15,24 +15,22 @@ $FileInfo: which.py - Last Update: 4/23/2016 Ver. 0.0.5 RC 3 - Author: cooldude2k $ ''' -from __future__ import (absolute_import, division, print_function, - unicode_literals) - -import argparse +from __future__ import absolute_import, division, print_function, unicode_literals import os import sys +import argparse __version_info__ = (0, 0, 5, "rc3") -if (__version_info__[3] is not None): - __version__ = str(__version_info__[0]) + "." + str(__version_info__[1]) + "." + str( - __version_info__[2]) + "+" + str(__version_info__[3]) -if (__version_info__[3] is None): - __version__ = str(__version_info__[ - 0]) + "." + str(__version_info__[1]) + "." + str(__version_info__[2]) +if(__version_info__[3] != None): + __version__ = str(__version_info__[0])+"."+str(__version_info__[1])+"."+str( + __version_info__[2])+"+"+str(__version_info__[3]) +if(__version_info__[3] == None): + __version__ = str( + __version_info__[0])+"."+str(__version_info__[1])+"."+str(__version_info__[2]) proname = "which" prover = __version__ -profullname = proname + " " + prover +profullname = proname+" "+prover def which_exec(execfile): diff --git a/pkgbuild/ubuntu/python3/basename.py b/pkgbuild/ubuntu/python3/basename.py index 8b41db3..346121c 100755 --- a/pkgbuild/ubuntu/python3/basename.py +++ b/pkgbuild/ubuntu/python3/basename.py @@ -15,24 +15,22 @@ $FileInfo: basename.py - Last Update: 2/15/2016 Ver. 0.0.5 RC 3 - Author: cooldude2k $ ''' -from __future__ import (absolute_import, division, print_function, - unicode_literals) - -import argparse +from __future__ import absolute_import, division, print_function, unicode_literals import os import sys +import argparse __version_info__ = (0, 0, 5, "rc3") -if (__version_info__[3] is not None): - __version__ = str(__version_info__[0]) + "." + str(__version_info__[1]) + "." + str( - __version_info__[2]) + "+" + str(__version_info__[3]) -if (__version_info__[3] is None): - __version__ = str(__version_info__[ - 0]) + "." + str(__version_info__[1]) + "." 
+ str(__version_info__[2]) +if(__version_info__[3] != None): + __version__ = str(__version_info__[0])+"."+str(__version_info__[1])+"."+str( + __version_info__[2])+"+"+str(__version_info__[3]) +if(__version_info__[3] == None): + __version__ = str( + __version_info__[0])+"."+str(__version_info__[1])+"."+str(__version_info__[2]) proname = "basename" prover = __version__ -profullname = proname + " " + prover +profullname = proname+" "+prover parser = argparse.ArgumentParser(conflict_handler="resolve", add_help=True) parser.add_argument("-v", "--version", action="version", version=profullname) diff --git a/pkgbuild/ubuntu/python3/dirname.py b/pkgbuild/ubuntu/python3/dirname.py index 5aea1fd..080502f 100755 --- a/pkgbuild/ubuntu/python3/dirname.py +++ b/pkgbuild/ubuntu/python3/dirname.py @@ -15,24 +15,22 @@ $FileInfo: dirname.py - Last Update: 2/15/2016 Ver. 0.0.5 RC 3 - Author: cooldude2k $ ''' -from __future__ import (absolute_import, division, print_function, - unicode_literals) - -import argparse +from __future__ import absolute_import, division, print_function, unicode_literals import os import sys +import argparse __version_info__ = (0, 0, 5, "rc3") -if (__version_info__[3] is not None): - __version__ = str(__version_info__[0]) + "." + str(__version_info__[1]) + "." + str( - __version_info__[2]) + "+" + str(__version_info__[3]) -if (__version_info__[3] is None): - __version__ = str(__version_info__[ - 0]) + "." + str(__version_info__[1]) + "." + str(__version_info__[2]) +if(__version_info__[3] != None): + __version__ = str(__version_info__[0])+"."+str(__version_info__[1])+"."+str( + __version_info__[2])+"+"+str(__version_info__[3]) +if(__version_info__[3] == None): + __version__ = str( + __version_info__[0])+"."+str(__version_info__[1])+"."+str(__version_info__[2]) proname = "dirname" prover = __version__ -profullname = proname + " " + prover +profullname = proname+" "+prover parser = argparse.ArgumentParser(conflict_handler="resolve", add_help=True) parser.add_argument("-v", "--version", action="version", version=profullname) diff --git a/pkgbuild/ubuntu/python3/pydeb-gen.py b/pkgbuild/ubuntu/python3/pydeb-gen.py index 58d5bf7..1ce77b4 100755 --- a/pkgbuild/ubuntu/python3/pydeb-gen.py +++ b/pkgbuild/ubuntu/python3/pydeb-gen.py @@ -15,29 +15,27 @@ $FileInfo: pydeb-gen.py - Last Update: 6/1/2016 Ver. 0.2.0 RC 1 - Author: cooldude2k $ ''' -from __future__ import (absolute_import, division, print_function, - unicode_literals) - -import argparse -import datetime -import json -import os +from __future__ import absolute_import, division, print_function, unicode_literals import re -import subprocess +import os import sys import time +import datetime +import argparse +import subprocess +import json __version_info__ = (0, 2, 0, "rc1") -if (__version_info__[3] is not None): - __version__ = str(__version_info__[0]) + "." + str(__version_info__[1]) + "." + str( - __version_info__[2]) + "+" + str(__version_info__[3]) -if (__version_info__[3] is None): - __version__ = str(__version_info__[ - 0]) + "." + str(__version_info__[1]) + "." 
+ str(__version_info__[2]) +if(__version_info__[3] != None): + __version__ = str(__version_info__[0])+"."+str(__version_info__[1])+"."+str( + __version_info__[2])+"+"+str(__version_info__[3]) +if(__version_info__[3] == None): + __version__ = str( + __version_info__[0])+"."+str(__version_info__[1])+"."+str(__version_info__[2]) proname = "pydeb-gen" prover = __version__ -profullname = proname + " " + prover +profullname = proname+" "+prover buildsystem = "pybuild" @@ -47,327 +45,35 @@ def which_exec(execfile): return path + "/" + execfile -distupnametover = { - 'Warty': "4.10", - 'Hoary': "5.04", - 'Breezy': "5.10", - 'Dapper': "6.06", - 'Edgy': "6.10", - 'Feisty': "7.04", - 'Gutsy': "7.10", - 'Hardy': "8.04", - 'Intrepid': "8.10", - 'Jaunty': "9.04", - 'Karmic': "9.10", - 'Lucid': "10.04", - 'Maverick': "10.10", - 'Natty': "11.04", - 'Oneiric': "11.10", - 'Precise': "12.04", - 'Quantal': "12.10", - 'Raring': "13.04", - 'Saucy': "13.10", - 'Trusty': "14.04", - 'Utopic': "14.10", - 'Vivid': "15.04", - 'Wily': "15.10", - 'Xenial': "16.04", - 'Yakkety': "16.10", - 'Zesty': "16.10"} -distnametover = { - 'warty': "4.10", - 'hoary': "5.04", - 'breezy': "5.10", - 'dapper': "6.06", - 'edgy': "6.10", - 'feisty': "7.04", - 'gutsy': "7.10", - 'hardy': "8.04", - 'intrepid': "8.10", - 'jaunty': "9.04", - 'karmic': "9.10", - 'lucid': "10.04", - 'maverick': "10.10", - 'natty': "11.04", - 'oneiric': "11.10", - 'precise': "12.04", - 'quantal': "12.10", - 'raring': "13.04", - 'saucy': "13.10", - 'trusty': "14.04", - 'utopic': "14.10", - 'vivid': "15.04", - 'wily': "15.10", - 'xenial': "16.04", - 'yakkety': "16.10", - 'zesty': "17.04"} +distupnametover = {'Warty': "4.10", 'Hoary': "5.04", 'Breezy': "5.10", 'Dapper': "6.06", 'Edgy': "6.10", 'Feisty': "7.04", 'Gutsy': "7.10", 'Hardy': "8.04", 'Intrepid': "8.10", 'Jaunty': "9.04", 'Karmic': "9.10", 'Lucid': "10.04", 'Maverick': "10.10", + 'Natty': "11.04", 'Oneiric': "11.10", 'Precise': "12.04", 'Quantal': "12.10", 'Raring': "13.04", 'Saucy': "13.10", 'Trusty': "14.04", 'Utopic': "14.10", 'Vivid': "15.04", 'Wily': "15.10", 'Xenial': "16.04", 'Yakkety': "16.10", 'Zesty': "16.10"} +distnametover = {'warty': "4.10", 'hoary': "5.04", 'breezy': "5.10", 'dapper': "6.06", 'edgy': "6.10", 'feisty': "7.04", 'gutsy': "7.10", 'hardy': "8.04", 'intrepid': "8.10", 'jaunty': "9.04", 'karmic': "9.10", 'lucid': "10.04", 'maverick': "10.10", + 'natty': "11.04", 'oneiric': "11.10", 'precise': "12.04", 'quantal': "12.10", 'raring': "13.04", 'saucy': "13.10", 'trusty': "14.04", 'utopic': "14.10", 'vivid': "15.04", 'wily': "15.10", 'xenial': "16.04", 'yakkety': "16.10", 'zesty': "17.04"} distnamelist = distnametover.keys() -distvertoname = { - '4.10': "warty", - '5.04': "hoary", - '5.10': "breezy", - '6.06': "dapper", - '6.10': "edgy", - '7.04': "feisty", - '7.10': "gutsy", - '8.04': "hardy", - '8.10': "intrepid", - '9.04': "jaunty", - '9.10': "karmic", - '10.04': "lucid", - '10.10': "maverick", - '11.04': "natty", - '11.10': "oneiric", - '12.04': "precise", - '12.10': "quantal", - '13.04': "raring", - '13.10': "saucy", - '14.04': "trusty", - '14.10': "utopic", - '15.04': "vivid", - '15.10': "wily", - '16.04': "xenial", - '17.04': "zesty"} -distvertoupname = { - '4.10': "Warty", - '5.04': "Hoary", - '5.10': "Breezy", - '6.06': "Dapper", - '6.10': "Edgy", - '7.04': "Feisty", - '7.10': "Gutsy", - '8.04': "Hardy", - '8.10': "Intrepid", - '9.04': "Jaunty", - '9.10': "Karmic", - '10.04': "Lucid", - '10.10': "Maverick", - '11.04': "Natty", - '11.10': "Oneiric", - '12.04': "Precise", - 
'12.10': "Quantal", - '13.04': "Raring", - '13.10': "Saucy", - '14.04': "Trusty", - '14.10': "Utopic", - '15.04': "Vivid", - '15.10': "Wily", - '16.04': "Xenial", - '17.04': "Zesty"} +distvertoname = {'4.10': "warty", '5.04': "hoary", '5.10': "breezy", '6.06': "dapper", '6.10': "edgy", '7.04': "feisty", '7.10': "gutsy", '8.04': "hardy", '8.10': "intrepid", '9.04': "jaunty", '9.10': "karmic", '10.04': "lucid", + '10.10': "maverick", '11.04': "natty", '11.10': "oneiric", '12.04': "precise", '12.10': "quantal", '13.04': "raring", '13.10': "saucy", '14.04': "trusty", '14.10': "utopic", '15.04': "vivid", '15.10': "wily", '16.04': "xenial", '17.04': "zesty"} +distvertoupname = {'4.10': "Warty", '5.04': "Hoary", '5.10': "Breezy", '6.06': "Dapper", '6.10': "Edgy", '7.04': "Feisty", '7.10': "Gutsy", '8.04': "Hardy", '8.10': "Intrepid", '9.04': "Jaunty", '9.10': "Karmic", '10.04': "Lucid", + '10.10': "Maverick", '11.04': "Natty", '11.10': "Oneiric", '12.04': "Precise", '12.10': "Quantal", '13.04': "Raring", '13.10': "Saucy", '14.04': "Trusty", '14.10': "Utopic", '15.04': "Vivid", '15.10': "Wily", '16.04': "Xenial", '17.04': "Zesty"} distnamelistalt = distvertoname.values() -distnametoveralt = { - 'Warty Warthog': "4.10", - 'Hoary Hedgehog': "5.04", - 'Breezy Badger': "5.10", - 'Dapper Drake': "6.06", - 'Edgy Eft': "6.10", - 'Feisty Fawn': "7.04", - 'Gutsy Gibbon': "7.10", - 'Hardy Heron': "8.04", - 'Intrepid Ibex': "8.10", - 'Jaunty Jackalope': "9.04", - 'Karmic Koala': "9.10", - 'Lucid Lynx': "10.04", - 'Maverick Meerkat': "10.10", - 'Natty Narwhal': "11.04", - 'Oneiric Ocelot': "11.10", - 'Precise Pangolin': "12.04", - 'Quantal Quetzal': "12.10", - 'Raring Ringtail': "13.04", - 'Saucy Salamander': "13.10", - 'Trusty Tahr': "14.04", - 'Utopic Unicorn': "14.10", - 'Vivid Vervet': "15.04", - 'Wily Werewolf': "15.10", - 'Xenial Xerus': "16.04", - 'Yakkety Yak': "16.10", - 'Zesty Zapus': "17.04"} -distvertonamealt = { - '4.10': "Warty Warthog", - '5.04': "Hoary Hedgehog", - '5.10': "Breezy Badger", - '6.06': "Dapper Drake", - '6.10': "Edgy Eft", - '7.04': "Feisty Fawn", - '7.10': "Gutsy Gibbon", - '8.04': "Hardy Heron", - '8.10': "Intrepid Ibex", - '9.04': "Jaunty Jackalope", - '9.10': "Karmic Koala", - '10.04': "Lucid Lynx", - '10.10': "Maverick Meerkat", - '11.04': "Natty Narwhal", - '11.10': "Oneiric Ocelot", - '12.04': "Precise Pangolin", - '12.10': "Quantal Quetzal", - '13.04': "Raring Ringtail", - '13.10': "Saucy Salamander", - '14.04': "Trusty Tahr", - '14.10': "Utopic Unicorn", - '15.04': "Vivid Vervet", - '15.10': "Wily Werewolf", - '16.04': "Xenial Xerus", - '16.10': "Yakkety Yak", - '17.04': "Zesty Zapus"} +distnametoveralt = {'Warty Warthog': "4.10", 'Hoary Hedgehog': "5.04", 'Breezy Badger': "5.10", 'Dapper Drake': "6.06", 'Edgy Eft': "6.10", + 'Feisty Fawn': "7.04", 'Gutsy Gibbon': "7.10", 'Hardy Heron': "8.04", 'Intrepid Ibex': "8.10", 'Jaunty Jackalope': "9.04", 'Karmic Koala': "9.10", 'Lucid Lynx': "10.04", 'Maverick Meerkat': "10.10", 'Natty Narwhal': "11.04", 'Oneiric Ocelot': "11.10", 'Precise Pangolin': "12.04", 'Quantal Quetzal': "12.10", 'Raring Ringtail': "13.04", 'Saucy Salamander': "13.10", 'Trusty Tahr': "14.04", 'Utopic Unicorn': "14.10", 'Vivid Vervet': "15.04", 'Wily Werewolf': "15.10", 'Xenial Xerus': "16.04", 'Yakkety Yak': "16.10", 'Zesty Zapus': "17.04"} +distvertonamealt = {'4.10': "Warty Warthog", '5.04': "Hoary Hedgehog", '5.10': "Breezy Badger", '6.06': "Dapper Drake", '6.10': "Edgy Eft", + '7.04': "Feisty Fawn", '7.10': "Gutsy Gibbon", '8.04': "Hardy Heron", 
'8.10': "Intrepid Ibex", '9.04': "Jaunty Jackalope", '9.10': "Karmic Koala", '10.04': "Lucid Lynx", '10.10': "Maverick Meerkat", '11.04': "Natty Narwhal", '11.10': "Oneiric Ocelot", '12.04': "Precise Pangolin", '12.10': "Quantal Quetzal", '13.04': "Raring Ringtail", '13.10': "Saucy Salamander", '14.04': "Trusty Tahr", '14.10': "Utopic Unicorn", '15.04': "Vivid Vervet", '15.10': "Wily Werewolf", '16.04': "Xenial Xerus", '16.10': "Yakkety Yak", '17.04': "Zesty Zapus"} -lmdistvertoname = { - '1.0': "ada", - '2.0': "barbara", - '2.1': "bea", - '2.2': "bianca", - '3.0': "cassandra", - '3.1': "celena", - '4.0': "daryna", - '5': "elyssa", - '6': "felicia", - '7': "gloria", - '8': "helena", - '9': "isadora", - '10': "julia", - '11': "katya", - '12': "lisa", - '13': "maya", - '14': "nadia", - '15': "olivia", - '16': "petra", - '17': "qiana", - '17.1': "rebecca", - '17.2': "rafaela", - '17.3': "rosa", - '18': "sarah", - '18.1': "serena"} -lmdistvertonamealt = { - '1.0': "Ada", - '2.0': "Barbara", - '2.1': "Bea", - '2.2': "Bianca", - '3.0': "Cassandra", - '3.1': "Celena", - '4.0': "Daryna", - '5': "Elyssa", - '6': "Felicia", - '7': "Gloria", - '8': "Helena", - '9': "Isadora", - '10': "Julia", - '11': "Katya", - '12': "Lisa", - '13': "Maya", - '14': "Nadia", - '15': "Olivia", - '16': "Petra", - '17': "Qiana", - '17.1': "Rebecca", - '17.2': "Rafaela", - '17.3': "Rosa", - '18': "Sarah", - '18.1': "Serena"} +lmdistvertoname = {'1.0': "ada", '2.0': "barbara", '2.1': "bea", '2.2': "bianca", '3.0': "cassandra", '3.1': "celena", '4.0': "daryna", '5': "elyssa", '6': "felicia", '7': "gloria", '8': "helena", '9': "isadora", + '10': "julia", '11': "katya", '12': "lisa", '13': "maya", '14': "nadia", '15': "olivia", '16': "petra", '17': "qiana", '17.1': "rebecca", '17.2': "rafaela", '17.3': "rosa", '18': "sarah", '18.1': "serena"} +lmdistvertonamealt = {'1.0': "Ada", '2.0': "Barbara", '2.1': "Bea", '2.2': "Bianca", '3.0': "Cassandra", '3.1': "Celena", '4.0': "Daryna", '5': "Elyssa", '6': "Felicia", '7': "Gloria", '8': "Helena", '9': "Isadora", + '10': "Julia", '11': "Katya", '12': "Lisa", '13': "Maya", '14': "Nadia", '15': "Olivia", '16': "Petra", '17': "Qiana", '17.1': "Rebecca", '17.2': "Rafaela", '17.3': "Rosa", '18': "Sarah", '18.1': "Serena"} lmdistnamelistalt = lmdistvertoname.values() -lmdistnametover = { - 'ada': "1.0", - 'barbara': "2.0", - 'bea': "2.1", - 'bianca': "2.2", - 'cassandra': "3.0", - 'celena': "3.1", - 'daryna': "4.0", - 'elyssa': "5", - 'felicia': "6", - 'gloria': "7", - 'helena': "8", - 'isadora': "9", - 'julia': "10", - 'katya': "11", - 'lisa': "12", - 'maya': "13", - 'nadia': "14", - 'olivia': "15", - 'petra': "16", - 'qiana': "17", - 'rebecca': "17.1", - 'rafaela': "17.2", - 'rosa': "17.3", - 'sarah': "18", - 'serena': "18.1"} -lmdistnametoveralt = { - 'Ada': "1.0", - 'Barbara': "2.0", - 'Bea': "2.1", - 'Bianca': "2.2", - 'Cassandra': "3.0", - 'Celena': "3.1", - 'Daryna': "4.0", - 'Elyssa': "5", - 'Felicia': "6", - 'Gloria': "7", - 'Helena': "8", - 'Isadora': "9", - 'Julia': "10", - 'Katya': "11", - 'Lisa': "12", - 'Maya': "13", - 'Nadia': "14", - 'Olivia': "15", - 'Petra': "16", - 'Qiana': "17", - 'Rebecca': "17.1", - 'Rafaela': "17.2", - 'Rosa': "17.3", - 'Sarah': "18", - 'Serena': "18.1"} +lmdistnametover = {'ada': "1.0", 'barbara': "2.0", 'bea': "2.1", 'bianca': "2.2", 'cassandra': "3.0", 'celena': "3.1", 'daryna': "4.0", 'elyssa': "5", 'felicia': "6", 'gloria': "7", 'helena': "8", 'isadora': "9", + 'julia': "10", 'katya': "11", 'lisa': "12", 'maya': "13", 'nadia': "14", 'olivia': 
"15", 'petra': "16", 'qiana': "17", 'rebecca': "17.1", 'rafaela': "17.2", 'rosa': "17.3", 'sarah': "18", 'serena': "18.1"} +lmdistnametoveralt = {'Ada': "1.0", 'Barbara': "2.0", 'Bea': "2.1", 'Bianca': "2.2", 'Cassandra': "3.0", 'Celena': "3.1", 'Daryna': "4.0", 'Elyssa': "5", 'Felicia': "6", 'Gloria': "7", 'Helena': "8", 'Isadora': "9", + 'Julia': "10", 'Katya': "11", 'Lisa': "12", 'Maya': "13", 'Nadia': "14", 'Olivia': "15", 'Petra': "16", 'Qiana': "17", 'Rebecca': "17.1", 'Rafaela': "17.2", 'Rosa': "17.3", 'Sarah': "18", 'Serena': "18.1"} lmdistnamelist = lmdistnametover.keys() -distlmnametouname = { - "ada": "dapper", - "barbara": "edgy", - "bea": "edgy", - "bianca": "edgy", - "cassandra": "feisty", - "celena": "feisty", - "daryna": "gutsy", - "elyssa": "hardy", - "felicia": "intrepid", - "gloria": "jaunty", - "helena": "karmic", - "isadora": "lucid", - "julia": "maverick", - "katya": "natty", - "lisa": "oneiric", - "maya": "precise", - "nadia": "quantal", - "olivia": "raring", - "petra": "saucy", - "qiana": "trusty", - "rebecca": "trusty", - "rafaela": "trusty", - "rosa": "trusty", - "sarah": "xenial", - "serena": "xenial"} -distlmnametounamealt = { - "Ada": "Dapper", - "Barbara": "Edgy", - "Bea": "Edgy", - "Bianca": "Edgy", - "Cassandra": "Feisty", - "Celena": "Feisty", - "Daryna": "Gutsy", - "Elyssa": "Hardy", - "Felicia": "Intrepid", - "Gloria": "Jaunty", - "Helena": "Karmic", - "Isadora": "Lucid", - "Julia": "Maverick", - "Katya": "Natty", - "Lisa": "Oneiric", - "Maya": "Precise", - "Nadia": "Quantal", - "Olivia": "Raring", - "Petra": "Saucy", - "Qiana": "Trusty", - "Rebecca": "Trusty", - "Rafaela": "Trusty", - "Rosa": "Trusty", - "Sarah": "Xenial", - "Serena": "Xenial"} +distlmnametouname = {"ada": "dapper", "barbara": "edgy", "bea": "edgy", "bianca": "edgy", "cassandra": "feisty", "celena": "feisty", "daryna": "gutsy", "elyssa": "hardy", "felicia": "intrepid", "gloria": "jaunty", "helena": "karmic", "isadora": "lucid", + "julia": "maverick", "katya": "natty", "lisa": "oneiric", "maya": "precise", "nadia": "quantal", "olivia": "raring", "petra": "saucy", "qiana": "trusty", "rebecca": "trusty", "rafaela": "trusty", "rosa": "trusty", "sarah": "xenial", "serena": "xenial"} +distlmnametounamealt = {"Ada": "Dapper", "Barbara": "Edgy", "Bea": "Edgy", "Bianca": "Edgy", "Cassandra": "Feisty", "Celena": "Feisty", "Daryna": "Gutsy", "Elyssa": "Hardy", "Felicia": "Intrepid", "Gloria": "Jaunty", "Helena": "Karmic", "Isadora": "Lucid", + "Julia": "Maverick", "Katya": "Natty", "Lisa": "Oneiric", "Maya": "Precise", "Nadia": "Quantal", "Olivia": "Raring", "Petra": "Saucy", "Qiana": "Trusty", "Rebecca": "Trusty", "Rafaela": "Trusty", "Rosa": "Trusty", "Sarah": "Xenial", "Serena": "Xenial"} ubuntu_oldstable = "wily" ubuntu_stable = "xenial" @@ -393,27 +99,24 @@ def which_exec(execfile): action="store_true", help="get pkg source") getargs = parser.parse_args() getargs.source = os.path.realpath(getargs.source) -pkgsetuppy = os.path.realpath(getargs.source + os.path.sep + "setup.py") +pkgsetuppy = os.path.realpath(getargs.source+os.path.sep+"setup.py") pyexecpath = os.path.realpath(sys.executable) -if (not os.path.exists(getargs.source) or not os.path.isdir(getargs.source)): +if(not os.path.exists(getargs.source) or not os.path.isdir(getargs.source)): raise Exception("Could not find directory.") -if (not os.path.exists(pkgsetuppy) or not os.path.isfile(pkgsetuppy)): +if(not os.path.exists(pkgsetuppy) or not os.path.isfile(pkgsetuppy)): raise Exception("Could not find setup.py in directory.") 
getargs.codename = distlmnametouname.get(getargs.codename, getargs.codename) getargs.codename = distlmnametounamealt.get(getargs.codename, getargs.codename) getargs.codename = getargs.codename.lower() -if (getargs.codename not in distnamelist): - print("Could not build for ubuntu " + getargs.codename + " codename.") +if(not getargs.codename in distnamelist): + print("Could not build for ubuntu "+getargs.codename+" codename.") sys.exit() -pypkgenlistp = subprocess.Popen([pyexecpath, - pkgsetuppy, - "getversioninfo"], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE) +pypkgenlistp = subprocess.Popen( + [pyexecpath, pkgsetuppy, "getversioninfo"], stdout=subprocess.PIPE, stderr=subprocess.PIPE) pypkgenout, pypkgenerr = pypkgenlistp.communicate() -if (sys.version[0] == "3"): +if(sys.version[0] == "3"): pypkgenout = pypkgenout.decode('utf-8') pymodule = json.loads(pypkgenout) setuppy_verinfo = pymodule['versionlist'] @@ -428,153 +131,140 @@ def which_exec(execfile): setuppy_downloadurl = pymodule['downloadurl'] setuppy_longdescription = pymodule['longdescription'] setuppy_platforms = pymodule['platforms'] -standverfilename = os.path.realpath( - os.path.sep + - "usr" + - os.path.sep + - "share" + - os.path.sep + - "lintian" + - os.path.sep + - "data" + - os.path.sep + - "standards-version" + - os.path.sep + - "release-dates") +standverfilename = os.path.realpath(os.path.sep+"usr"+os.path.sep+"share"+os.path.sep + + "lintian"+os.path.sep+"data"+os.path.sep+"standards-version"+os.path.sep+"release-dates") standverfile = open(standverfilename, "r") standverdata = standverfile.read() standverfile.close() -getstandver = re.findall("([0-9]\\.[0-9]\\.[0-9])\\s+([0-9]+)", standverdata) +getstandver = re.findall("([0-9]\.[0-9]\.[0-9])\s+([0-9]+)", standverdata) getcurstandver = getstandver[0][0] dpkglocatout = which_exec("dpkg") pydpkglistp = subprocess.Popen( [dpkglocatout, "-s", "debhelper"], stdout=subprocess.PIPE, stderr=subprocess.PIPE) pydpkgout, pydpkgerr = pydpkglistp.communicate() -if (sys.version[0] == "3"): +if(sys.version[0] == "3"): pydpkgout = pydpkgout.decode("utf-8") -pydpkg_esc = re.escape("Version:") + '\\s+([0-9]+)' + re.escape(".") +pydpkg_esc = re.escape("Version:")+'\s+([0-9]+)'+re.escape(".") pydpkg_val = re.findall(pydpkg_esc, pydpkgout.replace("ubuntu", "."))[0] -if (sys.version[0] == "2"): +if(sys.version[0] == "2"): pkgsource = "py2www-get" -if (sys.version[0] == "3"): +if(sys.version[0] == "3"): pkgsource = "py3www-get" pkgupstreamname = "PyWWW-Get" -pkgveralt = str(setuppy_verinfo[0]) + "." + \ - str(setuppy_verinfo[1]) + "." + str(setuppy_verinfo[2]) -pkgver = str(pkgveralt) + "~rc" + str(setuppy_verinfo[4]) + "~" + getargs.codename + str( +pkgveralt = str(setuppy_verinfo[0])+"." 
+ \ + str(setuppy_verinfo[1])+"."+str(setuppy_verinfo[2]) +pkgver = str(pkgveralt)+"~rc"+str(setuppy_verinfo[4])+"~"+getargs.codename+str( distnametover.get(getargs.codename, "1").replace(".", "")) pkgdistname = getargs.codename pkgurgency = "urgency=low" pkgauthorname = setuppy_author pkgauthoremail = setuppy_authoremail -pkgauthor = pkgauthorname + " <" + pkgauthoremail + ">" +pkgauthor = pkgauthorname+" <"+pkgauthoremail+">" pkgmaintainername = setuppy_maintainer pkgmaintaineremail = setuppy_maintaineremail -pkgmaintainer = pkgmaintainername + " <" + pkgmaintaineremail + ">" +pkgmaintainer = pkgmaintainername+" <"+pkgmaintaineremail+">" pkggiturl = "https://github.com/GameMaker2k/PyWWW-Get.git" pkghomepage = setuppy_url pkgsection = "python" pkgpriority = "optional" -if (sys.version[0] == "2"): +if(sys.version[0] == "2"): pkgbuilddepends = "python-setuptools, python-all, debhelper, dh-python, devscripts" -if (sys.version[0] == "3"): +if(sys.version[0] == "3"): pkgbuilddepends = "python3-setuptools, python3-all, debhelper, dh-python, devscripts" -if (getargs.codename == "lucid" or getargs.codename == "precise"): - if (sys.version[0] == "2"): +if(getargs.codename == "lucid" or getargs.codename == "precise"): + if(sys.version[0] == "2"): pkgbuilddepends = "python-setuptools, python-all, debhelper, dh-python, devscripts" - if (sys.version[0] == "3"): + if(sys.version[0] == "3"): pkgbuilddepends = "python3-setuptools, python3-all, debhelper, dh-python, devscripts" pkgstandardsversion = getcurstandver -if (sys.version[0] == "2"): +if(sys.version[0] == "2"): pkgpackage = "python-pywww-get" pkgoldname = "python-www-get" -if (sys.version[0] == "3"): +if(sys.version[0] == "3"): pkgpackage = "python3-pywww-get" pkgoldname = "python3-www-get" pkgarchitecture = "all" -if (sys.version[0] == "2"): +if(sys.version[0] == "2"): pkgdepends = "${misc:Depends}, ${python:Depends}" -if (sys.version[0] == "3"): +if(sys.version[0] == "3"): pkgdepends = "${misc:Depends}, ${python3:Depends}" -pkgdescription = setuppy_description + "\n " + setuppy_longdescription +pkgdescription = setuppy_description+"\n "+setuppy_longdescription pkgtzstr = time.strftime("%a, %d %b %Y %H:%M:%S %z") -if (getargs.getsource): +if(getargs.getsource == True): print(getargs.source) sys.exit() -if (getargs.getparent): +if(getargs.getparent == True): print(os.path.realpath(os.path.dirname(getargs.source))) sys.exit() -if (getargs.getdirname): - print(pkgsource + "_" + pkgveralt + ".orig") +if(getargs.getdirname == True): + print(pkgsource+"_"+pkgveralt+".orig") sys.exit() -if (getargs.gettarname): - print(pkgsource + "_" + pkgveralt + ".orig.tar.gz") +if(getargs.gettarname == True): + print(pkgsource+"_"+pkgveralt+".orig.tar.gz") sys.exit() -if (getargs.getpkgsource): +if(getargs.getpkgsource == True): print(pkgsource) sys.exit() print("generating debian package build directory") -debpkg_debian_dir = os.path.realpath(getargs.source + os.path.sep + "debian") -print("creating directory " + debpkg_debian_dir) -if (not os.path.exists(debpkg_debian_dir)): +debpkg_debian_dir = os.path.realpath(getargs.source+os.path.sep+"debian") +print("creating directory "+debpkg_debian_dir) +if(not os.path.exists(debpkg_debian_dir)): os.makedirs(debpkg_debian_dir) os.chmod(debpkg_debian_dir, int("0755", 8)) debpkg_changelog_file = os.path.realpath( - debpkg_debian_dir + os.path.sep + "changelog") -print("generating file " + debpkg_changelog_file) + debpkg_debian_dir+os.path.sep+"changelog") +print("generating file "+debpkg_changelog_file) debpkg_string_temp 
= pkgsource + \ - " (" + pkgver + ") " + pkgdistname + "; " + pkgurgency + "\n\n" -debpkg_string_temp += " * source package automatically created by " + profullname + "\n\n" -debpkg_string_temp += " -- " + pkgmaintainer + " " + pkgtzstr + "\n" + " ("+pkgver+") "+pkgdistname+"; "+pkgurgency+"\n\n" +debpkg_string_temp += " * source package automatically created by "+profullname+"\n\n" +debpkg_string_temp += " -- "+pkgmaintainer+" "+pkgtzstr+"\n" debpkg_file_temp = open(debpkg_changelog_file, "w") debpkg_file_temp.write(debpkg_string_temp) debpkg_file_temp.close() os.chmod(debpkg_changelog_file, int("0644", 8)) -debpkg_compat_file = os.path.realpath( - debpkg_debian_dir + os.path.sep + "compat") -print("generating file " + debpkg_compat_file) -debpkg_string_temp = str(pydpkg_val) + "\n" +debpkg_compat_file = os.path.realpath(debpkg_debian_dir+os.path.sep+"compat") +print("generating file "+debpkg_compat_file) +debpkg_string_temp = str(pydpkg_val)+"\n" debpkg_file_temp = open(debpkg_compat_file, "w") debpkg_file_temp.write(debpkg_string_temp) debpkg_file_temp.close() os.chmod(debpkg_compat_file, int("0644", 8)) -debpkg_control_file = os.path.realpath( - debpkg_debian_dir + os.path.sep + "control") -print("generating file " + debpkg_control_file) -debpkg_string_temp = "Source: " + pkgsource + "\n" -debpkg_string_temp += "Maintainer: " + pkgmaintainer + "\n" -debpkg_string_temp += "Homepage: " + pkghomepage + "\n" -debpkg_string_temp += "Vcs-Git: " + pkggiturl + "\n" -debpkg_string_temp += "Vcs-Browser: " + pkghomepage + "\n" -debpkg_string_temp += "Section: " + pkgsection + "\n" -debpkg_string_temp += "Priority: " + pkgpriority + "\n" -debpkg_string_temp += "Build-Depends: " + pkgbuilddepends + "\n" -debpkg_string_temp += "Standards-Version: " + pkgstandardsversion + "\n\n" -debpkg_string_temp += "Package: " + pkgpackage + "\n" -debpkg_string_temp += "Architecture: " + pkgarchitecture + "\n" -debpkg_string_temp += "Depends: " + pkgdepends + "\n" -debpkg_string_temp += "Replaces: " + pkgoldname + "\n" -debpkg_string_temp += "Description: " + pkgdescription + "\n" +debpkg_control_file = os.path.realpath(debpkg_debian_dir+os.path.sep+"control") +print("generating file "+debpkg_control_file) +debpkg_string_temp = "Source: "+pkgsource+"\n" +debpkg_string_temp += "Maintainer: "+pkgmaintainer+"\n" +debpkg_string_temp += "Homepage: "+pkghomepage+"\n" +debpkg_string_temp += "Vcs-Git: "+pkggiturl+"\n" +debpkg_string_temp += "Vcs-Browser: "+pkghomepage+"\n" +debpkg_string_temp += "Section: "+pkgsection+"\n" +debpkg_string_temp += "Priority: "+pkgpriority+"\n" +debpkg_string_temp += "Build-Depends: "+pkgbuilddepends+"\n" +debpkg_string_temp += "Standards-Version: "+pkgstandardsversion+"\n\n" +debpkg_string_temp += "Package: "+pkgpackage+"\n" +debpkg_string_temp += "Architecture: "+pkgarchitecture+"\n" +debpkg_string_temp += "Depends: "+pkgdepends+"\n" +debpkg_string_temp += "Replaces: "+pkgoldname+"\n" +debpkg_string_temp += "Description: "+pkgdescription+"\n" debpkg_file_temp = open(debpkg_control_file, "w") debpkg_file_temp.write(debpkg_string_temp) debpkg_file_temp.close() os.chmod(debpkg_control_file, int("0644", 8)) debpkg_copyright_file = os.path.realpath( - debpkg_debian_dir + os.path.sep + "copyright") -print("generating file " + debpkg_copyright_file) + debpkg_debian_dir+os.path.sep+"copyright") +print("generating file "+debpkg_copyright_file) debpkg_string_temp = "Format: http://www.debian.org/doc/packaging-manuals/copyright-format/1.0/\n" -debpkg_string_temp += "Upstream-Name: " + 
pkgupstreamname + "\n" -debpkg_string_temp += "Source: " + pkghomepage + "\n\n" +debpkg_string_temp += "Upstream-Name: "+pkgupstreamname+"\n" +debpkg_string_temp += "Source: "+pkghomepage+"\n\n" debpkg_string_temp += "Files: *\n" -debpkg_string_temp += "Copyright: Copyright 2011-2016 " + pkgauthor + "\n" +debpkg_string_temp += "Copyright: Copyright 2011-2016 "+pkgauthor+"\n" debpkg_string_temp += "License: BSD\n\n" debpkg_string_temp += "License: BSD\n" debpkg_string_temp += " Revised BSD License\n\n" @@ -609,23 +299,19 @@ def which_exec(execfile): debpkg_file_temp.close() os.chmod(debpkg_copyright_file, int("0644", 8)) -debpkg_rules_file = os.path.realpath(debpkg_debian_dir + os.path.sep + "rules") -print("generating file " + debpkg_rules_file) -if (sys.version[0] == "2" and (buildsystem == - "python" or buildsystem == "python_distutils")): +debpkg_rules_file = os.path.realpath(debpkg_debian_dir+os.path.sep+"rules") +print("generating file "+debpkg_rules_file) +if(sys.version[0] == "2" and (buildsystem == "python" or buildsystem == "python_distutils")): debpkg_string_temp = "#!/usr/bin/make -f\n\n" - debpkg_string_temp += "# This file was automatically generated by " + \ - profullname + " at\n" - debpkg_string_temp += "# " + pkgtzstr + "\n\n" + debpkg_string_temp += "# This file was automatically generated by "+profullname+" at\n" + debpkg_string_temp += "# "+pkgtzstr+"\n\n" debpkg_string_temp += "export DH_VERBOSE=1\n\n" debpkg_string_temp += "%:\n" debpkg_string_temp += " dh $@ --with python2 --buildsystem=python_distutils\n" -if (sys.version[0] == "3" and (buildsystem == - "python" or buildsystem == "python_distutils")): +if(sys.version[0] == "3" and (buildsystem == "python" or buildsystem == "python_distutils")): debpkg_string_temp = "#!/usr/bin/make -f\n\n" - debpkg_string_temp += "# This file was automatically generated by " + \ - profullname + " at\n" - debpkg_string_temp += "# " + pkgtzstr + "\n\n" + debpkg_string_temp += "# This file was automatically generated by "+profullname+" at\n" + debpkg_string_temp += "# "+pkgtzstr+"\n\n" debpkg_string_temp += "export DH_VERBOSE=1\n\n" debpkg_string_temp += "%:\n" debpkg_string_temp += " dh $@ --with python3\n" @@ -636,36 +322,30 @@ def which_exec(execfile): debpkg_string_temp += "override_dh_auto_install:\n" debpkg_string_temp += " python3 setup.py install \\\n" debpkg_string_temp += " --force --root=$(CURDIR)/debian/" + \ - pkgpackage + " \\\n" + pkgpackage+" \\\n" debpkg_string_temp += " --no-compile -O0 --install-layout=deb\n\n" debpkg_string_temp += "override_dh_auto_clean:\n" debpkg_string_temp += " python3 setup.py clean\n" -if (sys.version[0] == "2" and (buildsystem == - "pybuild" or buildsystem == "python_build")): +if(sys.version[0] == "2" and (buildsystem == "pybuild" or buildsystem == "python_build")): debpkg_string_temp = "#!/usr/bin/make -f\n\n" - debpkg_string_temp += "# This file was automatically generated by " + \ - profullname + " at\n" - debpkg_string_temp += "# " + pkgtzstr + "\n\n" + debpkg_string_temp += "# This file was automatically generated by "+profullname+" at\n" + debpkg_string_temp += "# "+pkgtzstr+"\n\n" debpkg_string_temp += "export DH_VERBOSE=1\n" debpkg_string_temp += "export PYBUILD_NAME=pywww-get\n\n" debpkg_string_temp += "%:\n" debpkg_string_temp += " dh $@ --with python2 --buildsystem=pybuild\n" -if (sys.version[0] == "3" and (buildsystem == - "pybuild" or buildsystem == "python_build")): +if(sys.version[0] == "3" and (buildsystem == "pybuild" or buildsystem == "python_build")): debpkg_string_temp = 
"#!/usr/bin/make -f\n\n" - debpkg_string_temp += "# This file was automatically generated by " + \ - profullname + " at\n" - debpkg_string_temp += "# " + pkgtzstr + "\n\n" + debpkg_string_temp += "# This file was automatically generated by "+profullname+" at\n" + debpkg_string_temp += "# "+pkgtzstr+"\n\n" debpkg_string_temp += "export DH_VERBOSE=1\n" debpkg_string_temp += "export PYBUILD_NAME=pywww-get\n\n" debpkg_string_temp += "%:\n" debpkg_string_temp += " dh $@ --with python3 --buildsystem=pybuild\n" -if ((sys.version[0] == "2" or sys.version[0] == "3") - and buildsystem == "cmake"): +if((sys.version[0] == "2" or sys.version[0] == "3") and buildsystem == "cmake"): debpkg_string_temp = "#!/usr/bin/make -f\n\n" - debpkg_string_temp += "# This file was automatically generated by " + \ - profullname + " at\n" - debpkg_string_temp += "# " + pkgtzstr + "\n\n" + debpkg_string_temp += "# This file was automatically generated by "+profullname+" at\n" + debpkg_string_temp += "# "+pkgtzstr+"\n\n" debpkg_string_temp += "export DH_VERBOSE=1\n\n" debpkg_string_temp += "%:\n" debpkg_string_temp += " dh $@ --buildsystem=cmake --parallel\n" @@ -674,25 +354,22 @@ def which_exec(execfile): debpkg_file_temp.close() os.chmod(debpkg_rules_file, int("0755", 8)) -debpkg_source_dir = os.path.realpath( - debpkg_debian_dir + os.path.sep + "source") -print("creating directory " + debpkg_source_dir) -if (not os.path.exists(debpkg_source_dir)): +debpkg_source_dir = os.path.realpath(debpkg_debian_dir+os.path.sep+"source") +print("creating directory "+debpkg_source_dir) +if(not os.path.exists(debpkg_source_dir)): os.makedirs(debpkg_source_dir) os.chmod(debpkg_source_dir, int("0755", 8)) -debpkg_format_file = os.path.realpath( - debpkg_source_dir + os.path.sep + "format") -print("generating file " + debpkg_format_file) +debpkg_format_file = os.path.realpath(debpkg_source_dir+os.path.sep+"format") +print("generating file "+debpkg_format_file) debpkg_string_temp = "3.0 (native)\n" debpkg_file_temp = open(debpkg_format_file, "w") debpkg_file_temp.write(debpkg_string_temp) debpkg_file_temp.close() os.chmod(debpkg_format_file, int("0644", 8)) -debpkg_options_file = os.path.realpath( - debpkg_source_dir + os.path.sep + "options") -print("generating file " + debpkg_options_file) +debpkg_options_file = os.path.realpath(debpkg_source_dir+os.path.sep+"options") +print("generating file "+debpkg_options_file) debpkg_string_temp = "extend-diff-ignore=\"\\.egg-info\"\n" debpkg_file_temp = open(debpkg_options_file, "w") debpkg_file_temp.write(debpkg_string_temp) diff --git a/pkgbuild/ubuntu/python3/realpath.py b/pkgbuild/ubuntu/python3/realpath.py index 9837108..7350f44 100755 --- a/pkgbuild/ubuntu/python3/realpath.py +++ b/pkgbuild/ubuntu/python3/realpath.py @@ -15,24 +15,22 @@ $FileInfo: realpath.py - Last Update: 2/15/2016 Ver. 0.0.5 RC 3 - Author: cooldude2k $ ''' -from __future__ import (absolute_import, division, print_function, - unicode_literals) - -import argparse +from __future__ import absolute_import, division, print_function, unicode_literals import os import sys +import argparse __version_info__ = (0, 0, 5, "rc3") -if (__version_info__[3] is not None): - __version__ = str(__version_info__[0]) + "." + str(__version_info__[1]) + "." + str( - __version_info__[2]) + "+" + str(__version_info__[3]) -if (__version_info__[3] is None): - __version__ = str(__version_info__[ - 0]) + "." + str(__version_info__[1]) + "." 
+if(__version_info__[3] != None):
+    __version__ = str(__version_info__[0])+"."+str(__version_info__[1])+"."+str(
+        __version_info__[2])+"+"+str(__version_info__[3])
+if(__version_info__[3] == None):
+    __version__ = str(
+        __version_info__[0])+"."+str(__version_info__[1])+"."+str(__version_info__[2])

 proname = "realpath"
 prover = __version__
-profullname = proname + " " + prover
+profullname = proname+" "+prover

 parser = argparse.ArgumentParser(conflict_handler="resolve", add_help=True)
 parser.add_argument("-v", "--version", action="version", version=profullname)
diff --git a/pkgbuild/ubuntu/python3/which.py b/pkgbuild/ubuntu/python3/which.py
index fde6e3d..22cccbd 100755
--- a/pkgbuild/ubuntu/python3/which.py
+++ b/pkgbuild/ubuntu/python3/which.py
@@ -15,24 +15,22 @@
 $FileInfo: which.py - Last Update: 2/15/2016 Ver. 0.0.5 RC 3 - Author: cooldude2k $
 '''
-from __future__ import (absolute_import, division, print_function,
-                        unicode_literals)
-
-import argparse
+from __future__ import absolute_import, division, print_function, unicode_literals
 import os
 import sys
+import argparse

 __version_info__ = (0, 0, 5, "rc3")
-if (__version_info__[3] is not None):
-    __version__ = str(__version_info__[0]) + "." + str(__version_info__[1]) + "." + str(
-        __version_info__[2]) + "+" + str(__version_info__[3])
-if (__version_info__[3] is None):
-    __version__ = str(__version_info__[
-        0]) + "." + str(__version_info__[1]) + "." + str(__version_info__[2])
+if(__version_info__[3] != None):
+    __version__ = str(__version_info__[0])+"."+str(__version_info__[1])+"."+str(
+        __version_info__[2])+"+"+str(__version_info__[3])
+if(__version_info__[3] == None):
+    __version__ = str(
+        __version_info__[0])+"."+str(__version_info__[1])+"."+str(__version_info__[2])

 proname = "which"
 prover = __version__
-profullname = proname + " " + prover
+profullname = proname+" "+prover


 def which_exec(execfile):
diff --git a/pyhttpserv.py b/pyhttpserv.py
index 813cf7e..e582cb6 100644
--- a/pyhttpserv.py
+++ b/pyhttpserv.py
@@ -15,11 +15,11 @@
 $FileInfo: pyhttpserv.py - Last Update: 10/5/2023 Ver. 2.0.2 RC 1 - Author: cooldude2k $
 '''
+import os
 import argparse
-import bz2
 import gzip
-import os
 import zlib
+import bz2

 havebrotli = False
 try:
@@ -47,28 +47,26 @@
 __project_url__ = "https://github.com/GameMaker2k/PyWWW-Get"
 __version_info__ = (2, 0, 2, "RC 1", 1)
 __version_date_info__ = (2023, 10, 5, "RC 1", 1)
-__version_date__ = str(__version_date_info__[0]) + "." + str(__version_date_info__[
-    1]).zfill(2) + "." + str(__version_date_info__[2]).zfill(2)
+__version_date__ = str(__version_date_info__[0])+"."+str(__version_date_info__[
+    1]).zfill(2)+"."+str(__version_date_info__[2]).zfill(2)

 __revision__ = __version_info__[3]
 __revision_id__ = "$Id$"
-if (__version_info__[4] is not None):
+if(__version_info__[4] is not None):
     __version_date_plusrc__ = __version_date__ + \
-        "-" + str(__version_date_info__[4])
-if (__version_info__[4] is None):
+        "-"+str(__version_date_info__[4])
+if(__version_info__[4] is None):
     __version_date_plusrc__ = __version_date__
-if (__version_info__[3] is not None):
-    __version__ = str(__version_info__[0]) + "." + str(__version_info__[1]) + "." + str(
-        __version_info__[2]) + " " + str(__version_info__[3])
-if (__version_info__[3] is None):
-    __version__ = str(__version_info__[
-        0]) + "." + str(__version_info__[1]) + "." + str(__version_info__[2])
+if(__version_info__[3] is not None):
+    __version__ = str(__version_info__[0])+"."+str(__version_info__[1])+"."+str(
+        __version_info__[2])+" "+str(__version_info__[3])
+if(__version_info__[3] is None):
+    __version__ = str(
+        __version_info__[0])+"."+str(__version_info__[1])+"."+str(__version_info__[2])

 parser = argparse.ArgumentParser(
-    description="Simple HTTP Server in Python.",
-    conflict_handler="resolve",
-    add_help=True)
+    description="Simple HTTP Server in Python.", conflict_handler="resolve", add_help=True)
 parser.add_argument("-V", "--version", action="version",
-                    version=__program_name__ + " " + __version__)
+                    version=__program_name__+" "+__version__)
 parser.add_argument("-e", "--enablessl", action="store_true",
                     help="Enable SSL")
 parser.add_argument("-k", "--sslkeypem", default=None,
@@ -83,44 +81,41 @@
 sslkeypem = getargs.sslkeypem
 sslcertpem = getargs.sslcertpem
 servport = int(getargs.servport)
-if (isinstance(servport, int)):
-    if (servport < 1 or servport > 65535):
+if(isinstance(servport, int)):
+    if(servport < 1 or servport > 65535):
         servport = 8080
-elif (isinstance(servport, str)):
-    if (servport.isnumeric()):
+elif(isinstance(servport, str)):
+    if(servport.isnumeric()):
         servport = int(servport)
-        if (servport < 1 or servport > 65535):
+        if(servport < 1 or servport > 65535):
             servport = 8080
     else:
        servport = 8080
 else:
     servport = 8080
-if (enablessl):
-    if (sslkeypem is not None and
+if(enablessl):
+    if(sslkeypem is not None and
        (not os.path.exists(sslkeypem) or not os.path.isfile(sslkeypem))):
         sslkeypem = None
         enablessl = False
-    if (sslcertpem is not None and
+    if(sslcertpem is not None and
        (not os.path.exists(sslkeypem) or not os.path.isfile(sslkeypem))):
         sslcertpem = None
         enablessl = False

 pyoldver = True
 try:
     from BaseHTTPServer import HTTPServer
-    from Cookie import SimpleCookie
     from SimpleHTTPServer import SimpleHTTPRequestHandler
     from urlparse import parse_qs
+    from Cookie import SimpleCookie
 except ImportError:
-    from http.cookies import SimpleCookie
-    from http.server import HTTPServer, SimpleHTTPRequestHandler
+    from http.server import SimpleHTTPRequestHandler, HTTPServer
     from urllib.parse import parse_qs
+    from http.cookies import SimpleCookie
     pyoldver = False

-if (
-    enablessl and (
-        sslkeypem is not None and (
-            os.path.exists(sslkeypem) and os.path.isfile(sslkeypem))) and (
-        sslcertpem is not None and (
-            os.path.exists(sslkeypem) and os.path.isfile(sslkeypem)))):
+if(enablessl and
+   (sslkeypem is not None and (os.path.exists(sslkeypem) and os.path.isfile(sslkeypem))) and
+   (sslcertpem is not None and (os.path.exists(sslkeypem) and os.path.isfile(sslkeypem)))):
     import ssl

 # HTTP/HTTPS Server Class
@@ -231,14 +226,11 @@ def do_POST(self):
 if __name__ == "__main__":
     server_address = ('', int(servport))
     httpd = HTTPServer(server_address, CustomHTTPRequestHandler)
-    if (enablessl and sslkeypem is not None and sslcertpem is not None):
-        httpd.socket = ssl.wrap_socket(
-            httpd.socket,
-            keyfile=sslkeypem,
-            certfile=sslcertpem,
-            server_side=True)
-    if (enablessl):
-        print("Server started at https://localhost:" + str(servport))
+    if(enablessl and sslkeypem is not None and sslcertpem is not None):
+        httpd.socket = ssl.wrap_socket(httpd.socket,
+                                       keyfile=sslkeypem, certfile=sslcertpem, server_side=True)
+    if(enablessl):
+        print("Server started at https://localhost:"+str(servport))
     else:
-        print("Server started at http://localhost:" + str(servport))
+        print("Server started at http://localhost:"+str(servport))
     httpd.serve_forever()
diff --git a/pypkg-gen.py b/pypkg-gen.py
index 9ce1fde..dfb5de2 100755
--- a/pypkg-gen.py
+++ b/pypkg-gen.py
@@ -15,29 +15,27 @@
 $FileInfo: pypkg-gen.py - Last Update: 6/1/2016 Ver. 0.2.0 RC 1 - Author: cooldude2k $
 '''
-from __future__ import (absolute_import, division, print_function,
-                        unicode_literals)
-
-import argparse
-import datetime
-import os
-import platform
+from __future__ import absolute_import, division, print_function, unicode_literals
 import re
-import subprocess
+import os
 import sys
 import time
+import platform
+import datetime
+import argparse
+import subprocess

 __version_info__ = (0, 2, 0, "rc1")
-if (__version_info__[3] is not None):
-    __version__ = str(__version_info__[0]) + "." + str(__version_info__[1]) + "." + str(
-        __version_info__[2]) + "+" + str(__version_info__[3])
-if (__version_info__[3] is None):
-    __version__ = str(__version_info__[
-        0]) + "." + str(__version_info__[1]) + "." + str(__version_info__[2])
+if(__version_info__[3] != None):
+    __version__ = str(__version_info__[0])+"."+str(__version_info__[1])+"."+str(
+        __version_info__[2])+"+"+str(__version_info__[3])
+if(__version_info__[3] == None):
+    __version__ = str(
+        __version_info__[0])+"."+str(__version_info__[1])+"."+str(__version_info__[2])

 proname = "pypkg-gen"
 prover = __version__
-profullname = proname + " " + prover
+profullname = proname+" "+prover


 def which_exec(execfile):
@@ -49,21 +47,20 @@ def which_exec(execfile):
 getlinuxdist = platform.linux_distribution()
 setdistroname = "debian"
 setdistrocname = "jessie"
-if (getlinuxdist[0].lower() == "debian" or getlinuxdist[0].lower()
-        == "ubuntu" or getlinuxdist[0].lower() == "linuxmint"):
+if(getlinuxdist[0].lower() == "debian" or getlinuxdist[0].lower() == "ubuntu" or getlinuxdist[0].lower() == "linuxmint"):
     setdistroname = getlinuxdist[0].lower()
     setdistrocname = getlinuxdist[2].lower()
-    if (setdistrocname == ""):
+    if(setdistrocname == ""):
         lsblocatout = which_exec("lsb_release")
         pylsblistp = subprocess.Popen(
             [lsblocatout, "-c"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
         pylsbout, pylsberr = pylsblistp.communicate()
-        if (sys.version[0] == "3"):
+        if(sys.version[0] == "3"):
             pylsbout = pylsbout.decode("utf-8")
-        pylsb_esc = re.escape("Codename:") + '([a-zA-Z\t+\\s+]+)'
+        pylsb_esc = re.escape("Codename:")+'([a-zA-Z\t+\s+]+)'
         pylsbname = re.findall(pylsb_esc, pylsbout)[0].lower()
         setdistrocname = pylsbname.strip()
-if (getlinuxdist[0].lower() == "archlinux"):
+if(getlinuxdist[0].lower() == "archlinux"):
     setdistroname = getlinuxdist[0].lower()
     setdistrocname = None
 parser = argparse.ArgumentParser(conflict_handler="resolve", add_help=True)
@@ -75,10 +72,7 @@ def which_exec(execfile):
 parser.add_argument("-c", "--codename", default=setdistrocname,
                     help="enter release code name")
 parser.add_argument(
-    "-p",
-    "--pyver",
-    default=sys.version[0],
-    help="enter version of python to use")
+    "-p", "--pyver", default=sys.version[0], help="enter version of python to use")
 getargs = parser.parse_args()

 bashlocatout = which_exec("bash")
@@ -87,73 +81,47 @@ def which_exec(execfile):
 getargs.codename = getargs.codename.lower()
 getargs.distro = getargs.distro.lower()

-if (getargs.pyver == "2"):
+if(getargs.pyver == "2"):
     getpyver = "python2"
-if (getargs.pyver == "3"):
+if(getargs.pyver == "3"):
     getpyver = "python3"
-if (getargs.pyver != "2" and getargs.pyver != "3"):
-    if (sys.version[0] == "2"):
+if(getargs.pyver != "2" and getargs.pyver != "3"):
+    if(sys.version[0] == "2"):
         getpyver = "python2"
-    if (sys.version[0] == "3"):
+    if(sys.version[0] == "3"):
"3"): getpyver = "python3" -get_pkgbuild_dir = os.path.realpath(getargs.source + os.path.sep + "pkgbuild") +get_pkgbuild_dir = os.path.realpath(getargs.source+os.path.sep+"pkgbuild") get_pkgbuild_dist_pre_list = [d for d in os.listdir( get_pkgbuild_dir) if os.path.isdir(os.path.join(get_pkgbuild_dir, d))] get_pkgbuild_dist_list = [] for dists in get_pkgbuild_dist_pre_list: tmp_pkgbuild_python = os.path.realpath( - get_pkgbuild_dir + os.path.sep + dists + os.path.sep + getpyver) - if (os.path.exists(tmp_pkgbuild_python) - and os.path.isdir(tmp_pkgbuild_python)): + get_pkgbuild_dir+os.path.sep+dists+os.path.sep+getpyver) + if(os.path.exists(tmp_pkgbuild_python) and os.path.isdir(tmp_pkgbuild_python)): get_pkgbuild_dist_list.append(dists) -if (getargs.distro not in get_pkgbuild_dist_list): - print("Could not build for " + getargs.distro + " distro.") +if(not getargs.distro in get_pkgbuild_dist_list): + print("Could not build for "+getargs.distro+" distro.") sys.exit() -if (getargs.distro == "debian" or getargs.distro == - "ubuntu" or getargs.distro == "linuxmint"): - pypkgpath = os.path.realpath( - getargs.source + - os.path.sep + - "pkgbuild" + - os.path.sep + - getargs.distro + - os.path.sep + - getpyver + - os.path.sep + - "pydeb-gen.sh") - pypkgenlistp = subprocess.Popen([bashlocatout, - pypkgpath, - getargs.source, - getargs.codename], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE) +if(getargs.distro == "debian" or getargs.distro == "ubuntu" or getargs.distro == "linuxmint"): + pypkgpath = os.path.realpath(getargs.source+os.path.sep+"pkgbuild" + + os.path.sep+getargs.distro+os.path.sep+getpyver+os.path.sep+"pydeb-gen.sh") + pypkgenlistp = subprocess.Popen([bashlocatout, pypkgpath, getargs.source, + getargs.codename], stdout=subprocess.PIPE, stderr=subprocess.PIPE) pypkgenout, pypkgenerr = pypkgenlistp.communicate() - if (sys.version[0] == "3"): + if(sys.version[0] == "3"): pypkgenout = pypkgenout.decode("utf-8") print(pypkgenout) pypkgenlistp.wait() -if (getargs.distro == "archlinux"): - pypkgpath = os.path.realpath( - getargs.source + - os.path.sep + - "pkgbuild" + - os.path.sep + - getargs.distro + - os.path.sep + - getpyver + - os.path.sep + - "pypac-gen.sh") - pypkgenlistp = subprocess.Popen([bashlocatout, - pypkgpath, - getargs.source, - getargs.codename], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE) +if(getargs.distro == "archlinux"): + pypkgpath = os.path.realpath(getargs.source+os.path.sep+"pkgbuild" + + os.path.sep+getargs.distro+os.path.sep+getpyver+os.path.sep+"pypac-gen.sh") + pypkgenlistp = subprocess.Popen([bashlocatout, pypkgpath, getargs.source, + getargs.codename], stdout=subprocess.PIPE, stderr=subprocess.PIPE) pypkgenout, pypkgenerr = pypkgenlistp.communicate() - if (sys.version[0] == "3"): + if(sys.version[0] == "3"): pypkgenout = pypkgenout.decode("utf-8") print(pypkgenout) pypkgenlistp.wait() diff --git a/pyverinfo.py b/pyverinfo.py index 769bcf2..8bbb1ac 100755 --- a/pyverinfo.py +++ b/pyverinfo.py @@ -1,20 +1,17 @@ #!/usr/bin/python2 -import json -import os import re -import subprocess +import os import sys +import json +import subprocess pyexecpath = os.path.realpath(sys.executable) -pkgsetuppy = os.path.realpath("." 
+ os.path.sep + "setup.py") -pypkgenlistp = subprocess.Popen([pyexecpath, - pkgsetuppy, - "getversioninfo"], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE) +pkgsetuppy = os.path.realpath("."+os.path.sep+"setup.py") +pypkgenlistp = subprocess.Popen( + [pyexecpath, pkgsetuppy, "getversioninfo"], stdout=subprocess.PIPE, stderr=subprocess.PIPE) pypkgenout, pypkgenerr = pypkgenlistp.communicate() -if (sys.version[0] == "3"): +if(sys.version[0] == "3"): pypkgenout = pypkgenout.decode('utf-8') pyconfiginfo = json.loads(pypkgenout) print(pypkgenout) diff --git a/pywwwget-dl.py b/pywwwget-dl.py index e16ff0d..ee581e5 100755 --- a/pywwwget-dl.py +++ b/pywwwget-dl.py @@ -15,15 +15,13 @@ $FileInfo: pywwwget-dl.py - Last Update: 10/5/2023 Ver. 2.0.2 RC 1 - Author: cooldude2k $ ''' -from __future__ import absolute_import, division, print_function - -import argparse -import logging as log -import os +from __future__ import division, absolute_import, print_function import re +import os import sys - import pywwwget +import argparse +import logging as log __project__ = pywwwget.__project__ __program_name__ = pywwwget.__program_name__ @@ -67,17 +65,12 @@ geturls_download_sleep = pywwwget.geturls_download_sleep parser = argparse.ArgumentParser( - description="Python libary/module to download files.", - conflict_handler="resolve", - add_help=True) + description="Python libary/module to download files.", conflict_handler="resolve", add_help=True) parser.add_argument("url", help="enter a url") parser.add_argument("-V", "--version", action="version", - version=__program_name__ + " " + __version__) -parser.add_argument( - "-u", - "--update", - action="store_true", - help="update this program to latest version. Make sure that you have sufficient permissions (run with sudo if needed)") + version=__program_name__+" "+__version__) +parser.add_argument("-u", "--update", action="store_true", + help="update this program to latest version. Make sure that you have sufficient permissions (run with sudo if needed)") parser.add_argument("-d", "--dump-user-agent", action="store_true", help="display the current browser identification") parser.add_argument("-u", "--user-agent", default=geturls_ua_firefox_windows7, @@ -88,17 +81,10 @@ help="specify a file name for output") parser.add_argument("-o", "--output-directory", default=os.path.realpath( os.getcwd()), help="specify a directory to output file to") -parser.add_argument( - "-l", - "--use-httplib", - default="urllib", - help="select library to download file can be urllib or requests or mechanize") -parser.add_argument( - "-b", - "--set-buffersize", - default=524288, - type=int, - help="set how big buffersize is in bytes. how much it will download") +parser.add_argument("-l", "--use-httplib", default="urllib", + help="select library to download file can be urllib or requests or mechanize") +parser.add_argument("-b", "--set-buffersize", default=524288, type=int, + help="set how big buffersize is in bytes. 
how much it will download") parser.add_argument("-t", "--timeout", default=10, type=int, help="set timeout time for http request") parser.add_argument("-s", "--sleep", default=10, type=int, @@ -107,74 +93,32 @@ help="print various debugging information") getargs = parser.parse_args() -if (not pywwwget.check_httplib_support(getargs.use_httplib)): +if(not pywwwget.check_httplib_support(getargs.use_httplib)): getargs.use_httplib = "urllib" getargs_cj = geturls_cj -getargs_headers = { - 'Referer': getargs.referer, - 'User-Agent': getargs.user_agent, - 'Accept-Encoding': "gzip, deflate", - 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", - 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", - 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", - 'Connection': "close"} +getargs_headers = {'Referer': getargs.referer, 'User-Agent': getargs.user_agent, 'Accept-Encoding': "gzip, deflate", 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", + 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"} getargs.output_directory = os.path.realpath(getargs.output_directory) -if (getargs.verbose): +if(getargs.verbose == True): log.basicConfig(format="%(levelname)s: %(message)s", level=log.DEBUG) -if (getargs.dump_user_agent): +if(getargs.dump_user_agent == True): print(getargs.user_agent) sys.exit() -if (getargs.output_document == "-"): - if (sys.version[0] == "2"): - precontstr = pywwwget.download_from_url_to_file( - getargs.url, - getargs_headers, - getargs.user_agent, - getargs.referer, - geturls_cj, - httplibuse=getargs.use_httplib, - buffersize=[ - getargs.set_buffersize, - getargs.set_buffersize], - outfile=getargs.output_document, - outpath=os.getcwd(), - sleep=getargs.sleep, - timeout=getargs.timeout) +if(getargs.output_document == "-"): + if(sys.version[0] == "2"): + precontstr = pywwwget.download_from_url_to_file(getargs.url, getargs_headers, getargs.user_agent, getargs.referer, geturls_cj, httplibuse=getargs.use_httplib, buffersize=[ + getargs.set_buffersize, getargs.set_buffersize], outfile=getargs.output_document, outpath=os.getcwd(), sleep=getargs.sleep, timeout=getargs.timeout) print(precontstr['Content']) - if (sys.version[0] >= "3"): - precontstr = pywwwget.download_from_url_to_file( - getargs.url, - getargs_headers, - getargs.user_agent, - getargs.referer, - geturls_cj, - httplibuse=getargs.use_httplib, - buffersize=[ - getargs.set_buffersize, - getargs.set_buffersize], - outfile=getargs.output_document, - outpath=os.getcwd(), - sleep=getargs.sleep, - timeout=getargs.timeout) + if(sys.version[0] >= "3"): + precontstr = pywwwget.download_from_url_to_file(getargs.url, getargs_headers, getargs.user_agent, getargs.referer, geturls_cj, httplibuse=getargs.use_httplib, buffersize=[ + getargs.set_buffersize, getargs.set_buffersize], outfile=getargs.output_document, outpath=os.getcwd(), sleep=getargs.sleep, timeout=getargs.timeout) print(precontstr['Content'].decode('ascii', 'replace')) -if (getargs.output_document != "-"): - pywwwget.download_from_url_to_file( - getargs.url, - getargs_headers, - getargs.user_agent, - getargs.referer, - geturls_cj, - httplibuse=getargs.use_httplib, - buffersize=[ - getargs.set_buffersize, - getargs.set_buffersize], - outfile=getargs.output_document, - outpath=getargs.output_directory, - sleep=getargs.sleep, - timeout=getargs.timeout) +if(getargs.output_document != "-"): + 
+        getargs.set_buffersize, getargs.set_buffersize], outfile=getargs.output_document, outpath=getargs.output_directory, sleep=getargs.sleep, timeout=getargs.timeout)
diff --git a/pywwwget.py b/pywwwget.py
index 5840332..78ffed0 100644
--- a/pywwwget.py
+++ b/pywwwget.py
@@ -15,28 +15,27 @@
 $FileInfo: pywwwget.py - Last Update: 10/5/2023 Ver. 2.0.2 RC 1 - Author: cooldude2k $
 '''
-from __future__ import absolute_import, division, print_function
-
-import argparse
-import bz2
-import datetime
-import email.utils
-import hashlib
-import logging as log
-import os
-import platform
+from __future__ import division, absolute_import, print_function
 import re
-import shutil
-import socket
-import subprocess
+import os
 import sys
+import hashlib
+import shutil
+import platform
 import tempfile
-import time
 import urllib
 import zlib
-from base64 import b64encode
+import bz2
+import time
+import argparse
+import subprocess
+import socket
+import email.utils
+import datetime
+import time
+import logging as log
 from ftplib import FTP, FTP_TLS
-
+from base64 import b64encode
 try:
     from cgi import parse_qsl
 except ImportError:
@@ -127,9 +126,9 @@
     havelzma = True
 except ImportError:
     havelzma = False
-if (sys.version[0] == "2"):
+if(sys.version[0] == "2"):
     try:
-        from io import BytesIO, StringIO
+        from io import StringIO, BytesIO
     except ImportError:
         try:
             from cStringIO import StringIO
@@ -138,26 +137,22 @@
             from StringIO import StringIO
         from StringIO import StringIO as BytesIO
     # From http://python-future.org/compatible_idioms.html
+    from urlparse import urlparse, urlunparse, urlsplit, urlunsplit, urljoin
     from urllib import urlencode
     from urllib import urlopen as urlopenalt
-
-    import cookielib
+    from urllib2 import urlopen, Request, install_opener, HTTPError, URLError, build_opener, HTTPCookieProcessor
     import urlparse
+    import cookielib
     from httplib import HTTPConnection, HTTPSConnection
-    from urllib2 import (HTTPCookieProcessor, HTTPError, Request, URLError,
-                         build_opener, install_opener, urlopen)
-    from urlparse import urljoin, urlparse, urlsplit, urlunparse, urlunsplit
-if (sys.version[0] >= "3"):
-    import http.cookiejar as cookielib
+if(sys.version[0] >= "3"):
+    from io import StringIO, BytesIO
+    # From http://python-future.org/compatible_idioms.html
+    from urllib.parse import urlparse, urlunparse, urlsplit, urlunsplit, urljoin, urlencode
+    from urllib.request import urlopen, Request, install_opener, build_opener, HTTPCookieProcessor
+    from urllib.error import HTTPError, URLError
     import urllib.parse as urlparse
+    import http.cookiejar as cookielib
     from http.client import HTTPConnection, HTTPSConnection
-    from io import BytesIO, StringIO
-    from urllib.error import HTTPError, URLError
-    # From http://python-future.org/compatible_idioms.html
-    from urllib.parse import (urlencode, urljoin, urlparse, urlsplit,
-                              urlunparse, urlunsplit)
-    from urllib.request import (HTTPCookieProcessor, Request, build_opener,
-                                install_opener, urlopen)

 __program_name__ = "PyWWW-Get"
 __program_alt_name__ = "PyWWWGet"
@@ -166,53 +161,50 @@
 __project_url__ = "https://github.com/GameMaker2k/PyWWW-Get"
 __version_info__ = (2, 0, 2, "RC 1", 1)
 __version_date_info__ = (2023, 10, 5, "RC 1", 1)
-__version_date__ = str(__version_date_info__[0]) + "." + str(__version_date_info__[
-    1]).zfill(2) + "." + str(__version_date_info__[2]).zfill(2)
+__version_date__ = str(__version_date_info__[0])+"."+str(__version_date_info__[
+    1]).zfill(2)+"."+str(__version_date_info__[2]).zfill(2)

 __revision__ = __version_info__[3]
 __revision_id__ = "$Id$"
-if (__version_info__[4] is not None):
+if(__version_info__[4] is not None):
     __version_date_plusrc__ = __version_date__ + \
-        "-" + str(__version_date_info__[4])
-if (__version_info__[4] is None):
+        "-"+str(__version_date_info__[4])
+if(__version_info__[4] is None):
     __version_date_plusrc__ = __version_date__
-if (__version_info__[3] is not None):
-    __version__ = str(__version_info__[0]) + "." + str(__version_info__[1]) + "." + str(
-        __version_info__[2]) + " " + str(__version_info__[3])
-if (__version_info__[3] is None):
-    __version__ = str(__version_info__[
-        0]) + "." + str(__version_info__[1]) + "." + str(__version_info__[2])
+if(__version_info__[3] is not None):
+    __version__ = str(__version_info__[0])+"."+str(__version_info__[1])+"."+str(
+        __version_info__[2])+" "+str(__version_info__[3])
+if(__version_info__[3] is None):
+    __version__ = str(
+        __version_info__[0])+"."+str(__version_info__[1])+"."+str(__version_info__[2])

 tmpfileprefix = "py" + \
-    str(sys.version_info[0]) + __program_small_name__ + \
-    str(__version_info__[0]) + "-"
+    str(sys.version_info[0])+__program_small_name__ + \
+    str(__version_info__[0])+"-"
 tmpfilesuffix = "-"
 pytempdir = tempfile.gettempdir()

 PyBitness = platform.architecture()
-if (PyBitness == "32bit" or PyBitness == "32"):
+if(PyBitness == "32bit" or PyBitness == "32"):
     PyBitness = "32"
-elif (PyBitness == "64bit" or PyBitness == "64"):
+elif(PyBitness == "64bit" or PyBitness == "64"):
     PyBitness = "64"
 else:
     PyBitness = "32"

 compression_supported_list = ['identity', 'gzip', 'deflate', 'bzip2']
-if (havebrotli):
+if(havebrotli):
     compression_supported_list.append('br')
-if (havezstd):
+if(havezstd):
     compression_supported_list.append('zstd')
-if (havelzma):
+if(havelzma):
     compression_supported_list.append('lzma')
     compression_supported_list.append('xz')
 compression_supported = ', '.join(compression_supported_list)

 geturls_cj = cookielib.CookieJar()
 windowsNT4_ua_string = "Windows NT 4.0"
-windowsNT4_ua_addon = {
-    'SEC-CH-UA-PLATFORM': "Windows",
-    'SEC-CH-UA-ARCH': "x86",
-    'SEC-CH-UA-BITNESS': "32",
-    'SEC-CH-UA-PLATFORM': "4.0.0"}
+windowsNT4_ua_addon = {'SEC-CH-UA-PLATFORM': "Windows", 'SEC-CH-UA-ARCH': "x86",
+                       'SEC-CH-UA-BITNESS': "32", 'SEC-CH-UA-PLATFORM': "4.0.0"}
 windows2k_ua_string = "Windows NT 5.0"
 windows2k_ua_addon = {'SEC-CH-UA-PLATFORM': "Windows", 'SEC-CH-UA-ARCH': "x86",
                       'SEC-CH-UA-BITNESS': "32", 'SEC-CH-UA-PLATFORM': "5.0.0"}
@@ -220,11 +212,8 @@
 windowsXP_ua_addon = {'SEC-CH-UA-PLATFORM': "Windows", 'SEC-CH-UA-ARCH': "x86",
                       'SEC-CH-UA-BITNESS': "32", 'SEC-CH-UA-PLATFORM': "5.1.0"}
 windowsXP64_ua_string = "Windows NT 5.2; Win64; x64"
-windowsXP64_ua_addon = {
-    'SEC-CH-UA-PLATFORM': "Windows",
-    'SEC-CH-UA-ARCH': "x86",
-    'SEC-CH-UA-BITNESS': "64",
-    'SEC-CH-UA-PLATFORM': "5.1.0"}
+windowsXP64_ua_addon = {'SEC-CH-UA-PLATFORM': "Windows", 'SEC-CH-UA-ARCH': "x86",
+                        'SEC-CH-UA-BITNESS': "64", 'SEC-CH-UA-PLATFORM': "5.1.0"}
 windows7_ua_string = "Windows NT 6.1; Win64; x64"
 windows7_ua_addon = {'SEC-CH-UA-PLATFORM': "Windows", 'SEC-CH-UA-ARCH': "x86",
                      'SEC-CH-UA-BITNESS': "64", 'SEC-CH-UA-PLATFORM': "6.1.0"}
@@ -235,230 +224,110 @@
 windows81_ua_addon = {'SEC-CH-UA-PLATFORM': "Windows", 'SEC-CH-UA-ARCH': "x86",
                       'SEC-CH-UA-BITNESS': "64", 'SEC-CH-UA-PLATFORM': "6.3.0"}
 windows10_ua_string = "Windows NT 10.0; Win64; x64"
10.0; Win64; x64" -windows10_ua_addon = { - 'SEC-CH-UA-PLATFORM': "Windows", - 'SEC-CH-UA-ARCH': "x86", - 'SEC-CH-UA-BITNESS': "64", - 'SEC-CH-UA-PLATFORM': "10.0.0"} +windows10_ua_addon = {'SEC-CH-UA-PLATFORM': "Windows", 'SEC-CH-UA-ARCH': "x86", + 'SEC-CH-UA-BITNESS': "64", 'SEC-CH-UA-PLATFORM': "10.0.0"} windows11_ua_string = "Windows NT 11.0; Win64; x64" -windows11_ua_addon = { - 'SEC-CH-UA-PLATFORM': "Windows", - 'SEC-CH-UA-ARCH': "x86", - 'SEC-CH-UA-BITNESS': "64", - 'SEC-CH-UA-PLATFORM': "11.0.0"} -geturls_ua_firefox_windows7 = "Mozilla/5.0 (" + windows7_ua_string + \ +windows11_ua_addon = {'SEC-CH-UA-PLATFORM': "Windows", 'SEC-CH-UA-ARCH': "x86", + 'SEC-CH-UA-BITNESS': "64", 'SEC-CH-UA-PLATFORM': "11.0.0"} +geturls_ua_firefox_windows7 = "Mozilla/5.0 ("+windows7_ua_string + \ "; rv:109.0) Gecko/20100101 Firefox/117.0" -geturls_ua_seamonkey_windows7 = "Mozilla/5.0 (" + windows7_ua_string + \ +geturls_ua_seamonkey_windows7 = "Mozilla/5.0 ("+windows7_ua_string + \ "; rv:91.0) Gecko/20100101 Firefox/91.0 SeaMonkey/2.53.17" -geturls_ua_chrome_windows7 = "Mozilla/5.0 (" + windows7_ua_string + \ +geturls_ua_chrome_windows7 = "Mozilla/5.0 ("+windows7_ua_string + \ ") AppleWebKit/537.36 (KHTML, like Gecko) Chrome/117.0.0.0 Safari/537.36" -geturls_ua_chromium_windows7 = "Mozilla/5.0 (" + windows7_ua_string + \ +geturls_ua_chromium_windows7 = "Mozilla/5.0 ("+windows7_ua_string + \ ") AppleWebKit/537.36 (KHTML, like Gecko) Chromium/117.0.0.0 Chrome/117.0.0.0 Safari/537.36" -geturls_ua_palemoon_windows7 = "Mozilla/5.0 (" + windows7_ua_string + \ +geturls_ua_palemoon_windows7 = "Mozilla/5.0 ("+windows7_ua_string + \ "; rv:102.0) Gecko/20100101 Goanna/6.3 Firefox/102.0 PaleMoon/32.4.0.1" -geturls_ua_opera_windows7 = "Mozilla/5.0 (" + windows7_ua_string + \ +geturls_ua_opera_windows7 = "Mozilla/5.0 ("+windows7_ua_string + \ ") AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.0.0 Safari/537.36 OPR/102.0.0.0" -geturls_ua_vivaldi_windows7 = "Mozilla/5.0 (" + windows7_ua_string + \ +geturls_ua_vivaldi_windows7 = "Mozilla/5.0 ("+windows7_ua_string + \ ") AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.0.0 Safari/537.36 Vivaldi/6.2.3105.48" geturls_ua_internet_explorer_windows7 = "Mozilla/5.0 (" + \ - windows7_ua_string + "; Trident/7.0; rv:11.0) like Gecko" -geturls_ua_microsoft_edge_windows7 = "Mozilla/5.0 (" + windows7_ua_string + \ + windows7_ua_string+"; Trident/7.0; rv:11.0) like Gecko" +geturls_ua_microsoft_edge_windows7 = "Mozilla/5.0 ("+windows7_ua_string + \ ") AppleWebKit/537.36 (KHTML, like Gecko) Chrome/117.0.0.0 Safari/537.36 Edg/117.0.2045.31" geturls_ua_pywwwget_python = "Mozilla/5.0 (compatible; {proname}/{prover}; +{prourl})".format( proname=__project__, prover=__version__, prourl=__project_url__) -if (platform.python_implementation() != ""): +if(platform.python_implementation() != ""): py_implementation = platform.python_implementation() -if (platform.python_implementation() == ""): +if(platform.python_implementation() == ""): py_implementation = "Python" -geturls_ua_pywwwget_python_alt = "Mozilla/5.0 ({osver}; {archtype}; +{prourl}) {pyimp}/{pyver} (KHTML, like Gecko) {proname}/{prover}".format( - osver=platform.system() + - " " + - platform.release(), - archtype=platform.machine(), - prourl=__project_url__, - pyimp=py_implementation, - pyver=platform.python_version(), - proname=__project__, - prover=__version__) +geturls_ua_pywwwget_python_alt = "Mozilla/5.0 ({osver}; {archtype}; +{prourl}) {pyimp}/{pyver} (KHTML, like Gecko) {proname}/{prover}".format(osver=platform.system( 
+)+" "+platform.release(), archtype=platform.machine(), prourl=__project_url__, pyimp=py_implementation, pyver=platform.python_version(), proname=__project__, prover=__version__) geturls_ua_googlebot_google = "Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)" geturls_ua_googlebot_google_old = "Googlebot/2.1 (+http://www.google.com/bot.html)" geturls_ua = geturls_ua_firefox_windows7 -geturls_headers_firefox_windows7 = { - 'Referer': "http://google.com/", - 'User-Agent': geturls_ua_firefox_windows7, - 'Accept-Encoding': compression_supported, - 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", - 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", - 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", - 'Connection': "close"} -geturls_headers_seamonkey_windows7 = { - 'Referer': "http://google.com/", - 'User-Agent': geturls_ua_seamonkey_windows7, - 'Accept-Encoding': compression_supported, - 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", - 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", - 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", - 'Connection': "close"} -geturls_headers_chrome_windows7 = { - 'Referer': "http://google.com/", - 'User-Agent': geturls_ua_chrome_windows7, - 'Accept-Encoding': compression_supported, - 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", - 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", - 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", - 'Connection': "close", - 'SEC-CH-UA': "\"Google Chrome\";v=\"117\", \"Not;A=Brand\";v=\"8\", \"Chromium\";v=\"117\"", - 'SEC-CH-UA-FULL-VERSION': "117.0.5938.63"} +geturls_headers_firefox_windows7 = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_firefox_windows7, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", + 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"} +geturls_headers_seamonkey_windows7 = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_seamonkey_windows7, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", + 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"} +geturls_headers_chrome_windows7 = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_chrome_windows7, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", + 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close", 'SEC-CH-UA': "\"Google Chrome\";v=\"117\", \"Not;A=Brand\";v=\"8\", \"Chromium\";v=\"117\"", 'SEC-CH-UA-FULL-VERSION': "117.0.5938.63"} geturls_headers_chrome_windows7.update(windows7_ua_addon) -geturls_headers_chromium_windows7 = { - 'Referer': "http://google.com/", - 'User-Agent': geturls_ua_chromium_windows7, - 'Accept-Encoding': compression_supported, - 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", - 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", - 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", - 'Connection': "close", - 'SEC-CH-UA': "\"Chromium\";v=\"117\", \"Not;A=Brand\";v=\"24\"", - 'SEC-CH-UA-FULL-VERSION': "117.0.5938.63"} 
+geturls_headers_chromium_windows7 = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_chromium_windows7, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6",
+                                     'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close", 'SEC-CH-UA': "\"Chromium\";v=\"117\", \"Not;A=Brand\";v=\"24\"", 'SEC-CH-UA-FULL-VERSION': "117.0.5938.63"}
 geturls_headers_chromium_windows7.update(windows7_ua_addon)
-geturls_headers_palemoon_windows7 = {
-    'Referer': "http://google.com/",
-    'User-Agent': geturls_ua_palemoon_windows7,
-    'Accept-Encoding': compression_supported,
-    'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6",
-    'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7",
-    'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
-    'Connection': "close"}
-geturls_headers_opera_windows7 = {
-    'Referer': "http://google.com/",
-    'User-Agent': geturls_ua_opera_windows7,
-    'Accept-Encoding': compression_supported,
-    'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6",
-    'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7",
-    'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
-    'Connection': "close",
-    'SEC-CH-UA': "\"Chromium\";v=\"116\", \"Not;A=Brand\";v=\"8\", \"Opera\";v=\"102\"",
-    'SEC-CH-UA-FULL-VERSION': "102.0.4880.56"}
+geturls_headers_palemoon_windows7 = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_palemoon_windows7, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6",
+                                     'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"}
+geturls_headers_opera_windows7 = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_opera_windows7, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7",
                                  'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close", 'SEC-CH-UA': "\"Chromium\";v=\"116\", \"Not;A=Brand\";v=\"8\", \"Opera\";v=\"102\"", 'SEC-CH-UA-FULL-VERSION': "102.0.4880.56"}
 geturls_headers_opera_windows7.update(windows7_ua_addon)
-geturls_headers_vivaldi_windows7 = {
-    'Referer': "http://google.com/",
-    'User-Agent': geturls_ua_vivaldi_windows7,
-    'Accept-Encoding': compression_supported,
-    'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6",
-    'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7",
-    'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
-    'Connection': "close",
-    'SEC-CH-UA': "\"Google Chrome\";v=\"117\", \"Not;A=Brand\";v=\"8\", \"Vivaldi\";v=\"6.2\"",
-    'SEC-CH-UA-FULL-VERSION': "6.2.3105.48"}
+geturls_headers_vivaldi_windows7 = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_vivaldi_windows7, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7",
                                    'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close", 'SEC-CH-UA': "\"Google Chrome\";v=\"117\", \"Not;A=Brand\";v=\"8\", \"Vivaldi\";v=\"6.2\"", 'SEC-CH-UA-FULL-VERSION': "6.2.3105.48"}
 geturls_headers_vivaldi_windows7.update(windows7_ua_addon)
-geturls_headers_internet_explorer_windows7 = {
-    'Referer': "http://google.com/",
"http://google.com/", - 'User-Agent': geturls_ua_internet_explorer_windows7, - 'Accept-Encoding': compression_supported, - 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", - 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", - 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", - 'Connection': "close"} -geturls_headers_microsoft_edge_windows7 = { - 'Referer': "http://google.com/", - 'User-Agent': geturls_ua_microsoft_edge_windows7, - 'Accept-Encoding': compression_supported, - 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", - 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", - 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", - 'Connection': "close", - 'SEC-CH-UA': "\"Microsoft Edge\";v=\"117\", \"Not;A=Brand\";v=\"8\", \"Chromium\";v=\"117\"", - 'SEC-CH-UA-FULL-VERSION': "117.0.2045.31"} +geturls_headers_internet_explorer_windows7 = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_internet_explorer_windows7, 'Accept-Encoding': compression_supported, 'Accept-Language': + "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"} +geturls_headers_microsoft_edge_windows7 = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_microsoft_edge_windows7, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", + 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close", 'SEC-CH-UA': "\"Microsoft Edge\";v=\"117\", \"Not;A=Brand\";v=\"8\", \"Chromium\";v=\"117\"", 'SEC-CH-UA-FULL-VERSION': "117.0.2045.31"} geturls_headers_microsoft_edge_windows7.update(windows7_ua_addon) -geturls_headers_pywwwget_python = { - 'Referer': "http://google.com/", - 'User-Agent': geturls_ua_pywwwget_python, - 'Accept-Encoding': "none", - 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", - 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", - 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", - 'Connection': "close", - 'SEC-CH-UA': "\"" + __project__ + "\";v=\"" + str(__version__) + "\", \"Not;A=Brand\";v=\"8\", \"" + py_implementation + "\";v=\"" + str( - platform.release()) + "\"", - 'SEC-CH-UA-FULL-VERSION': str(__version__), - 'SEC-CH-UA-PLATFORM': "" + py_implementation + "", - 'SEC-CH-UA-ARCH': "" + platform.machine() + "", - 'SEC-CH-UA-PLATFORM': str(__version__), - 'SEC-CH-UA-BITNESS': str(PyBitness)} -geturls_headers_pywwwget_python_alt = { - 'Referer': "http://google.com/", - 'User-Agent': geturls_ua_pywwwget_python_alt, - 'Accept-Encoding': "none", - 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", - 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", - 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", - 'Connection': "close", - 'SEC-CH-UA': "\"" + __project__ + "\";v=\"" + str(__version__) + "\", \"Not;A=Brand\";v=\"8\", \"" + py_implementation + "\";v=\"" + str( - platform.release()) + "\"", - 'SEC-CH-UA-FULL-VERSION': str(__version__), - 'SEC-CH-UA-PLATFORM': "" + py_implementation + "", - 'SEC-CH-UA-ARCH': "" + platform.machine() + "", - 'SEC-CH-UA-PLATFORM': str(__version__), - 'SEC-CH-UA-BITNESS': str(PyBitness)} -geturls_headers_googlebot_google = { - 'Referer': "http://google.com/", - 'User-Agent': geturls_ua_googlebot_google, - 
'Accept-Encoding': "none", - 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", - 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", - 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", - 'Connection': "close"} -geturls_headers_googlebot_google_old = { - 'Referer': "http://google.com/", - 'User-Agent': geturls_ua_googlebot_google_old, - 'Accept-Encoding': "none", - 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", - 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", - 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", - 'Connection': "close"} +geturls_headers_pywwwget_python = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_pywwwget_python, 'Accept-Encoding': "none", 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close", + 'SEC-CH-UA': "\""+__project__+"\";v=\""+str(__version__)+"\", \"Not;A=Brand\";v=\"8\", \""+py_implementation+"\";v=\""+str(platform.release())+"\"", 'SEC-CH-UA-FULL-VERSION': str(__version__), 'SEC-CH-UA-PLATFORM': ""+py_implementation+"", 'SEC-CH-UA-ARCH': ""+platform.machine()+"", 'SEC-CH-UA-PLATFORM': str(__version__), 'SEC-CH-UA-BITNESS': str(PyBitness)} +geturls_headers_pywwwget_python_alt = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_pywwwget_python_alt, 'Accept-Encoding': "none", 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close", + 'SEC-CH-UA': "\""+__project__+"\";v=\""+str(__version__)+"\", \"Not;A=Brand\";v=\"8\", \""+py_implementation+"\";v=\""+str(platform.release())+"\"", 'SEC-CH-UA-FULL-VERSION': str(__version__), 'SEC-CH-UA-PLATFORM': ""+py_implementation+"", 'SEC-CH-UA-ARCH': ""+platform.machine()+"", 'SEC-CH-UA-PLATFORM': str(__version__), 'SEC-CH-UA-BITNESS': str(PyBitness)} +geturls_headers_googlebot_google = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_googlebot_google, 'Accept-Encoding': "none", 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", + 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"} +geturls_headers_googlebot_google_old = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_googlebot_google_old, 'Accept-Encoding': "none", 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", + 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"} geturls_headers = geturls_headers_firefox_windows7 geturls_download_sleep = 0 def verbose_printout(dbgtxt, outtype="log", dbgenable=True, dgblevel=20): - if (outtype == "print" and dbgenable): + if(outtype == "print" and dbgenable): print(dbgtxt) return True - elif (outtype == "log" and dbgenable): + elif(outtype == "log" and dbgenable): logging.info(dbgtxt) return True - elif (outtype == "warning" and dbgenable): + elif(outtype == "warning" and dbgenable): logging.warning(dbgtxt) return True - elif (outtype == "error" and dbgenable): + elif(outtype == "error" and dbgenable): logging.error(dbgtxt) return True - elif (outtype == "critical" and dbgenable): + elif(outtype == "critical" and dbgenable): logging.critical(dbgtxt) return 
-    elif (outtype == "exception" and dbgenable):
+    elif(outtype == "exception" and dbgenable):
         logging.exception(dbgtxt)
         return True
-    elif (outtype == "logalt" and dbgenable):
+    elif(outtype == "logalt" and dbgenable):
         logging.log(dgblevel, dbgtxt)
         return True
-    elif (outtype == "debug" and dbgenable):
+    elif(outtype == "debug" and dbgenable):
         logging.debug(dbgtxt)
         return True
-    elif (not dbgenable):
+    elif(not dbgenable):
         return True
     else:
         return False
     return False


-def verbose_printout_return(
-        dbgtxt,
-        outtype="log",
-        dbgenable=True,
-        dgblevel=20):
+def verbose_printout_return(dbgtxt, outtype="log", dbgenable=True, dgblevel=20):
     dbgout = verbose_printout(dbgtxt, outtype, dbgenable, dgblevel)
-    if (not dbgout):
+    if(not dbgout):
         return False
     return dbgtxt

@@ -489,7 +358,7 @@ def listize(varlist):
     newlistreg = {}
     newlistrev = {}
     newlistfull = {}
-    while (il < ix):
+    while(il < ix):
         newlistreg.update({ilx: varlist[il]})
         newlistrev.update({varlist[il]: ilx})
         ilx = ilx + 1
@@ -508,7 +377,7 @@ def twolistize(varlist):
     newlistdescreg = {}
     newlistdescrev = {}
     newlistfull = {}
-    while (il < ix):
+    while(il < ix):
         newlistnamereg.update({ilx: varlist[il][0].strip()})
         newlistnamerev.update({varlist[il][0].strip(): ilx})
         newlistdescreg.update({ilx: varlist[il][1].strip()})
@@ -529,7 +398,7 @@ def arglistize(proexec, *varlist):
     ix = len(varlist)
     ilx = 1
     newarglist = [proexec]
-    while (il < ix):
+    while(il < ix):
         if varlist[il][0] is not None:
             newarglist.append(varlist[il][0])
         if varlist[il][1] is not None:
@@ -539,9 +408,9 @@ def arglistize(proexec, *varlist):


 def fix_header_names(header_dict):
-    if (sys.version[0] == "2"):
+    if(sys.version[0] == "2"):
         header_dict = {k.title(): v for k, v in header_dict.iteritems()}
-    if (sys.version[0] >= "3"):
+    if(sys.version[0] >= "3"):
         header_dict = {k.title(): v for k, v in header_dict.items()}
     return header_dict

@@ -561,13 +430,13 @@ def hms_string(sec_elapsed):

 def get_readable_size(bytes, precision=1, unit="IEC"):
     unit = unit.upper()
-    if (unit != "IEC" and unit != "SI"):
+    if(unit != "IEC" and unit != "SI"):
         unit = "IEC"
-    if (unit == "IEC"):
+    if(unit == "IEC"):
         units = [" B", " KiB", " MiB", " GiB", " TiB", " PiB", " EiB", " ZiB"]
         unitswos = ["B", "KiB", "MiB", "GiB", "TiB", "PiB", "EiB", "ZiB"]
         unitsize = 1024.0
-    if (unit == "SI"):
+    if(unit == "SI"):
         units = [" B", " kB", " MB", " GB", " TB", " PB", " EB", " ZB"]
         unitswos = ["B", "kB", "MB", "GB", "TB", "PB", "EB", "ZB"]
         unitsize = 1000.0
@@ -575,50 +444,39 @@ def get_readable_size(bytes, precision=1, unit="IEC"):
     orgbytes = bytes
     for unit in units:
         if abs(bytes) < unitsize:
-            strformat = "%3." + str(precision) + "f%s"
+            strformat = "%3."+str(precision)+"f%s"
             pre_return_val = (strformat % (bytes, unit))
             pre_return_val = re.sub(
                 r"([0]+) ([A-Za-z]+)", r" \2", pre_return_val)
             pre_return_val = re.sub(r"\. ([A-Za-z]+)", r" \1", pre_return_val)
             alt_return_val = pre_return_val.split()
-            return_val = {
-                'Bytes': orgbytes,
-                'ReadableWithSuffix': pre_return_val,
-                'ReadableWithoutSuffix': alt_return_val[0],
-                'ReadableSuffix': alt_return_val[1]}
+            return_val = {'Bytes': orgbytes, 'ReadableWithSuffix': pre_return_val,
+                          'ReadableWithoutSuffix': alt_return_val[0], 'ReadableSuffix': alt_return_val[1]}
             return return_val
         bytes /= unitsize
-    strformat = "%." + str(precision) + "f%s"
+    strformat = "%."+str(precision)+"f%s"
     pre_return_val = (strformat % (bytes, "YiB"))
     pre_return_val = re.sub(r"([0]+) ([A-Za-z]+)", r" \2", pre_return_val)
     pre_return_val = re.sub(r"\. ([A-Za-z]+)", r" \1", pre_return_val)
([A-Za-z]+)", r" \1", pre_return_val) alt_return_val = pre_return_val.split() - return_val = { - 'Bytes': orgbytes, - 'ReadableWithSuffix': pre_return_val, - 'ReadableWithoutSuffix': alt_return_val[0], - 'ReadableSuffix': alt_return_val[1]} + return_val = {'Bytes': orgbytes, 'ReadableWithSuffix': pre_return_val, + 'ReadableWithoutSuffix': alt_return_val[0], 'ReadableSuffix': alt_return_val[1]} return return_val -def get_readable_size_from_file( - infile, - precision=1, - unit="IEC", - usehashes=False, - usehashtypes="md5,sha1"): +def get_readable_size_from_file(infile, precision=1, unit="IEC", usehashes=False, usehashtypes="md5,sha1"): unit = unit.upper() usehashtypes = usehashtypes.lower() getfilesize = os.path.getsize(infile) return_val = get_readable_size(getfilesize, precision, unit) - if (usehashes): + if(usehashes): hashtypelist = usehashtypes.split(",") openfile = open(infile, "rb") filecontents = openfile.read() openfile.close() listnumcount = 0 listnumend = len(hashtypelist) - while (listnumcount < listnumend): + while(listnumcount < listnumend): hashtypelistlow = hashtypelist[listnumcount].strip() hashtypelistup = hashtypelistlow.upper() filehash = hashlib.new(hashtypelistup) @@ -629,27 +487,22 @@ def get_readable_size_from_file( return return_val -def get_readable_size_from_string( - instring, - precision=1, - unit="IEC", - usehashes=False, - usehashtypes="md5,sha1"): +def get_readable_size_from_string(instring, precision=1, unit="IEC", usehashes=False, usehashtypes="md5,sha1"): unit = unit.upper() usehashtypes = usehashtypes.lower() getfilesize = len(instring) return_val = get_readable_size(getfilesize, precision, unit) - if (usehashes): + if(usehashes): hashtypelist = usehashtypes.split(",") listnumcount = 0 listnumend = len(hashtypelist) - while (listnumcount < listnumend): + while(listnumcount < listnumend): hashtypelistlow = hashtypelist[listnumcount].strip() hashtypelistup = hashtypelistlow.upper() filehash = hashlib.new(hashtypelistup) - if (sys.version[0] == "2"): + if(sys.version[0] == "2"): filehash.update(instring) - if (sys.version[0] >= "3"): + if(sys.version[0] >= "3"): filehash.update(instring.encode('utf-8')) filegethash = filehash.hexdigest() return_val.update({hashtypelistup: filegethash}) @@ -762,7 +615,8 @@ def ftp_status_to_reason(code): 550: 'Requested action not taken. File unavailable', 551: 'Requested action aborted. Page type unknown', 552: 'Requested file action aborted. Exceeded storage allocation', - 553: 'Requested action not taken. File name not allowed'} + 553: 'Requested action not taken. 
+        553: 'Requested action not taken. File name not allowed'
+        }
     return reasons.get(code, 'Unknown Status Code')

@@ -781,21 +635,13 @@ def sftp_status_to_reason(code):
     return reasons.get(code, 'Unknown Status Code')


-def make_http_headers_from_dict_to_list(
-        headers={
-            'Referer': "http://google.com/",
-            'User-Agent': geturls_ua,
-            'Accept-Encoding': compression_supported,
-            'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6",
-            'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7",
-            'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
-            'Connection': "close"}):
+def make_http_headers_from_dict_to_list(headers={'Referer': "http://google.com/", 'User-Agent': geturls_ua, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"}):
     if isinstance(headers, dict):
         returnval = []
-        if (sys.version[0] == "2"):
+        if(sys.version[0] == "2"):
             for headkey, headvalue in headers.iteritems():
                 returnval.append((headkey, headvalue))
-        if (sys.version[0] >= "3"):
+        if(sys.version[0] >= "3"):
             for headkey, headvalue in headers.items():
                 returnval.append((headkey, headvalue))
     elif isinstance(headers, list):
@@ -805,23 +651,15 @@ def make_http_headers_from_dict_to_list(
     return returnval


-def make_http_headers_from_dict_to_pycurl(
-        headers={
-            'Referer': "http://google.com/",
-            'User-Agent': geturls_ua,
-            'Accept-Encoding': compression_supported,
-            'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6",
-            'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7",
-            'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
-            'Connection': "close"}):
+def make_http_headers_from_dict_to_pycurl(headers={'Referer': "http://google.com/", 'User-Agent': geturls_ua, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"}):
     if isinstance(headers, dict):
         returnval = []
-        if (sys.version[0] == "2"):
+        if(sys.version[0] == "2"):
             for headkey, headvalue in headers.iteritems():
-                returnval.append(headkey + ": " + headvalue)
-        if (sys.version[0] >= "3"):
+                returnval.append(headkey+": "+headvalue)
+        if(sys.version[0] >= "3"):
             for headkey, headvalue in headers.items():
-                returnval.append(headkey + ": " + headvalue)
+                returnval.append(headkey+": "+headvalue)
     elif isinstance(headers, list):
         returnval = headers
     else:
@@ -834,33 +672,18 @@ def make_http_headers_from_pycurl_to_dict(headers):
     headers = headers.strip().split('\r\n')
     for header in headers:
         parts = header.split(': ', 1)
-        if (len(parts) == 2):
+        if(len(parts) == 2):
             key, value = parts
             header_dict[key.title()] = value
     return header_dict


-def make_http_headers_from_list_to_dict(
-        headers=[
-            ("Referer",
-             "http://google.com/"),
-            ("User-Agent",
-             geturls_ua),
-            ("Accept-Encoding",
-             compression_supported),
-            ("Accept-Language",
-             "en-US,en;q=0.8,en-CA,en-GB;q=0.6"),
-            ("Accept-Charset",
-             "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7"),
-            ("Accept",
-             "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"),
-            ("Connection",
-             "close")]):
+def make_http_headers_from_list_to_dict(headers=[("Referer", "http://google.com/"), ("User-Agent", geturls_ua), ("Accept-Encoding", compression_supported), ("Accept-Language", "en-US,en;q=0.8,en-CA,en-GB;q=0.6"), ("Accept-Charset", "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7"), ("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"), ("Connection", "close")]):
"en-US,en;q=0.8,en-CA,en-GB;q=0.6"), ("Accept-Charset", "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7"), ("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"), ("Connection", "close")]): if isinstance(headers, list): returnval = {} mli = 0 mlil = len(headers) - while (mli < mlil): + while(mli < mlil): returnval.update({headers[mli][0]: headers[mli][1]}) mli = mli + 1 elif isinstance(headers, dict): @@ -875,38 +698,38 @@ def get_httplib_support(checkvalue=None): returnval = [] returnval.append("ftp") returnval.append("httplib") - if (havehttplib2): + if(havehttplib2): returnval.append("httplib2") returnval.append("urllib") - if (haveurllib3): + if(haveurllib3): returnval.append("urllib3") returnval.append("request3") returnval.append("request") - if (haverequests): + if(haverequests): returnval.append("requests") - if (haveaiohttp): + if(haveaiohttp): returnval.append("aiohttp") - if (havehttpx): + if(havehttpx): returnval.append("httpx") returnval.append("httpx2") - if (havemechanize): + if(havemechanize): returnval.append("mechanize") - if (havepycurl): + if(havepycurl): returnval.append("pycurl") - if (hasattr(pycurl, "CURL_HTTP_VERSION_2_0")): + if(hasattr(pycurl, "CURL_HTTP_VERSION_2_0")): returnval.append("pycurl2") - if (hasattr(pycurl, "CURL_HTTP_VERSION_3_0")): + if(hasattr(pycurl, "CURL_HTTP_VERSION_3_0")): returnval.append("pycurl3") - if (haveparamiko): + if(haveparamiko): returnval.append("sftp") - if (havepysftp): + if(havepysftp): returnval.append("pysftp") - if (checkvalue is not None): - if (checkvalue == "urllib1" or checkvalue == "urllib2"): + if(not checkvalue is None): + if(checkvalue == "urllib1" or checkvalue == "urllib2"): checkvalue = "urllib" - if (checkvalue == "httplib1"): + if(checkvalue == "httplib1"): checkvalue = "httplib" - if (checkvalue in returnval): + if(checkvalue in returnval): returnval = True else: returnval = False @@ -914,9 +737,9 @@ def get_httplib_support(checkvalue=None): def check_httplib_support(checkvalue="urllib"): - if (checkvalue == "urllib1" or checkvalue == "urllib2"): + if(checkvalue == "urllib1" or checkvalue == "urllib2"): checkvalue = "urllib" - if (checkvalue == "httplib1"): + if(checkvalue == "httplib1"): checkvalue = "httplib" returnval = get_httplib_support(checkvalue) return returnval @@ -927,116 +750,99 @@ def get_httplib_support_list(): return returnval -def download_from_url( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - httplibuse="urllib", - buffersize=524288, - sleep=-1, - timeout=10): +def download_from_url(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, httplibuse="urllib", buffersize=524288, sleep=-1, timeout=10): global geturls_download_sleep, havezstd, havebrotli, haveaiohttp, haverequests, havemechanize, havepycurl, havehttplib2, haveurllib3, havehttpx, havehttpcore, haveparamiko, havepysftp - if (sleep < 0): + if(sleep < 0): sleep = geturls_download_sleep - if (timeout <= 0): + if(timeout <= 0): timeout = 10 - if (httplibuse == "urllib1" or httplibuse == - "urllib2" or httplibuse == "request"): + if(httplibuse == "urllib1" or httplibuse == "urllib2" or httplibuse == "request"): httplibuse = "urllib" - if (httplibuse == "httplib1"): + if(httplibuse == "httplib1"): httplibuse = "httplib" - if (not haverequests and httplibuse == "requests"): + if(not haverequests and httplibuse == "requests"): httplibuse = "urllib" 
- if (not haveaiohttp and httplibuse == "aiohttp"): + if(not haveaiohttp and httplibuse == "aiohttp"): httplibuse = "urllib" - if (not havehttpx and httplibuse == "httpx"): + if(not havehttpx and httplibuse == "httpx"): httplibuse = "urllib" - if (not havehttpx and httplibuse == "httpx2"): + if(not havehttpx and httplibuse == "httpx2"): httplibuse = "urllib" - if (not havehttpcore and httplibuse == "httpcore"): + if(not havehttpcore and httplibuse == "httpcore"): httplibuse = "urllib" - if (not havehttpcore and httplibuse == "httpcore2"): + if(not havehttpcore and httplibuse == "httpcore2"): httplibuse = "urllib" - if (not havemechanize and httplibuse == "mechanize"): + if(not havemechanize and httplibuse == "mechanize"): httplibuse = "urllib" - if (not havepycurl and httplibuse == "pycurl"): + if(not havepycurl and httplibuse == "pycurl"): httplibuse = "urllib" - if (not havepycurl and httplibuse == "pycurl2"): + if(not havepycurl and httplibuse == "pycurl2"): httplibuse = "urllib" - if (havepycurl and httplibuse == "pycurl2" and not hasattr( - pycurl, "CURL_HTTP_VERSION_2_0")): + if(havepycurl and httplibuse == "pycurl2" and not hasattr(pycurl, "CURL_HTTP_VERSION_2_0")): httplibuse = "pycurl" - if (not havepycurl and httplibuse == "pycurl3"): + if(not havepycurl and httplibuse == "pycurl3"): httplibuse = "urllib" - if (havepycurl and httplibuse == "pycurl3" and not hasattr( - pycurl, "CURL_HTTP_VERSION_3_0") and hasattr(pycurl, "CURL_HTTP_VERSION_2_0")): + if(havepycurl and httplibuse == "pycurl3" and not hasattr(pycurl, "CURL_HTTP_VERSION_3_0") and hasattr(pycurl, "CURL_HTTP_VERSION_2_0")): httplibuse = "pycurl2" - if (havepycurl and httplibuse == "pycurl3" and not hasattr(pycurl, - "CURL_HTTP_VERSION_3_0") and not hasattr(pycurl, - "CURL_HTTP_VERSION_2_0")): + if(havepycurl and httplibuse == "pycurl3" and not hasattr(pycurl, "CURL_HTTP_VERSION_3_0") and not hasattr(pycurl, "CURL_HTTP_VERSION_2_0")): httplibuse = "pycurl" - if (not havehttplib2 and httplibuse == "httplib2"): + if(not havehttplib2 and httplibuse == "httplib2"): httplibuse = "httplib" - if (not haveparamiko and httplibuse == "sftp"): + if(not haveparamiko and httplibuse == "sftp"): httplibuse = "ftp" - if (not havepysftp and httplibuse == "pysftp"): + if(not havepysftp and httplibuse == "pysftp"): httplibuse = "ftp" urlparts = urlparse.urlparse(httpurl) - if (isinstance(httpheaders, list)): + if(isinstance(httpheaders, list)): httpheaders = make_http_headers_from_list_to_dict(httpheaders) httpheaders = fix_header_names(httpheaders) - if (httpuseragent is not None): - if ('User-Agent' in httpheaders): + if(httpuseragent is not None): + if('User-Agent' in httpheaders): httpheaders['User-Agent'] = httpuseragent else: httpuseragent.update({'User-Agent': httpuseragent}) - if (httpreferer is not None): - if ('Referer' in httpheaders): + if(httpreferer is not None): + if('Referer' in httpheaders): httpheaders['Referer'] = httpreferer else: httpuseragent.update({'Referer': httpreferer}) - if (urlparts.username is not None or urlparts.password is not None): - if (sys.version[0] == "2"): + if(urlparts.username is not None or urlparts.password is not None): + if(sys.version[0] == "2"): inurlencode = b64encode( - str(urlparts.username + ":" + urlparts.password)) - if (sys.version[0] >= "3"): + str(urlparts.username+":"+urlparts.password)) + if(sys.version[0] >= "3"): inurlencode = b64encode( - str(urlparts.username + ":" + urlparts.password).encode()).decode("UTF-8") - httpheaders.update({'Authorization': "Basic " + inurlencode}) + 
str(urlparts.username+":"+urlparts.password).encode()).decode("UTF-8") + httpheaders.update({'Authorization': "Basic "+inurlencode}) geturls_opener = build_opener(HTTPCookieProcessor(httpcookie)) - if (httplibuse == "urllib" or httplibuse == "mechanize"): - if (isinstance(httpheaders, dict)): + if(httplibuse == "urllib" or httplibuse == "mechanize"): + if(isinstance(httpheaders, dict)): httpheaders = make_http_headers_from_dict_to_list(httpheaders) - if (httplibuse == "pycurl" or httplibuse == - "pycurl2" or httplibuse == "pycurl3"): - if (isinstance(httpheaders, dict)): + if(httplibuse == "pycurl" or httplibuse == "pycurl2" or httplibuse == "pycurl3"): + if(isinstance(httpheaders, dict)): httpheaders = make_http_headers_from_dict_to_pycurl(httpheaders) geturls_opener.addheaders = httpheaders time.sleep(sleep) - if (postdata is not None and not isinstance(postdata, dict)): + if(postdata is not None and not isinstance(postdata, dict)): postdata = urlencode(postdata) - if (httplibuse == "urllib" or httplibuse == "request"): + if(httplibuse == "urllib" or httplibuse == "request"): geturls_request = Request(httpurl) try: - if (httpmethod == "GET"): + if(httpmethod == "GET"): geturls_text = geturls_opener.open(geturls_request) - elif (httpmethod == "POST"): + elif(httpmethod == "POST"): geturls_text = geturls_opener.open( geturls_request, data=postdata) else: geturls_text = geturls_opener.open(geturls_request) except HTTPError as geturls_text_error: geturls_text = geturls_text_error - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) except URLError: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False except socket.timeout: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False httpcodeout = geturls_text.getcode() try: @@ -1051,36 +857,36 @@ def download_from_url( httpurlout = geturls_text.geturl() httpheaderout = geturls_text.info() httpheadersentout = httpheaders - elif (httplibuse == "httplib"): - if (urlparts[0] == "http"): + elif(httplibuse == "httplib"): + if(urlparts[0] == "http"): httpconn = HTTPConnection(urlparts[1], timeout=timeout) - elif (urlparts[0] == "https"): + elif(urlparts[0] == "https"): httpconn = HTTPSConnection(urlparts[1], timeout=timeout) else: return False - if (postdata is not None and not isinstance(postdata, dict)): + if(postdata is not None and not isinstance(postdata, dict)): postdata = urlencode(postdata) try: - if (httpmethod == "GET"): + if(httpmethod == "GET"): httpconn.request("GET", urlparts[2], headers=httpheaders) - elif (httpmethod == "POST"): + elif(httpmethod == "POST"): httpconn.request( "GET", urlparts[2], body=postdata, headers=httpheaders) else: httpconn.request("GET", urlparts[2], headers=httpheaders) except socket.timeout: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False except socket.gaierror: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False except BlockingIOError: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False geturls_text = httpconn.getresponse() httpcodeout = geturls_text.status httpcodereason = geturls_text.reason - if (geturls_text.version == "10"): + if(geturls_text.version == "10"): httpversionout = "1.0" else: httpversionout = "1.1" @@ -1088,36 +894,36 @@ def download_from_url( httpurlout = httpurl httpheaderout = geturls_text.getheaders() httpheadersentout = httpheaders - elif (httplibuse == "httplib2"): - if 
(urlparts[0] == "http"): + elif(httplibuse == "httplib2"): + if(urlparts[0] == "http"): httpconn = HTTPConnectionWithTimeout(urlparts[1], timeout=timeout) - elif (urlparts[0] == "https"): + elif(urlparts[0] == "https"): httpconn = HTTPSConnectionWithTimeout(urlparts[1], timeout=timeout) else: return False - if (postdata is not None and not isinstance(postdata, dict)): + if(postdata is not None and not isinstance(postdata, dict)): postdata = urlencode(postdata) try: - if (httpmethod == "GET"): + if(httpmethod == "GET"): httpconn.request("GET", urlparts[2], headers=httpheaders) - elif (httpmethod == "POST"): + elif(httpmethod == "POST"): httpconn.request( "GET", urlparts[2], body=postdata, headers=httpheaders) else: httpconn.request("GET", urlparts[2], headers=httpheaders) except socket.timeout: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False except socket.gaierror: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False except BlockingIOError: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False geturls_text = httpconn.getresponse() httpcodeout = geturls_text.status httpcodereason = geturls_text.reason - if (geturls_text.version == "10"): + if(geturls_text.version == "10"): httpversionout = "1.0" else: httpversionout = "1.1" @@ -1125,37 +931,37 @@ def download_from_url( httpurlout = httpurl httpheaderout = geturls_text.getheaders() httpheadersentout = httpheaders - elif (httplibuse == "urllib3" or httplibuse == "request3"): + elif(httplibuse == "urllib3" or httplibuse == "request3"): timeout = urllib3.util.Timeout(connect=timeout, read=timeout) urllib_pool = urllib3.PoolManager(headers=httpheaders, timeout=timeout) try: - if (httpmethod == "GET"): + if(httpmethod == "GET"): geturls_text = urllib_pool.request( "GET", httpurl, headers=httpheaders, preload_content=False) - elif (httpmethod == "POST"): + elif(httpmethod == "POST"): geturls_text = urllib_pool.request( "POST", httpurl, body=postdata, headers=httpheaders, preload_content=False) else: geturls_text = urllib_pool.request( "GET", httpurl, headers=httpheaders, preload_content=False) except urllib3.exceptions.ConnectTimeoutError: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False except urllib3.exceptions.ConnectError: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False except urllib3.exceptions.MaxRetryError: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False except socket.timeout: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False except ValueError: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False httpcodeout = geturls_text.status httpcodereason = geturls_text.reason - if (geturls_text.version == "10"): + if(geturls_text.version == "10"): httpversionout = "1.0" else: httpversionout = "1.1" @@ -1163,34 +969,30 @@ def download_from_url( httpurlout = geturls_text.geturl() httpheaderout = geturls_text.info() httpheadersentout = httpheaders - elif (httplibuse == "requests"): + elif(httplibuse == "requests"): try: reqsession = requests.Session() - if (httpmethod == "GET"): + if(httpmethod == "GET"): geturls_text = reqsession.get( httpurl, timeout=timeout, headers=httpheaders, cookies=httpcookie) - elif (httpmethod == "POST"): + elif(httpmethod == "POST"): geturls_text = reqsession.post( - httpurl, - timeout=timeout, - 
data=postdata, - headers=httpheaders, - cookies=httpcookie) + httpurl, timeout=timeout, data=postdata, headers=httpheaders, cookies=httpcookie) else: geturls_text = reqsession.get( httpurl, timeout=timeout, headers=httpheaders, cookies=httpcookie) except requests.exceptions.ConnectTimeout: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False except requests.exceptions.ConnectError: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False except socket.timeout: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False httpcodeout = geturls_text.status_code httpcodereason = geturls_text.reason - if (geturls_text.raw.version == "10"): + if(geturls_text.raw.version == "10"): httpversionout = "1.0" else: httpversionout = "1.1" @@ -1198,29 +1000,24 @@ def download_from_url( httpurlout = geturls_text.url httpheaderout = geturls_text.headers httpheadersentout = geturls_text.request.headers - elif (httplibuse == "aiohttp"): + elif(httplibuse == "aiohttp"): try: - reqsession = aiohttp.ClientSession( - cookie_jar=httpcookie, - headers=httpheaders, - timeout=timeout, - read_timeout=timeout, - conn_timeout=timeout, - read_bufsize=buffersize) - if (httpmethod == "GET"): + reqsession = aiohttp.ClientSession(cookie_jar=httpcookie, headers=httpheaders, + timeout=timeout, read_timeout=timeout, conn_timeout=timeout, read_bufsize=buffersize) + if(httpmethod == "GET"): geturls_text = reqsession.get(httpurl) - elif (httpmethod == "POST"): + elif(httpmethod == "POST"): geturls_text = reqsession.post(httpurl, data=postdata) else: geturls_text = reqsession.get(httpurl) except aiohttp.exceptions.ConnectTimeout: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False except aiohttp.exceptions.ConnectError: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False except socket.timeout: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False httpcodeout = geturls_text.status httpcodereason = geturls_text.reason @@ -1229,93 +1026,85 @@ def download_from_url( httpurlout = geturls_text.url httpheaderout = geturls_text.headers httpheadersentout = geturls_text.request_info.headers - elif (httplibuse == "httpx"): + elif(httplibuse == "httpx"): try: - if (httpmethod == "GET"): + if(httpmethod == "GET"): httpx_pool = httpx.Client( http1=True, http2=False, trust_env=True) geturls_text = httpx_pool.get( httpurl, timeout=timeout, headers=httpheaders, cookies=httpcookie) - elif (httpmethod == "POST"): + elif(httpmethod == "POST"): httpx_pool = httpx.Client( http1=True, http2=False, trust_env=True) geturls_text = httpx_pool.post( - httpurl, - timeout=timeout, - data=postdata, - headers=httpheaders, - cookies=httpcookie) + httpurl, timeout=timeout, data=postdata, headers=httpheaders, cookies=httpcookie) else: httpx_pool = httpx.Client( http1=True, http2=False, trust_env=True) geturls_text = httpx_pool.get( httpurl, timeout=timeout, headers=httpheaders, cookies=httpcookie) except httpx.ConnectTimeout: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False except httpx.ConnectError: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False except socket.timeout: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False httpcodeout = geturls_text.status_code try: httpcodereason = geturls_text.reason_phrase - except BaseException: 
+ except: httpcodereason = http_status_to_reason(geturls_text.status_code) httpversionout = geturls_text.http_version httpmethodout = httpmethod httpurlout = str(geturls_text.url) httpheaderout = geturls_text.headers httpheadersentout = geturls_text.request.headers - elif (httplibuse == "httpx2"): + elif(httplibuse == "httpx2"): try: - if (httpmethod == "GET"): + if(httpmethod == "GET"): httpx_pool = httpx.Client( http1=True, http2=True, trust_env=True) geturls_text = httpx_pool.get( httpurl, timeout=timeout, headers=httpheaders, cookies=httpcookie) - elif (httpmethod == "POST"): + elif(httpmethod == "POST"): httpx_pool = httpx.Client( http1=True, http2=True, trust_env=True) geturls_text = httpx_pool.post( - httpurl, - timeout=timeout, - data=postdata, - headers=httpheaders, - cookies=httpcookie) + httpurl, timeout=timeout, data=postdata, headers=httpheaders, cookies=httpcookie) else: httpx_pool = httpx.Client( http1=True, http2=True, trust_env=True) geturls_text = httpx_pool.get( httpurl, timeout=timeout, headers=httpheaders, cookies=httpcookie) except httpx.ConnectTimeout: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False except httpx.ConnectError: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False except socket.timeout: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False httpcodeout = geturls_text.status_code try: httpcodereason = geturls_text.reason_phrase - except BaseException: + except: httpcodereason = http_status_to_reason(geturls_text.status_code) httpversionout = geturls_text.http_version httpmethodout = httpmethod httpurlout = str(geturls_text.url) httpheaderout = geturls_text.headers httpheadersentout = geturls_text.request.headers - elif (httplibuse == "httpcore"): + elif(httplibuse == "httpcore"): try: - if (httpmethod == "GET"): + if(httpmethod == "GET"): httpx_pool = httpcore.ConnectionPool(http1=True, http2=False) geturls_text = httpx_pool.request( "GET", httpurl, headers=httpheaders) - elif (httpmethod == "POST"): + elif(httpmethod == "POST"): httpx_pool = httpcore.ConnectionPool(http1=True, http2=False) geturls_text = httpx_pool.request( "GET", httpurl, data=postdata, headers=httpheaders) @@ -1324,13 +1113,13 @@ def download_from_url( geturls_text = httpx_pool.request( "GET", httpurl, headers=httpheaders) except httpcore.ConnectTimeout: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False except httpcore.ConnectError: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False except socket.timeout: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False httpcodeout = geturls_text.status httpcodereason = http_status_to_reason(geturls_text.status) @@ -1339,13 +1128,13 @@ def download_from_url( httpurlout = str(httpurl) httpheaderout = geturls_text.headers httpheadersentout = httpheaders - elif (httplibuse == "httpcore2"): + elif(httplibuse == "httpcore2"): try: - if (httpmethod == "GET"): + if(httpmethod == "GET"): httpx_pool = httpcore.ConnectionPool(http1=True, http2=True) geturls_text = httpx_pool.request( "GET", httpurl, headers=httpheaders) - elif (httpmethod == "POST"): + elif(httpmethod == "POST"): httpx_pool = httpcore.ConnectionPool(http1=True, http2=True) geturls_text = httpx_pool.request( "GET", httpurl, data=postdata, headers=httpheaders) @@ -1354,13 +1143,13 @@ def download_from_url( geturls_text = httpx_pool.request( "GET", httpurl, 
headers=httpheaders) except httpcore.ConnectTimeout: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False except httpcore.ConnectError: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False except socket.timeout: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False httpcodeout = geturls_text.status httpcodereason = http_status_to_reason(geturls_text.status) @@ -1369,31 +1158,31 @@ def download_from_url( httpurlout = str(httpurl) httpheaderout = geturls_text.headers httpheadersentout = httpheaders - elif (httplibuse == "mechanize"): + elif(httplibuse == "mechanize"): geturls_opener = mechanize.Browser() - if (isinstance(httpheaders, dict)): + if(isinstance(httpheaders, dict)): httpheaders = make_http_headers_from_dict_to_list(httpheaders) time.sleep(sleep) geturls_opener.addheaders = httpheaders geturls_opener.set_cookiejar(httpcookie) geturls_opener.set_handle_robots(False) - if (postdata is not None and not isinstance(postdata, dict)): + if(postdata is not None and not isinstance(postdata, dict)): postdata = urlencode(postdata) try: - if (httpmethod == "GET"): + if(httpmethod == "GET"): geturls_text = geturls_opener.open(httpurl) - elif (httpmethod == "POST"): + elif(httpmethod == "POST"): geturls_text = geturls_opener.open(httpurl, data=postdata) else: geturls_text = geturls_opener.open(httpurl) except mechanize.HTTPError as geturls_text_error: geturls_text = geturls_text_error - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) except URLError: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False except socket.timeout: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False httpcodeout = geturls_text.code httpcodereason = geturls_text.msg @@ -1403,11 +1192,11 @@ def download_from_url( httpheaderout = geturls_text.info() reqhead = geturls_opener.request httpheadersentout = reqhead.header_items() - elif (httplibuse == "pycurl"): + elif(httplibuse == "pycurl"): retrieved_body = BytesIO() retrieved_headers = BytesIO() try: - if (httpmethod == "GET"): + if(httpmethod == "GET"): geturls_text = pycurl.Curl() geturls_text.setopt(geturls_text.URL, httpurl) geturls_text.setopt(geturls_text.HTTP_VERSION, @@ -1420,7 +1209,7 @@ def download_from_url( geturls_text.setopt(geturls_text.FOLLOWLOCATION, True) geturls_text.setopt(geturls_text.TIMEOUT, timeout) geturls_text.perform() - elif (httpmethod == "POST"): + elif(httpmethod == "POST"): geturls_text = pycurl.Curl() geturls_text.setopt(geturls_text.URL, httpurl) geturls_text.setopt(geturls_text.HTTP_VERSION, @@ -1449,24 +1238,23 @@ def download_from_url( geturls_text.setopt(geturls_text.TIMEOUT, timeout) geturls_text.perform() retrieved_headers.seek(0) - if (sys.version[0] == "2"): + if(sys.version[0] == "2"): pycurlhead = retrieved_headers.read() - if (sys.version[0] >= "3"): + if(sys.version[0] >= "3"): pycurlhead = retrieved_headers.read().decode('UTF-8') pyhttpverinfo = re.findall( - r'^HTTP/([0-9.]+) (\d+)(?: ([A-Za-z\s]+))?$', - pycurlhead.splitlines()[0].strip().rstrip('\r\n'))[0] + r'^HTTP/([0-9.]+) (\d+)(?: ([A-Za-z\s]+))?$', pycurlhead.splitlines()[0].strip().rstrip('\r\n'))[0] pycurlheadersout = make_http_headers_from_pycurl_to_dict( pycurlhead) retrieved_body.seek(0) except socket.timeout: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False except socket.gaierror: - log.info("Error With URL " 
+ httpurl) + log.info("Error With URL "+httpurl) return False except ValueError: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False httpcodeout = geturls_text.getinfo(geturls_text.HTTP_CODE) httpcodereason = http_status_to_reason( @@ -1476,11 +1264,11 @@ def download_from_url( httpurlout = geturls_text.getinfo(geturls_text.EFFECTIVE_URL) httpheaderout = pycurlheadersout httpheadersentout = httpheaders - elif (httplibuse == "pycurl2"): + elif(httplibuse == "pycurl2"): retrieved_body = BytesIO() retrieved_headers = BytesIO() try: - if (httpmethod == "GET"): + if(httpmethod == "GET"): geturls_text = pycurl.Curl() geturls_text.setopt(geturls_text.HTTP_VERSION, geturls_text.CURL_HTTP_VERSION_2_0) @@ -1493,7 +1281,7 @@ def download_from_url( geturls_text.setopt(geturls_text.FOLLOWLOCATION, True) geturls_text.setopt(geturls_text.TIMEOUT, timeout) geturls_text.perform() - elif (httpmethod == "POST"): + elif(httpmethod == "POST"): geturls_text = pycurl.Curl() geturls_text.setopt(geturls_text.HTTP_VERSION, geturls_text.CURL_HTTP_VERSION_2_0) @@ -1522,24 +1310,23 @@ def download_from_url( geturls_text.setopt(geturls_text.TIMEOUT, timeout) geturls_text.perform() retrieved_headers.seek(0) - if (sys.version[0] == "2"): + if(sys.version[0] == "2"): pycurlhead = retrieved_headers.read() - if (sys.version[0] >= "3"): + if(sys.version[0] >= "3"): pycurlhead = retrieved_headers.read().decode('UTF-8') pyhttpverinfo = re.findall( - r'^HTTP/([0-9.]+) (\d+)(?: ([A-Za-z\s]+))?$', - pycurlhead.splitlines()[0].strip())[0] + r'^HTTP/([0-9.]+) (\d+)(?: ([A-Za-z\s]+))?$', pycurlhead.splitlines()[0].strip())[0] pycurlheadersout = make_http_headers_from_pycurl_to_dict( pycurlhead) retrieved_body.seek(0) except socket.timeout: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False except socket.gaierror: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False except ValueError: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False httpcodeout = geturls_text.getinfo(geturls_text.HTTP_CODE) httpcodereason = http_status_to_reason( @@ -1549,11 +1336,11 @@ def download_from_url( httpurlout = geturls_text.getinfo(geturls_text.EFFECTIVE_URL) httpheaderout = pycurlheadersout httpheadersentout = httpheaders - elif (httplibuse == "pycurl3"): + elif(httplibuse == "pycurl3"): retrieved_body = BytesIO() retrieved_headers = BytesIO() try: - if (httpmethod == "GET"): + if(httpmethod == "GET"): geturls_text = pycurl.Curl() geturls_text.setopt(geturls_text.HTTP_VERSION, geturls_text.CURL_HTTP_VERSION_3_0) @@ -1566,7 +1353,7 @@ def download_from_url( geturls_text.setopt(geturls_text.FOLLOWLOCATION, True) geturls_text.setopt(geturls_text.TIMEOUT, timeout) geturls_text.perform() - elif (httpmethod == "POST"): + elif(httpmethod == "POST"): geturls_text = pycurl.Curl() geturls_text.setopt(geturls_text.HTTP_VERSION, geturls_text.CURL_HTTP_VERSION_3_0) @@ -1595,24 +1382,23 @@ def download_from_url( geturls_text.setopt(geturls_text.TIMEOUT, timeout) geturls_text.perform() retrieved_headers.seek(0) - if (sys.version[0] == "2"): + if(sys.version[0] == "2"): pycurlhead = retrieved_headers.read() - if (sys.version[0] >= "3"): + if(sys.version[0] >= "3"): pycurlhead = retrieved_headers.read().decode('UTF-8') pyhttpverinfo = re.findall( - r'^HTTP/([0-9.]+) (\d+)(?: ([A-Za-z\s]+))?$', - pycurlhead.splitlines()[0].strip().rstrip('\r\n'))[0] + r'^HTTP/([0-9.]+) (\d+)(?: ([A-Za-z\s]+))?$', 
pycurlhead.splitlines()[0].strip().rstrip('\r\n'))[0] pycurlheadersout = make_http_headers_from_pycurl_to_dict( pycurlhead) retrieved_body.seek(0) except socket.timeout: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False except socket.gaierror: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False except ValueError: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False httpcodeout = geturls_text.getinfo(geturls_text.HTTP_CODE) httpcodereason = http_status_to_reason( @@ -1622,18 +1408,18 @@ def download_from_url( httpurlout = geturls_text.getinfo(geturls_text.EFFECTIVE_URL) httpheaderout = pycurlheadersout httpheadersentout = httpheaders - elif (httplibuse == "ftp"): + elif(httplibuse == "ftp"): geturls_text = download_file_from_ftp_file(httpurl) - if (not geturls_text): + if(not geturls_text): return False downloadsize = None - if (downloadsize is not None): + if(downloadsize is not None): downloadsize = int(downloadsize) if downloadsize is None: downloadsize = 0 fulldatasize = 0 prevdownsize = 0 - log.info("Downloading URL " + httpurl) + log.info("Downloading URL "+httpurl) with BytesIO() as strbuf: while True: databytes = geturls_text.read(buffersize) @@ -1642,67 +1428,31 @@ def download_from_url( datasize = len(databytes) fulldatasize = datasize + fulldatasize percentage = "" - if (downloadsize > 0): - percentage = str( - "{0:.2f}".format( - float( - float( - fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.') + "%" + if(downloadsize > 0): + percentage = str("{0:.2f}".format( + float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%" downloaddiff = fulldatasize - prevdownsize - log.info( - "Downloading " + - get_readable_size( - fulldatasize, - 2, - "SI")['ReadableWithSuffix'] + - " / " + - get_readable_size( - downloadsize, - 2, - "SI")['ReadableWithSuffix'] + - " " + - str(percentage) + - " / Downloaded " + - get_readable_size( - downloaddiff, - 2, - "IEC")['ReadableWithSuffix']) + log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")[ + 'ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']) prevdownsize = fulldatasize strbuf.write(databytes) strbuf.seek(0) returnval_content = strbuf.read() - returnval = { - 'Type': "Content", - 'Content': returnval_content, - 'Contentsize': fulldatasize, - 'ContentsizeAlt': { - 'IEC': get_readable_size( - fulldatasize, - 2, - "IEC"), - 'SI': get_readable_size( - fulldatasize, - 2, - "SI")}, - 'Headers': None, - 'Version': None, - 'Method': None, - 'HeadersSent': None, - 'URL': httpurl, - 'Code': None} + returnval = {'Type': "Content", 'Content': returnval_content, 'Contentsize': fulldatasize, 'ContentsizeAlt': {'IEC': get_readable_size( + fulldatasize, 2, "IEC"), 'SI': get_readable_size(fulldatasize, 2, "SI")}, 'Headers': None, 'Version': None, 'Method': None, 'HeadersSent': None, 'URL': httpurl, 'Code': None} geturls_text.close() - elif (httplibuse == "sftp"): + elif(httplibuse == "sftp"): geturls_text = download_file_from_sftp_file(httpurl) - if (not geturls_text): + if(not geturls_text): return False downloadsize = None - if (downloadsize is not None): + if(downloadsize is not None): downloadsize = int(downloadsize) if downloadsize is None: downloadsize = 0 fulldatasize = 0 prevdownsize = 0 - log.info("Downloading URL " + httpurl) + log.info("Downloading URL 
"+httpurl) with BytesIO() as strbuf: while True: databytes = geturls_text.read(buffersize) @@ -1711,68 +1461,32 @@ def download_from_url( datasize = len(databytes) fulldatasize = datasize + fulldatasize percentage = "" - if (downloadsize > 0): - percentage = str( - "{0:.2f}".format( - float( - float( - fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.') + "%" + if(downloadsize > 0): + percentage = str("{0:.2f}".format( + float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%" downloaddiff = fulldatasize - prevdownsize - log.info( - "Downloading " + - get_readable_size( - fulldatasize, - 2, - "SI")['ReadableWithSuffix'] + - " / " + - get_readable_size( - downloadsize, - 2, - "SI")['ReadableWithSuffix'] + - " " + - str(percentage) + - " / Downloaded " + - get_readable_size( - downloaddiff, - 2, - "IEC")['ReadableWithSuffix']) + log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")[ + 'ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']) prevdownsize = fulldatasize strbuf.write(databytes) strbuf.seek(0) returnval_content = strbuf.read() - returnval = { - 'Type': "Content", - 'Content': returnval_content, - 'Contentsize': fulldatasize, - 'ContentsizeAlt': { - 'IEC': get_readable_size( - fulldatasize, - 2, - "IEC"), - 'SI': get_readable_size( - fulldatasize, - 2, - "SI")}, - 'Headers': None, - 'Version': None, - 'Method': None, - 'HeadersSent': None, - 'URL': httpurl, - 'Code': None} + returnval = {'Type': "Content", 'Content': returnval_content, 'Contentsize': fulldatasize, 'ContentsizeAlt': {'IEC': get_readable_size( + fulldatasize, 2, "IEC"), 'SI': get_readable_size(fulldatasize, 2, "SI")}, 'Headers': None, 'Version': None, 'Method': None, 'HeadersSent': None, 'URL': httpurl, 'Code': None} geturls_text.close() return returnval - elif (httplibuse == "pysftp"): + elif(httplibuse == "pysftp"): geturls_text = download_file_from_pysftp_file(httpurl) - if (not geturls_text): + if(not geturls_text): return False downloadsize = None - if (downloadsize is not None): + if(downloadsize is not None): downloadsize = int(downloadsize) if downloadsize is None: downloadsize = 0 fulldatasize = 0 prevdownsize = 0 - log.info("Downloading URL " + httpurl) + log.info("Downloading URL "+httpurl) with BytesIO() as strbuf: while True: databytes = geturls_text.read(buffersize) @@ -1781,100 +1495,59 @@ def download_from_url( datasize = len(databytes) fulldatasize = datasize + fulldatasize percentage = "" - if (downloadsize > 0): - percentage = str( - "{0:.2f}".format( - float( - float( - fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.') + "%" + if(downloadsize > 0): + percentage = str("{0:.2f}".format( + float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%" downloaddiff = fulldatasize - prevdownsize - log.info( - "Downloading " + - get_readable_size( - fulldatasize, - 2, - "SI")['ReadableWithSuffix'] + - " / " + - get_readable_size( - downloadsize, - 2, - "SI")['ReadableWithSuffix'] + - " " + - str(percentage) + - " / Downloaded " + - get_readable_size( - downloaddiff, - 2, - "IEC")['ReadableWithSuffix']) + log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")[ + 'ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']) prevdownsize = fulldatasize 
strbuf.write(databytes) strbuf.seek(0) returnval_content = strbuf.read() - returnval = { - 'Type': "Content", - 'Content': returnval_content, - 'Contentsize': fulldatasize, - 'ContentsizeAlt': { - 'IEC': get_readable_size( - fulldatasize, - 2, - "IEC"), - 'SI': get_readable_size( - fulldatasize, - 2, - "SI")}, - 'Headers': None, - 'Version': None, - 'Method': None, - 'HeadersSent': None, - 'URL': httpurl, - 'Code': None} + returnval = {'Type': "Content", 'Content': returnval_content, 'Contentsize': fulldatasize, 'ContentsizeAlt': {'IEC': get_readable_size( + fulldatasize, 2, "IEC"), 'SI': get_readable_size(fulldatasize, 2, "SI")}, 'Headers': None, 'Version': None, 'Method': None, 'HeadersSent': None, 'URL': httpurl, 'Code': None} geturls_text.close() return returnval else: returnval = False - if (isinstance(httpheaderout, list) and (httplibuse != - "pycurl" and httplibuse != "pycurl2" and httplibuse != "pycurl3")): + if(isinstance(httpheaderout, list) and (httplibuse != "pycurl" and httplibuse != "pycurl2" and httplibuse != "pycurl3")): httpheaderout = dict( make_http_headers_from_list_to_dict(httpheaderout)) - if (isinstance(httpheaderout, list) and (httplibuse == - "pycurl" or httplibuse == "pycurl2" or httplibuse == "pycurl3")): + if(isinstance(httpheaderout, list) and (httplibuse == "pycurl" or httplibuse == "pycurl2" or httplibuse == "pycurl3")): httpheaderout = dict(make_http_headers_from_pycurl_to_dict( "\r\n".join(httpheaderout))) - if (sys.version[0] == "2"): + if(sys.version[0] == "2"): try: prehttpheaderout = httpheaderout httpheaderkeys = httpheaderout.keys() imax = len(httpheaderkeys) ic = 0 httpheaderout = {} - while (ic < imax): + while(ic < imax): httpheaderout.update( {httpheaderkeys[ic]: prehttpheaderout[httpheaderkeys[ic]]}) ic += 1 except AttributeError: pass httpheaderout = fix_header_names(httpheaderout) - if (isinstance(httpheadersentout, list) and (httplibuse != - "pycurl" and httplibuse != "pycurl2" and httplibuse != "pycurl3")): + if(isinstance(httpheadersentout, list) and (httplibuse != "pycurl" and httplibuse != "pycurl2" and httplibuse != "pycurl3")): httpheadersentout = dict( make_http_headers_from_list_to_dict(httpheadersentout)) - if (isinstance(httpheadersentout, list) and (httplibuse == - "pycurl" or httplibuse == "pycurl2" or httplibuse == "pycurl3")): + if(isinstance(httpheadersentout, list) and (httplibuse == "pycurl" or httplibuse == "pycurl2" or httplibuse == "pycurl3")): httpheadersentout = dict(make_http_headers_from_pycurl_to_dict( "\r\n".join(httpheadersentout))) httpheadersentout = fix_header_names(httpheadersentout) - log.info("Downloading URL " + httpurl) - if (httplibuse == "urllib" or httplibuse == "request" or httplibuse == "request3" or httplibuse == - "aiohttp" or httplibuse == "httplib" or httplibuse == "httplib2" or httplibuse == "urllib3" or httplibuse == "mechanize"): + log.info("Downloading URL "+httpurl) + if(httplibuse == "urllib" or httplibuse == "request" or httplibuse == "request3" or httplibuse == "aiohttp" or httplibuse == "httplib" or httplibuse == "httplib2" or httplibuse == "urllib3" or httplibuse == "mechanize"): downloadsize = httpheaderout.get('Content-Length') - if (downloadsize is not None): + if(downloadsize is not None): downloadsize = int(downloadsize) if downloadsize is None: downloadsize = 0 fulldatasize = 0 prevdownsize = 0 - log.info("Downloading URL " + httpurl) + log.info("Downloading URL "+httpurl) with BytesIO() as strbuf: while True: databytes = geturls_text.read(buffersize) @@ -1883,76 +1556,57 @@ def 
download_from_url( datasize = len(databytes) fulldatasize = datasize + fulldatasize percentage = "" - if (downloadsize > 0): - percentage = str( - "{0:.2f}".format( - float( - float( - fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.') + "%" + if(downloadsize > 0): + percentage = str("{0:.2f}".format( + float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%" downloaddiff = fulldatasize - prevdownsize - log.info( - "Downloading " + - get_readable_size( - fulldatasize, - 2, - "SI")['ReadableWithSuffix'] + - " / " + - get_readable_size( - downloadsize, - 2, - "SI")['ReadableWithSuffix'] + - " " + - str(percentage) + - " / Downloaded " + - get_readable_size( - downloaddiff, - 2, - "IEC")['ReadableWithSuffix']) + log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")[ + 'ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']) prevdownsize = fulldatasize strbuf.write(databytes) strbuf.seek(0) returnval_content = strbuf.read() geturls_text.close() - if (httpheaderout.get("Content-Encoding") == "gzip"): + if(httpheaderout.get("Content-Encoding") == "gzip"): try: returnval_content = zlib.decompress( - returnval_content, 16 + zlib.MAX_WBITS) + returnval_content, 16+zlib.MAX_WBITS) except zlib.error: pass - elif (httpheaderout.get("Content-Encoding") == "deflate"): + elif(httpheaderout.get("Content-Encoding") == "deflate"): try: returnval_content = zlib.decompress(returnval_content) except zlib.error: pass - elif (httpheaderout.get("Content-Encoding") == "br" and havebrotli): + elif(httpheaderout.get("Content-Encoding") == "br" and havebrotli): try: returnval_content = brotli.decompress(returnval_content) except brotli.error: pass - elif (httpheaderout.get("Content-Encoding") == "zstd" and havezstd): + elif(httpheaderout.get("Content-Encoding") == "zstd" and havezstd): try: returnval_content = zstandard.decompress(returnval_content) except zstandard.error: pass - elif ((httpheaderout.get("Content-Encoding") == "lzma" or httpheaderout.get("Content-Encoding") == "xz") and havelzma): + elif((httpheaderout.get("Content-Encoding") == "lzma" or httpheaderout.get("Content-Encoding") == "xz") and havelzma): try: returnval_content = lzma.decompress(returnval_content) except zstandard.error: pass - elif (httpheaderout.get("Content-Encoding") == "bzip2"): + elif(httpheaderout.get("Content-Encoding") == "bzip2"): try: returnval_content = bz2.decompress(returnval_content) except zstandard.error: pass - elif (httplibuse == "httpx" or httplibuse == "httpx2" or httplibuse == "httpcore" or httplibuse == "httpcore2"): + elif(httplibuse == "httpx" or httplibuse == "httpx2" or httplibuse == "httpcore" or httplibuse == "httpcore2"): downloadsize = httpheaderout.get('Content-Length') - if (downloadsize is not None): + if(downloadsize is not None): downloadsize = int(downloadsize) if downloadsize is None: downloadsize = 0 fulldatasize = 0 prevdownsize = 0 - log.info("Downloading URL " + httpurl) + log.info("Downloading URL "+httpurl) with BytesIO() as strbuf: while True: databytes = geturls_text.read() @@ -1961,78 +1615,59 @@ def download_from_url( datasize = len(databytes) fulldatasize = datasize + fulldatasize percentage = "" - if (downloadsize > 0): - percentage = str( - "{0:.2f}".format( - float( - float( - fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.') + "%" + if(downloadsize > 0): + percentage = str("{0:.2f}".format( + 
float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%" downloaddiff = fulldatasize - prevdownsize - log.info( - "Downloading " + - get_readable_size( - fulldatasize, - 2, - "SI")['ReadableWithSuffix'] + - " / " + - get_readable_size( - downloadsize, - 2, - "SI")['ReadableWithSuffix'] + - " " + - str(percentage) + - " / Downloaded " + - get_readable_size( - downloaddiff, - 2, - "IEC")['ReadableWithSuffix']) + log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")[ + 'ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']) prevdownsize = fulldatasize strbuf.write(databytes) break strbuf.seek(0) returnval_content = strbuf.read() geturls_text.close() - if (httpheaderout.get("Content-Encoding") == "gzip"): + if(httpheaderout.get("Content-Encoding") == "gzip"): try: returnval_content = zlib.decompress( - returnval_content, 16 + zlib.MAX_WBITS) + returnval_content, 16+zlib.MAX_WBITS) except zlib.error: pass - elif (httpheaderout.get("Content-Encoding") == "deflate"): + elif(httpheaderout.get("Content-Encoding") == "deflate"): try: returnval_content = zlib.decompress(returnval_content) except zlib.error: pass - elif (httpheaderout.get("Content-Encoding") == "br" and havebrotli): + elif(httpheaderout.get("Content-Encoding") == "br" and havebrotli): try: returnval_content = brotli.decompress(returnval_content) except brotli.error: pass - elif (httpheaderout.get("Content-Encoding") == "zstd" and havezstd): + elif(httpheaderout.get("Content-Encoding") == "zstd" and havezstd): try: returnval_content = zstandard.decompress(returnval_content) except zstandard.error: pass - elif ((httpheaderout.get("Content-Encoding") == "lzma" or httpheaderout.get("Content-Encoding") == "xz") and havelzma): + elif((httpheaderout.get("Content-Encoding") == "lzma" or httpheaderout.get("Content-Encoding") == "xz") and havelzma): try: returnval_content = lzma.decompress(returnval_content) except zstandard.error: pass - elif (httpheaderout.get("Content-Encoding") == "bzip2"): + elif(httpheaderout.get("Content-Encoding") == "bzip2"): try: returnval_content = bz2.decompress(returnval_content) except zstandard.error: pass - elif (httplibuse == "requests"): - log.info("Downloading URL " + httpurl) + elif(httplibuse == "requests"): + log.info("Downloading URL "+httpurl) downloadsize = httpheaderout.get('Content-Length') - if (downloadsize is not None): + if(downloadsize is not None): downloadsize = int(downloadsize) if downloadsize is None: downloadsize = 0 fulldatasize = 0 prevdownsize = 0 - log.info("Downloading URL " + httpurl) + log.info("Downloading URL "+httpurl) with BytesIO() as strbuf: while True: databytes = geturls_text.raw.read(buffersize) @@ -2041,77 +1676,58 @@ def download_from_url( datasize = len(databytes) fulldatasize = datasize + fulldatasize percentage = "" - if (downloadsize > 0): - percentage = str( - "{0:.2f}".format( - float( - float( - fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.') + "%" + if(downloadsize > 0): + percentage = str("{0:.2f}".format( + float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%" downloaddiff = fulldatasize - prevdownsize - log.info( - "Downloading " + - get_readable_size( - fulldatasize, - 2, - "SI")['ReadableWithSuffix'] + - " / " + - get_readable_size( - downloadsize, - 2, - "SI")['ReadableWithSuffix'] + - " " + - str(percentage) + - " / Downloaded " + - get_readable_size( - 
downloaddiff, - 2, - "IEC")['ReadableWithSuffix']) + log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")[ + 'ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']) prevdownsize = fulldatasize strbuf.write(databytes) strbuf.seek(0) returnval_content = strbuf.read() geturls_text.close() - if (httpheaderout.get("Content-Encoding") == "gzip"): + if(httpheaderout.get("Content-Encoding") == "gzip"): try: returnval_content = zlib.decompress( - returnval_content, 16 + zlib.MAX_WBITS) + returnval_content, 16+zlib.MAX_WBITS) except zlib.error: pass - elif (httpheaderout.get("Content-Encoding") == "deflate"): + elif(httpheaderout.get("Content-Encoding") == "deflate"): try: returnval_content = zlib.decompress(returnval_content) except zlib.error: pass - elif (httpheaderout.get("Content-Encoding") == "br" and havebrotli): + elif(httpheaderout.get("Content-Encoding") == "br" and havebrotli): try: returnval_content = brotli.decompress(returnval_content) except brotli.error: pass - elif (httpheaderout.get("Content-Encoding") == "zstd" and havezstd): + elif(httpheaderout.get("Content-Encoding") == "zstd" and havezstd): try: returnval_content = zstandard.decompress(returnval_content) except zstandard.error: pass - elif ((httpheaderout.get("Content-Encoding") == "lzma" or httpheaderout.get("Content-Encoding") == "xz") and havelzma): + elif((httpheaderout.get("Content-Encoding") == "lzma" or httpheaderout.get("Content-Encoding") == "xz") and havelzma): try: returnval_content = lzma.decompress(returnval_content) except zstandard.error: pass - elif (httpheaderout.get("Content-Encoding") == "bzip2"): + elif(httpheaderout.get("Content-Encoding") == "bzip2"): try: returnval_content = bz2.decompress(returnval_content) except zstandard.error: pass - elif (httplibuse == "pycurl" or httplibuse == "pycurl2" or httplibuse == "pycurl3"): - log.info("Downloading URL " + httpurl) + elif(httplibuse == "pycurl" or httplibuse == "pycurl2" or httplibuse == "pycurl3"): + log.info("Downloading URL "+httpurl) downloadsize = httpheaderout.get('Content-Length') - if (downloadsize is not None): + if(downloadsize is not None): downloadsize = int(downloadsize) if downloadsize is None: downloadsize = 0 fulldatasize = 0 prevdownsize = 0 - log.info("Downloading URL " + httpurl) + log.info("Downloading URL "+httpurl) with BytesIO() as strbuf: while True: databytes = retrieved_body.read(buffersize) @@ -2120,508 +1736,261 @@ def download_from_url( datasize = len(databytes) fulldatasize = datasize + fulldatasize percentage = "" - if (downloadsize > 0): - percentage = str( - "{0:.2f}".format( - float( - float( - fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.') + "%" + if(downloadsize > 0): + percentage = str("{0:.2f}".format( + float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%" downloaddiff = fulldatasize - prevdownsize - log.info( - "Downloading " + - get_readable_size( - fulldatasize, - 2, - "SI")['ReadableWithSuffix'] + - " / " + - get_readable_size( - downloadsize, - 2, - "SI")['ReadableWithSuffix'] + - " " + - str(percentage) + - " / Downloaded " + - get_readable_size( - downloaddiff, - 2, - "IEC")['ReadableWithSuffix']) + log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")[ + 'ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, 
"IEC")['ReadableWithSuffix']) prevdownsize = fulldatasize strbuf.write(databytes) strbuf.seek(0) returnval_content = strbuf.read() geturls_text.close() - if (httpheaderout.get("Content-Encoding") == "gzip"): + if(httpheaderout.get("Content-Encoding") == "gzip"): try: returnval_content = zlib.decompress( - returnval_content, 16 + zlib.MAX_WBITS) + returnval_content, 16+zlib.MAX_WBITS) except zlib.error: pass - elif (httpheaderout.get("Content-Encoding") == "deflate"): + elif(httpheaderout.get("Content-Encoding") == "deflate"): try: returnval_content = zlib.decompress(returnval_content) except zlib.error: pass - elif (httpheaderout.get("Content-Encoding") == "br" and havebrotli): + elif(httpheaderout.get("Content-Encoding") == "br" and havebrotli): try: returnval_content = brotli.decompress(returnval_content) except brotli.error: pass - elif (httpheaderout.get("Content-Encoding") == "zstd" and havezstd): + elif(httpheaderout.get("Content-Encoding") == "zstd" and havezstd): try: returnval_content = zstandard.decompress(returnval_content) except zstandard.error: pass - elif ((httpheaderout.get("Content-Encoding") == "lzma" or httpheaderout.get("Content-Encoding") == "xz") and havelzma): + elif((httpheaderout.get("Content-Encoding") == "lzma" or httpheaderout.get("Content-Encoding") == "xz") and havelzma): try: returnval_content = lzma.decompress(returnval_content) except zstandard.error: pass - elif (httpheaderout.get("Content-Encoding") == "bzip2"): + elif(httpheaderout.get("Content-Encoding") == "bzip2"): try: returnval_content = bz2.decompress(returnval_content) except zstandard.error: pass - elif (httplibuse == "ftp" or httplibuse == "sftp" or httplibuse == "pysftp"): + elif(httplibuse == "ftp" or httplibuse == "sftp" or httplibuse == "pysftp"): pass else: returnval = False - returnval = { - 'Type': "Content", - 'Content': returnval_content, - 'Contentsize': fulldatasize, - 'ContentsizeAlt': { - 'IEC': get_readable_size( - fulldatasize, - 2, - "IEC"), - 'SI': get_readable_size( - fulldatasize, - 2, - "SI")}, - 'Headers': httpheaderout, - 'Version': httpversionout, - 'Method': httpmethodout, - 'HeadersSent': httpheadersentout, - 'URL': httpurlout, - 'Code': httpcodeout, - 'Reason': httpcodereason, - 'HTTPLib': httplibuse} + returnval = {'Type': "Content", 'Content': returnval_content, 'Contentsize': fulldatasize, 'ContentsizeAlt': {'IEC': get_readable_size(fulldatasize, 2, "IEC"), 'SI': get_readable_size( + fulldatasize, 2, "SI")}, 'Headers': httpheaderout, 'Version': httpversionout, 'Method': httpmethodout, 'HeadersSent': httpheadersentout, 'URL': httpurlout, 'Code': httpcodeout, 'Reason': httpcodereason, 'HTTPLib': httplibuse} return returnval -def download_from_url_from_list( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - httplibuse="urllib", - buffersize=524288, - sleep=-1, - timeout=10): - if (isinstance(httpurl, list)): +def download_from_url_from_list(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, httplibuse="urllib", buffersize=524288, sleep=-1, timeout=10): + if(isinstance(httpurl, list)): pass - elif (isinstance(httpurl, tuple)): + elif(isinstance(httpurl, tuple)): pass - elif (isinstance(httpurl, dict)): + elif(isinstance(httpurl, dict)): httpurl = httpurl.values() else: httpurl = [httpurl] listsize = len(httpurl) listcount = 0 returnval = [] - while (listcount < listsize): - ouputval = 
download_from_url(
-            httpurl[listcount], httpheaders, httpuseragent, httpreferer, httpcookie,
-            httpmethod, postdata, httplibuse, buffersize, sleep, timeout)
+    while(listcount < listsize):
+        ouputval = download_from_url(httpurl[listcount], httpheaders, httpuseragent, httpreferer,
+                                     httpcookie, httpmethod, postdata, httplibuse, buffersize, sleep, timeout)
         returnval.append(ouputval)
         listcount += 1
     return returnval


-def download_from_url_file(
-        httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None,
-        httpcookie=geturls_cj, httpmethod="GET", postdata=None, httplibuse="urllib",
-        ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
+def download_from_url_file(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, httplibuse="urllib", ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
     global geturls_download_sleep, havezstd, havebrotli, tmpfileprefix, tmpfilesuffix, haveaiohttp, haverequests, havemechanize, havehttplib2, haveurllib3, havehttpx, havehttpcore, haveparamiko, havepysftp
     exec_time_start = time.time()
     myhash = hashlib.new("sha1")
-    if (sys.version[0] == "2"):
+    if(sys.version[0] == "2"):
         myhash.update(httpurl)
         myhash.update(str(buffersize))
         myhash.update(str(exec_time_start))
-    if (sys.version[0] >= "3"):
+    if(sys.version[0] >= "3"):
         myhash.update(httpurl.encode('utf-8'))
         myhash.update(str(buffersize).encode('utf-8'))
         myhash.update(str(exec_time_start).encode('utf-8'))
     newtmpfilesuffix = tmpfilesuffix + str(myhash.hexdigest())
-    if (sleep < 0):
+    if(sleep < 0):
         sleep = geturls_download_sleep
-    if (timeout <= 0):
+    if(timeout <= 0):
         timeout = 10
-    if (httplibuse == "urllib1" or httplibuse == "urllib2" or httplibuse == "request"):
+    if(httplibuse == "urllib1" or httplibuse == "urllib2" or httplibuse == "request"):
         httplibuse = "urllib"
-    if (httplibuse == "httplib1"):
+    if(httplibuse == "httplib1"):
         httplibuse = "httplib"
-    if (not haverequests and httplibuse == "requests"):
+    if(not haverequests and httplibuse == "requests"):
         httplibuse = "urllib"
-    if (not haveaiohttp and httplibuse == "aiohttp"):
+    if(not haveaiohttp and httplibuse == "aiohttp"):
         httplibuse = "urllib"
-    if (not havehttpx and httplibuse == "httpx"):
+    if(not havehttpx and httplibuse == "httpx"):
         httplibuse = "urllib"
-    if (not havehttpx and httplibuse == "httpx2"):
+    if(not havehttpx and httplibuse == "httpx2"):
         httplibuse = "urllib"
-    if (not havehttpcore and httplibuse == "httpcore"):
+    if(not havehttpcore and httplibuse == "httpcore"):
         httplibuse = "urllib"
-    if (not havehttpcore and httplibuse == "httpcore2"):
+    if(not havehttpcore and httplibuse == "httpcore2"):
         httplibuse = "urllib"
-    if (not havemechanize and httplibuse == "mechanize"):
+    if(not havemechanize and httplibuse == "mechanize"):
         httplibuse = "urllib"
-    if (not havepycurl and httplibuse == "pycurl"):
+    if(not havepycurl and httplibuse == "pycurl"):
         httplibuse = "urllib"
-    if (not havepycurl and httplibuse == "pycurl2"):
+    if(not havepycurl and httplibuse == "pycurl2"):
         httplibuse = "urllib"
-    if (havepycurl and httplibuse == "pycurl2" and not hasattr(
-            pycurl, "CURL_HTTP_VERSION_2_0")):
+    if(havepycurl and httplibuse == "pycurl2" and not hasattr(pycurl, "CURL_HTTP_VERSION_2_0")):
         httplibuse = "pycurl"
-    if (not havepycurl and httplibuse == "pycurl3"):
+    if(not havepycurl and httplibuse == "pycurl3"):
         httplibuse = "urllib"
-    if (havepycurl and httplibuse == "pycurl3" and not hasattr(
-            pycurl, "CURL_HTTP_VERSION_3_0") and hasattr(pycurl, "CURL_HTTP_VERSION_2_0")):
+    if(havepycurl and httplibuse == "pycurl3" and not hasattr(pycurl, "CURL_HTTP_VERSION_3_0") and hasattr(pycurl, "CURL_HTTP_VERSION_2_0")):
         httplibuse = "pycurl2"
-    if (havepycurl and httplibuse == "pycurl3" and not hasattr(
-            pycurl, "CURL_HTTP_VERSION_3_0") and not hasattr(pycurl, "CURL_HTTP_VERSION_2_0")):
+    if(havepycurl and httplibuse == "pycurl3" and not hasattr(pycurl, "CURL_HTTP_VERSION_3_0") and not hasattr(pycurl, "CURL_HTTP_VERSION_2_0")):
         httplibuse = "pycurl"
-    if (not havehttplib2 and httplibuse == "httplib2"):
+    if(not havehttplib2 and httplibuse == "httplib2"):
         httplibuse = "httplib"
-    if (not haveparamiko and httplibuse == "sftp"):
+    if(not haveparamiko and httplibuse == "sftp"):
         httplibuse = "ftp"
-    if (not haveparamiko and httplibuse == "pysftp"):
+    if(not haveparamiko and httplibuse == "pysftp"):
         httplibuse = "ftp"
-    pretmpfilename = download_from_url(
-        httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod,
-        postdata, httplibuse, buffersize, sleep, timeout)
-    if (not pretmpfilename):
+    pretmpfilename = download_from_url(httpurl, httpheaders, httpuseragent, httpreferer,
+                                       httpcookie, httpmethod, postdata, httplibuse, buffersize, sleep, timeout)
+    if(not pretmpfilename):
         return False
     with tempfile.NamedTemporaryFile('wb+', prefix=tmpfileprefix, suffix=newtmpfilesuffix, delete=False) as f:
         tmpfilename = f.name
         try:
-            os.utime(
-                tmpfilename,
-                (time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()),
-                 time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple())))
+            os.utime(tmpfilename, (time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()),
+                                   time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple())))
         except AttributeError:
             try:
-                os.utime(
-                    tmpfilename,
-                    (time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()),
-                     time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple())))
+                os.utime(tmpfilename, (time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()),
+                                       time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple())))
             except ValueError:
                 pass
        except ValueError:
            pass
-        returnval = {
-            'Type': "File", 'Filename': tmpfilename, 'Filesize': pretmpfilename.get('Contentsize'),
-            'FilesizeAlt': {'IEC': get_readable_size(pretmpfilename.get('Contentsize'), 2, "IEC"),
-                            'SI': get_readable_size(pretmpfilename.get('Contentsize'), 2, "SI")},
-            'Headers': pretmpfilename.get('Headers'), 'Version': pretmpfilename.get('Version'),
-            'Method': pretmpfilename.get('Method'), 'HeadersSent': pretmpfilename.get('HeadersSent'),
-            'URL': pretmpfilename.get('URL'), 'Code': pretmpfilename.get('Code'),
-            'Reason': pretmpfilename.get('Reason'), 'HTTPLib': pretmpfilename.get('HTTPLib')}
+        returnval = {'Type': "File", 'Filename': tmpfilename, 'Filesize': pretmpfilename.get('Contentsize'), 'FilesizeAlt': {'IEC': get_readable_size(pretmpfilename.get('Contentsize'), 2, "IEC"), 'SI': get_readable_size(pretmpfilename.get('Contentsize'), 2, "SI")}, 'Headers': pretmpfilename.get(
+            'Headers'), 'Version': pretmpfilename.get('Version'), 'Method': pretmpfilename.get('Method'), 'HeadersSent': pretmpfilename.get('HeadersSent'), 'URL': pretmpfilename.get('URL'), 'Code': pretmpfilename.get('Code'), 'Reason': pretmpfilename.get('Reason'), 'HTTPLib': pretmpfilename.get('HTTPLib')}
         f.write(pretmpfilename.get('Content'))
         f.close()
     exec_time_end = time.time()
-    log.info("It took " + hms_string(exec_time_start - exec_time_end) + " to download file.")
-    returnval.update(
-        {'Filesize': os.path.getsize(tmpfilename),
-         'FilesizeAlt': {'IEC': get_readable_size(os.path.getsize(tmpfilename), 2, "IEC"),
-                         'SI': get_readable_size(os.path.getsize(tmpfilename), 2, "SI")},
-         'DownloadTime': float(exec_time_start - exec_time_end),
-         'DownloadTimeReadable': hms_string(exec_time_start - exec_time_end)})
+    log.info("It took "+hms_string(exec_time_start -
+                                   exec_time_end)+" to download file.")
+    returnval.update({'Filesize': os.path.getsize(tmpfilename), 'FilesizeAlt': {'IEC': get_readable_size(os.path.getsize(tmpfilename), 2, "IEC"), 'SI': get_readable_size(
+        os.path.getsize(tmpfilename), 2, "SI")}, 'DownloadTime': float(exec_time_start - exec_time_end), 'DownloadTimeReadable': hms_string(exec_time_start - exec_time_end)})
     return returnval


-def download_from_url_file_with_list(
-        httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None,
-        httpcookie=geturls_cj, httpmethod="GET", postdata=None, httplibuse="urllib",
-        ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
-    if (isinstance(httpurl, list)):
+def download_from_url_file_with_list(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, httplibuse="urllib", ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
+    if(isinstance(httpurl, list)):
         pass
-    elif (isinstance(httpurl, tuple)):
+    elif(isinstance(httpurl, tuple)):
         pass
-    elif (isinstance(httpurl, dict)):
+    elif(isinstance(httpurl, dict)):
         httpurl = httpurl.values()
     else:
         httpurl = [httpurl]
     listsize = len(httpurl)
     listcount = 0
     returnval = []
-    while (listcount < listsize):
-        ouputval = download_from_url_file(
-            httpurl[listcount], httpheaders, httpuseragent, httpreferer, httpcookie,
-            httpmethod, postdata, httplibuse, ranges, buffersize, sleep, timeout)
+    while(listcount < listsize):
+        ouputval = download_from_url_file(httpurl[listcount], httpheaders, httpuseragent, httpreferer,
+                                          httpcookie, httpmethod, postdata, httplibuse, ranges, buffersize, sleep, timeout)
         returnval.append(ouputval)
         listcount += 1
     return returnval
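For orientation while reading the hunk above: `download_from_url_file` writes the payload into a NamedTemporaryFile and returns a dict describing it, silently falling back to `urllib` (or `ftp` for the SFTP backends) when the requested library is unavailable; the list variant fans the same call out over several URLs. A minimal usage sketch, assuming the module imports as `pywwwget` (the import name comes from the file list in this patch, not from this hunk):

    import pywwwget

    result = pywwwget.download_from_url_file("https://example.com/file.bin",
                                             httplibuse="requests", timeout=10)
    if result:
        print(result['Filename'])   # path of the temporary file on disk
        print(result['Filesize'])   # byte count, mirrored in 'FilesizeAlt'
        print(result['HTTPLib'])    # backend actually used after the fallbacks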


-def download_from_url_to_file(
-        httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None,
-        httpcookie=geturls_cj, httpmethod="GET", postdata=None, httplibuse="urllib",
-        outfile="-", outpath=os.getcwd(), ranges=[None, None],
-        buffersize=[524288, 524288], sleep=-1, timeout=10):
+def download_from_url_to_file(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, httplibuse="urllib", outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10):
     global geturls_download_sleep, havezstd, havebrotli, haveaiohttp, haverequests, havemechanize, havepycurl, havehttplib2, haveurllib3, havehttpx, havehttpcore, haveparamiko, havepysftp
-    if (sleep < 0):
+    if(sleep < 0):
         sleep = geturls_download_sleep
-    if (timeout <= 0):
+    if(timeout <= 0):
         timeout = 10
-    if (httplibuse == "urllib1" or httplibuse == "urllib2" or httplibuse == "request"):
+    if(httplibuse == "urllib1" or httplibuse == "urllib2" or httplibuse == "request"):
         httplibuse = "urllib"
-    if (httplibuse == "httplib1"):
+    if(httplibuse == "httplib1"):
         httplibuse = "httplib"
-    if (not haverequests and httplibuse == "requests"):
+    if(not haverequests and httplibuse == "requests"):
         httplibuse = "urllib"
-    if (not haveaiohttp and httplibuse == "aiohttp"):
+    if(not haveaiohttp and httplibuse == "aiohttp"):
         httplibuse = "urllib"
-    if (not havehttpx and httplibuse == "httpx"):
+    if(not havehttpx and httplibuse == "httpx"):
         httplibuse = "urllib"
-    if (not havehttpx and httplibuse == "httpx2"):
+    if(not havehttpx and httplibuse == "httpx2"):
         httplibuse = "urllib"
-    if (not havehttpcore and httplibuse == "httpcore"):
+    if(not havehttpcore and httplibuse == "httpcore"):
         httplibuse = "urllib"
-    if (not havehttpcore and httplibuse == "httpcore2"):
+    if(not havehttpcore and httplibuse == "httpcore2"):
         httplibuse = "urllib"
-    if (not havemechanize and httplibuse == "mechanize"):
+    if(not havemechanize and httplibuse == "mechanize"):
         httplibuse = "urllib"
-    if (not havepycurl and httplibuse == "pycurl"):
+    if(not havepycurl and httplibuse == "pycurl"):
         httplibuse = "urllib"
-    if (not havepycurl and httplibuse == "pycurl2"):
+    if(not havepycurl and httplibuse == "pycurl2"):
         httplibuse = "urllib"
-    if (havepycurl and httplibuse == "pycurl2" and not hasattr(
-            pycurl, "CURL_HTTP_VERSION_2_0")):
+    if(havepycurl and httplibuse == "pycurl2" and not hasattr(pycurl, "CURL_HTTP_VERSION_2_0")):
         httplibuse = "pycurl"
-    if (not havepycurl and httplibuse == "pycurl3"):
+    if(not havepycurl and httplibuse == "pycurl3"):
         httplibuse = "urllib"
-    if (havepycurl and httplibuse == "pycurl3" and not hasattr(
-            pycurl, "CURL_HTTP_VERSION_3_0") and hasattr(pycurl, "CURL_HTTP_VERSION_2_0")):
+    if(havepycurl and httplibuse == "pycurl3" and not hasattr(pycurl, "CURL_HTTP_VERSION_3_0") and hasattr(pycurl, "CURL_HTTP_VERSION_2_0")):
         httplibuse = "pycurl2"
-    if (havepycurl and httplibuse == "pycurl3" and not hasattr(
-            pycurl, "CURL_HTTP_VERSION_3_0") and not hasattr(pycurl, "CURL_HTTP_VERSION_2_0")):
+    if(havepycurl and httplibuse == "pycurl3" and not hasattr(pycurl, "CURL_HTTP_VERSION_3_0") and not hasattr(pycurl, "CURL_HTTP_VERSION_2_0")):
         httplibuse = "pycurl"
-    if (not havehttplib2 and httplibuse == "httplib2"):
+    if(not havehttplib2 and httplibuse == "httplib2"):
         httplibuse = "httplib"
-    if (not haveparamiko and httplibuse == "sftp"):
+    if(not haveparamiko and httplibuse == "sftp"):
         httplibuse = "ftp"
-    if (not havepysftp and httplibuse == "pysftp"):
+    if(not havepysftp and httplibuse == "pysftp"):
         httplibuse = "ftp"
-    if (not outfile == "-"):
+    if(not outfile == "-"):
         outpath = outpath.rstrip(os.path.sep)
-        filepath = os.path.realpath(outpath + os.path.sep + outfile)
-        if (not os.path.exists(outpath)):
+        filepath = os.path.realpath(outpath+os.path.sep+outfile)
+        if(not os.path.exists(outpath)):
             os.makedirs(outpath)
-        if (os.path.exists(outpath) and os.path.isfile(outpath)):
+        if(os.path.exists(outpath) and os.path.isfile(outpath)):
             return False
-        if (os.path.exists(filepath) and os.path.isdir(filepath)):
+        if(os.path.exists(filepath) and os.path.isdir(filepath)):
             return False
-        pretmpfilename = download_from_url_file(
-            httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod,
-            postdata, httplibuse, ranges, buffersize[0], sleep, timeout)
-        if (not pretmpfilename):
+        pretmpfilename = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer,
+                                                httpcookie, httpmethod, postdata, httplibuse, ranges, buffersize[0], sleep, timeout)
+        if(not pretmpfilename):
             return False
         tmpfilename = pretmpfilename.get('Filename')
         downloadsize = int(os.path.getsize(tmpfilename))
         fulldatasize = 0
-        log.info("Moving file " + tmpfilename + " to " + filepath)
+        log.info("Moving file "+tmpfilename+" to "+filepath)
         exec_time_start = time.time()
         shutil.move(tmpfilename, filepath)
         try:
-            os.utime(
-                filepath,
-                (time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()),
-                 time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple())))
+            os.utime(filepath, (time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()),
+                                time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple())))
         except AttributeError:
             try:
-                os.utime(
-                    filepath,
-                    (time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()),
-                     time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple())))
+                os.utime(filepath, (time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()),
+                                    time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple())))
             except ValueError:
                 pass
         except ValueError:
             pass
         exec_time_end = time.time()
-        log.info("It took " + hms_string(exec_time_start - exec_time_end) + " to move file.")
-        if (os.path.exists(tmpfilename)):
+        log.info("It took "+hms_string(exec_time_start -
+                                       exec_time_end)+" to move file.")
+        if(os.path.exists(tmpfilename)):
             os.remove(tmpfilename)
-        returnval = {
-            'Type': "File", 'Filename': filepath, 'Filesize': downloadsize,
-            'FilesizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"),
-                            'SI': get_readable_size(downloadsize, 2, "SI")},
-            'DownloadTime': pretmpfilename.get('DownloadTime'),
-            'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'),
-            'MoveFileTime': float(exec_time_start - exec_time_end),
-            'MoveFileTimeReadable': hms_string(exec_time_start - exec_time_end),
-            'Headers': pretmpfilename.get('Headers'), 'Version': pretmpfilename.get('Version'),
-            'Method': pretmpfilename.get('Method'), 'Method': httpmethod,
-            'HeadersSent': pretmpfilename.get('HeadersSent'), 'URL': pretmpfilename.get('URL'),
-            'Code': pretmpfilename.get('Code'), 'Reason': pretmpfilename.get('Reason'),
-            'HTTPLib': pretmpfilename.get('HTTPLib')}
-    if (outfile == "-"):
-        pretmpfilename = download_from_url_file(
-            httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod,
-            postdata, httplibuse, ranges, buffersize[0], sleep, timeout)
+        returnval = {'Type': "File", 'Filename': filepath, 'Filesize': downloadsize, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename.get('DownloadTime'), 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), 'MoveFileTime': float(exec_time_start -
+                                                                                                                                                       exec_time_end), 'MoveFileTimeReadable': hms_string(
+            exec_time_start - exec_time_end), 'Headers': pretmpfilename.get('Headers'), 'Version': pretmpfilename.get('Version'), 'Method': pretmpfilename.get('Method'), 'Method': httpmethod, 'HeadersSent': pretmpfilename.get('HeadersSent'), 'URL': pretmpfilename.get('URL'), 'Code': pretmpfilename.get('Code'), 'Reason': pretmpfilename.get('Reason'), 'HTTPLib': pretmpfilename.get('HTTPLib')}
+    if(outfile == "-"):
+        pretmpfilename = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer,
+                                                httpcookie, httpmethod, postdata, httplibuse, ranges, buffersize[0], sleep, timeout)
         tmpfilename = pretmpfilename.get('Filename')
         downloadsize = int(os.path.getsize(tmpfilename))
         fulldatasize = 0
@@ -2636,31 +2005,12 @@ def download_from_url_to_file(
             datasize = len(databytes)
             fulldatasize = datasize + fulldatasize
             percentage = ""
-            if (downloadsize > 0):
-                percentage = str(
-                    "{0:.2f}".format(
-                        float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.') + "%"
+            if(downloadsize > 0):
+                percentage = str("{0:.2f}".format(
+                    float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%"
             downloaddiff = fulldatasize - prevdownsize
-            log.info(
-                "Copying " + get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix'] +
-                " / " + get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix'] +
-                " " + str(percentage) +
-                " / Copied " + get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix'])
+            log.info("Copying "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")[
+                'ReadableWithSuffix']+" "+str(percentage)+" / Copied "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix'])
             prevdownsize = fulldatasize
             f.write(databytes)
         f.seek(0)
@@ -2669,1835 +2019,372 @@ def download_from_url_to_file(
         ft.close()
         os.remove(tmpfilename)
         exec_time_end = time.time()
-        log.info("It took " + hms_string(exec_time_start - exec_time_end) + " to copy file.")
-        returnval = {
-            'Type': "Content", 'Content': fdata, 'Contentsize': downloadsize,
-            'ContentsizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"),
-                               'SI': get_readable_size(downloadsize, 2, "SI")},
-            'DownloadTime': pretmpfilename.get('DownloadTime'),
-            'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'),
-            'MoveFileTime': float(exec_time_start - exec_time_end),
-            'MoveFileTimeReadable': hms_string(exec_time_start - exec_time_end),
-            'Headers': pretmpfilename.get('Headers'), 'Version': pretmpfilename.get('Version'),
-            'Method': pretmpfilename.get('Method'), 'Method': httpmethod,
-            'HeadersSent': pretmpfilename.get('HeadersSent'), 'URL': pretmpfilename.get('URL'),
-            'Code': pretmpfilename.get('Code'), 'Reason': pretmpfilename.get('Reason'),
-            'HTTPLib': httplibuse}
+        log.info("It took "+hms_string(exec_time_start -
+                                       exec_time_end)+" to copy file.")
+        returnval = {'Type': "Content", 'Content': fdata, 'Contentsize': downloadsize, 'ContentsizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename.get('DownloadTime'), 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), 'MoveFileTime': float(exec_time_start - exec_time_end), 'MoveFileTimeReadable': hms_string(
+            exec_time_start - exec_time_end), 'Headers': pretmpfilename.get('Headers'), 'Version': pretmpfilename.get('Version'), 'Method': pretmpfilename.get('Method'), 'Method': httpmethod, 'HeadersSent': pretmpfilename.get('HeadersSent'), 'URL': pretmpfilename.get('URL'), 'Code': pretmpfilename.get('Code'), 'Reason': pretmpfilename.get('Reason'), 'HTTPLib': httplibuse}
     return returnval
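The hunk above preserves the two call modes of `download_from_url_to_file`: with a real `outfile` the temporary download is moved into `outpath` and its timestamps applied; with `outfile="-"` the payload is copied back into memory and returned under `'Content'`. A sketch of both, with the `pywwwget` import name assumed from the patch's file list:

    import pywwwget

    # Save to disk: the temporary file is moved to outpath/outfile.
    saved = pywwwget.download_from_url_to_file(
        "https://example.com/a.tar.gz", outfile="a.tar.gz", outpath="/tmp/downloads")

    # outfile="-" keeps the payload in memory under the 'Content' key.
    inmem = pywwwget.download_from_url_to_file("https://example.com/a.tar.gz", outfile="-")
    if inmem:
        data = inmem['Content']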


-def download_from_url_to_file_with_list(
-        httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None,
-        httpcookie=geturls_cj, httpmethod="GET", postdata=None, httplibuse="urllib",
-        outfile="-", outpath=os.getcwd(), ranges=[None, None],
-        buffersize=[524288, 524288], sleep=-1, timeout=10):
-    if (isinstance(httpurl, list)):
+def download_from_url_to_file_with_list(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, httplibuse="urllib", outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10):
+    if(isinstance(httpurl, list)):
         pass
-    elif (isinstance(httpurl, tuple)):
+    elif(isinstance(httpurl, tuple)):
         pass
-    elif (isinstance(httpurl, dict)):
+    elif(isinstance(httpurl, dict)):
         httpurl = httpurl.values()
     else:
         httpurl = [httpurl]
     listsize = len(httpurl)
     listcount = 0
     returnval = []
-    while (listcount < listsize):
-        ouputval = download_from_url_to_file(
-            httpurl[listcount], httpheaders, httpuseragent, httpreferer, httpcookie,
-            httpmethod, postdata, httplibuse, outfile, outpath, ranges, buffersize,
-            sleep, timeout)
+    while(listcount < listsize):
+        ouputval = download_from_url_to_file(httpurl[listcount], httpheaders, httpuseragent, httpreferer,
+                                             httpcookie, httpmethod, postdata, httplibuse, outfile, outpath, ranges, buffersize, sleep, timeout)
         returnval.append(ouputval)
         listcount += 1
     return returnval


-def download_from_url_with_urllib(
-        httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None,
-        httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288,
-        sleep=-1, timeout=10):
-    returnval = download_from_url(
-        httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod,
-        postdata, "urllib", buffersize, sleep, timeout)
+def download_from_url_with_urllib(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
+    returnval = download_from_url(httpurl, httpheaders, httpuseragent, httpreferer,
+                                  httpcookie, httpmethod, postdata, "urllib", buffersize, sleep, timeout)
     return returnval


-def download_from_url_with_request(
-        httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None,
-        httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288,
-        sleep=-1, timeout=10):
-    returnval = download_from_url(
-        httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod,
-        postdata, "urllib", buffersize, sleep, timeout)
+def download_from_url_with_request(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
+    returnval = download_from_url(httpurl, httpheaders, httpuseragent, httpreferer,
+                                  httpcookie, httpmethod, postdata, "urllib", buffersize, sleep, timeout)
     return returnval
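Note that the `*_with_list` variants accept a list, a tuple, a dict (only its values are used), or a single URL. A hypothetical batch call, again assuming the `pywwwget` import name:

    import pywwwget

    results = pywwwget.download_from_url_to_file_with_list(
        ["https://example.com/a.txt", "https://example.com/b.txt"], outfile="-")
    for r in results:
        print(r['URL'], r['Code'])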


-def download_from_url_with_request3(
-        httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None,
-        httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288,
-        sleep=-1, timeout=10):
-    returnval = download_from_url(
-        httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod,
-        postdata, "request3", buffersize, sleep, timeout)
+def download_from_url_with_request3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
+    returnval = download_from_url(httpurl, httpheaders, httpuseragent, httpreferer,
+                                  httpcookie, httpmethod, postdata, "request3", buffersize, sleep, timeout)
     return returnval


-def download_from_url_with_httplib(
-        httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None,
-        httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288,
-        sleep=-1, timeout=10):
-    returnval = download_from_url(
-        httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod,
-        postdata, "httplib", buffersize, sleep, timeout)
+def download_from_url_with_httplib(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
+    returnval = download_from_url(httpurl, httpheaders, httpuseragent, httpreferer,
+                                  httpcookie, httpmethod, postdata, "httplib", buffersize, sleep, timeout)
     return returnval


-def download_from_url_with_httplib2(
-        httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None,
-        httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288,
-        sleep=-1, timeout=10):
-    returnval = download_from_url(
-        httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod,
-        postdata, "httplib2", buffersize, sleep, timeout)
+def download_from_url_with_httplib2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
+    returnval = download_from_url(httpurl, httpheaders, httpuseragent, httpreferer,
+                                  httpcookie, httpmethod, postdata, "httplib2", buffersize, sleep, timeout)
     return returnval


-def download_from_url_with_urllib3(
-        httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None,
-        httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288,
-        sleep=-1, timeout=10):
-    returnval = download_from_url(
-        httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod,
-        postdata, "urllib3", buffersize, sleep, timeout)
+def download_from_url_with_urllib3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
+    returnval = download_from_url(httpurl, httpheaders, httpuseragent, httpreferer,
+                                  httpcookie, httpmethod, postdata, "urllib3", buffersize, sleep, timeout)
     return returnval


-def download_from_url_with_requests(
-        httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None,
-        httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288,
-        sleep=-1, timeout=10):
-    returnval = download_from_url(
-        httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod,
-        postdata, "requests", buffersize, sleep, timeout)
+def download_from_url_with_requests(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
+    returnval = download_from_url(httpurl, httpheaders, httpuseragent, httpreferer,
+                                  httpcookie, httpmethod, postdata, "requests", buffersize, sleep, timeout)
     return returnval


-def download_from_url_with_aiohttp(
-        httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None,
-        httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288,
-        sleep=-1, timeout=10):
-    returnval = download_from_url(
-        httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod,
-        postdata, "aiohttp", buffersize, sleep, timeout)
+def download_from_url_with_aiohttp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
+    returnval = download_from_url(httpurl, httpheaders, httpuseragent, httpreferer,
+                                  httpcookie, httpmethod, postdata, "aiohttp", buffersize, sleep, timeout)
     return returnval


-def download_from_url_with_httpx(
-        httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None,
-        httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288,
-        sleep=-1, timeout=10):
-    returnval = download_from_url(
-        httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod,
-        postdata, "httpx", buffersize, sleep, timeout)
+def download_from_url_with_httpx(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
+    returnval = download_from_url(httpurl, httpheaders, httpuseragent, httpreferer,
+                                  httpcookie, httpmethod, postdata, "httpx", buffersize, sleep, timeout)
     return returnval


-def download_from_url_with_httpx2(
-        httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None,
-        httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288,
-        sleep=-1, timeout=10):
-    returnval = download_from_url(
-        httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod,
-        postdata, "httpx2", buffersize, sleep, timeout)
+def download_from_url_with_httpx2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
+    returnval = download_from_url(httpurl, httpheaders, httpuseragent, httpreferer,
+                                  httpcookie, httpmethod, postdata, "httpx2", buffersize, sleep, timeout)
     return returnval


-def download_from_url_with_httpcore(
-        httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None,
-        httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288,
-        sleep=-1, timeout=10):
-    returnval = download_from_url(
-        httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod,
-        postdata, "httpcore", buffersize, sleep, timeout)
+def download_from_url_with_httpcore(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
+    returnval = download_from_url(httpurl, httpheaders, httpuseragent, httpreferer,
+                                  httpcookie, httpmethod, postdata, "httpcore", buffersize, sleep, timeout)
     return returnval


-def download_from_url_with_httpcore2(
-        httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None,
-        httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288,
-        sleep=-1, timeout=10):
-    returnval = download_from_url(
-        httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod,
-        postdata, "httpcore2", buffersize, sleep, timeout)
+def download_from_url_with_httpcore2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
+    returnval = download_from_url(httpurl, httpheaders, httpuseragent, httpreferer,
+                                  httpcookie, httpmethod, postdata, "httpcore2", buffersize, sleep, timeout)
     return returnval


-def download_from_url_with_mechanize(
-        httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None,
-        httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288,
-        sleep=-1, timeout=10):
-    returnval = download_from_url(
-        httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod,
-        postdata, "mechanize", buffersize, sleep, timeout)
+def download_from_url_with_mechanize(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
+    returnval = download_from_url(httpurl, httpheaders, httpuseragent, httpreferer,
+                                  httpcookie, httpmethod, postdata, "mechanize", buffersize, sleep, timeout)
     return returnval


-def download_from_url_with_pycurl(
-        httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None,
-        httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288,
-        sleep=-1, timeout=10):
-    returnval = download_from_url(
-        httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod,
-        postdata, "pycurl", buffersize, sleep, timeout)
+def download_from_url_with_pycurl(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
+    returnval = download_from_url(httpurl, httpheaders, httpuseragent, httpreferer,
+                                  httpcookie, httpmethod, postdata, "pycurl", buffersize, sleep, timeout)
     return returnval


-def download_from_url_with_pycurl2(
-        httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None,
-        httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288,
-        sleep=-1, timeout=10):
-    returnval = download_from_url(
-        httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod,
-        postdata, "pycurl2", buffersize, sleep, timeout)
+def download_from_url_with_pycurl2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
+    returnval = download_from_url(httpurl, httpheaders, httpuseragent, httpreferer,
+                                  httpcookie, httpmethod, postdata, "pycurl2", buffersize, sleep, timeout)
     return returnval


-def download_from_url_with_pycurl3(
-        httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None,
-        httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288,
-        sleep=-1, timeout=10):
-    returnval = download_from_url(
-        httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod,
-        postdata, "pycurl3", buffersize, sleep, timeout)
+def download_from_url_with_pycurl3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
+    returnval = download_from_url(httpurl, httpheaders, httpuseragent, httpreferer,
+                                  httpcookie, httpmethod, postdata, "pycurl3", buffersize, sleep, timeout)
     return returnval


-def download_from_url_with_ftp(
-        httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None,
-        httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288,
-        sleep=-1, timeout=10):
-    returnval = download_from_url(
-        httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod,
-        postdata, "ftp", buffersize, sleep, timeout)
+def download_from_url_with_ftp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
+    returnval = download_from_url(httpurl, httpheaders, httpuseragent, httpreferer,
+                                  httpcookie, httpmethod, postdata, "ftp", buffersize, sleep, timeout)
     return returnval


-def download_from_url_with_sftp(
-        httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None,
-        httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288,
-        sleep=-1, timeout=10):
-    returnval = download_from_url(
-        httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod,
-        postdata, "sftp", buffersize, sleep, timeout)
+def download_from_url_with_sftp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
+    returnval = download_from_url(httpurl, httpheaders, httpuseragent, httpreferer,
+                                  httpcookie, httpmethod, postdata, "sftp", buffersize, sleep, timeout)
     return returnval


-def download_from_url_with_pysftp(
-        httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None,
-        httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288,
-        sleep=-1, timeout=10):
-    returnval = download_from_url(
-        httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod,
-        postdata, "pysftp", buffersize, sleep, timeout)
+def download_from_url_with_pysftp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
+    returnval = download_from_url(httpurl, httpheaders, httpuseragent, httpreferer,
+                                  httpcookie, httpmethod, postdata, "pysftp", buffersize, sleep, timeout)
    return returnval


-def download_from_url_file_with_urllib(
-        httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None,
-        httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None],
-        buffersize=524288, sleep=-1, timeout=10):
-    returnval = download_from_url_file(
-        httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod,
-        postdata, "urllib", ranges, buffersize, sleep, timeout)
+def download_from_url_file_with_urllib(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
+    returnval = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer,
+                                       httpcookie, httpmethod, postdata, "urllib", ranges, buffersize, sleep, timeout)
     return returnval


-def download_from_url_file_with_request(
-        httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None,
-        httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None],
-        buffersize=524288, sleep=-1, timeout=10):
-    returnval = download_from_url_file(
-        httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod,
-        postdata, "urllib", ranges, buffersize, sleep, timeout)
+def download_from_url_file_with_request(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
+    returnval = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer,
+                                       httpcookie, httpmethod, postdata, "urllib", ranges, buffersize, sleep, timeout)
     return returnval


-def download_from_url_file_with_request3(
-        httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None,
-        httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None],
-        buffersize=524288, sleep=-1, timeout=10):
-    returnval = download_from_url_file(
-        httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod,
-        postdata, "request3", ranges, buffersize, sleep, timeout)
+def download_from_url_file_with_request3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
+    returnval = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer,
+                                       httpcookie, httpmethod, postdata, "request3", ranges, buffersize, sleep, timeout)
     return returnval


-def download_from_url_file_with_httplib(
-        httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None,
-        httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None],
-        buffersize=524288, sleep=-1, timeout=10):
-    returnval = download_from_url_file(
-        httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod,
-        postdata, "httplib", ranges, buffersize, sleep, timeout)
+def download_from_url_file_with_httplib(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
+    returnval = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer,
+                                       httpcookie, httpmethod, postdata, "httplib", ranges, buffersize, sleep, timeout)
     return returnval


-def download_from_url_file_with_httplib2(
-        httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None,
-        httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None],
-        buffersize=524288, sleep=-1, timeout=10):
-    returnval = download_from_url_file(
-        httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod,
-        postdata, "httplib2", ranges, buffersize, sleep, timeout)
+def download_from_url_file_with_httplib2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
+    returnval = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer,
+                                       httpcookie, httpmethod, postdata, "httplib2", ranges, buffersize, sleep, timeout)
     return returnval


-def download_from_url_file_with_urllib3(
-        httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None,
-        httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None],
-        buffersize=524288, sleep=-1, timeout=10):
-    returnval = download_from_url_file(
-        httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod,
-        postdata, "urllib3", ranges, buffersize, sleep, timeout)
+def download_from_url_file_with_urllib3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
+    returnval = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer,
+                                       httpcookie, httpmethod, postdata, "urllib3", ranges, buffersize, sleep, timeout)
     return returnval


-def download_from_url_file_with_requests(
-        httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None,
-        httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None],
-        buffersize=524288, sleep=-1, timeout=10):
-    returnval = download_from_url_file(
-        httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod,
-        postdata, "requests", ranges, buffersize, sleep, timeout)
+def download_from_url_file_with_requests(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
+    returnval = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer,
+                                       httpcookie, httpmethod, postdata, "requests", ranges, buffersize, sleep, timeout)
     return returnval


-def download_from_url_file_with_aiohttp(
-        httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None,
-        httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None],
-        buffersize=524288, sleep=-1, timeout=10):
-    returnval = download_from_url_file(
-        httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod,
-        postdata, "aiohttp", ranges, buffersize, sleep, timeout)
+def download_from_url_file_with_aiohttp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
+    returnval = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer,
+                                       httpcookie, httpmethod, postdata, "aiohttp", ranges, buffersize, sleep, timeout)
     return returnval


-def download_from_url_file_with_httpx(
-        httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None,
-        httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None],
-        buffersize=524288, sleep=-1, timeout=10):
-    returnval = download_from_url_file(
-        httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod,
-        postdata, "httpx", ranges, buffersize, sleep, timeout)
+def download_from_url_file_with_httpx(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
+    returnval = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer,
+                                       httpcookie, httpmethod, postdata, "httpx", ranges, buffersize, sleep, timeout)
     return returnval


-def download_from_url_file_with_httpx2(
-        httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None,
-        httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None],
-        buffersize=524288, sleep=-1, timeout=10):
-    returnval = download_from_url_file(
-        httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod,
-        postdata, "httpx2", ranges, buffersize, sleep, timeout)
+def download_from_url_file_with_httpx2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
+    returnval = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer,
+                                       httpcookie, httpmethod, postdata, "httpx2", ranges, buffersize, sleep, timeout)
     return returnval


-def download_from_url_file_with_httpcore(
-        httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None,
-        httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None],
-        buffersize=524288, sleep=-1, timeout=10):
-    returnval = download_from_url_file(
-        httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod,
-        postdata, "httpcore", ranges, buffersize, sleep, timeout)
+def download_from_url_file_with_httpcore(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
+    returnval = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer,
+                                       httpcookie, httpmethod, postdata, "httpcore", ranges, buffersize, sleep, timeout)
     return returnval


-def download_from_url_file_with_httpcore2(
-        httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None,
-        httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None],
-        buffersize=524288, sleep=-1, timeout=10):
-    returnval = download_from_url_file(
-        httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod,
-        postdata, "httpcore2", ranges, buffersize, sleep, timeout)
+def download_from_url_file_with_httpcore2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
+    returnval = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer,
+                                       httpcookie, httpmethod, postdata, "httpcore2", ranges, buffersize, sleep, timeout)
     return returnval


-def download_from_url_file_with_mechanize(
-        httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None,
-        httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None],
-        buffersize=524288, sleep=-1, timeout=10):
-    returnval = download_from_url_file(
-        httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod,
-        postdata, "mechanize", ranges, buffersize, sleep, timeout)
+def download_from_url_file_with_mechanize(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
+    returnval = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer,
+                                       httpcookie, httpmethod, postdata, "mechanize", ranges, buffersize, sleep, timeout)
     return returnval


-def download_from_url_file_with_pycurl(
-        httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None,
-        httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None],
-        buffersize=524288, sleep=-1, timeout=10):
-    returnval = download_from_url_file(
-        httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod,
-        postdata, "pycurl", ranges, buffersize, sleep, timeout)
+def download_from_url_file_with_pycurl(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
+    returnval = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer,
+                                       httpcookie, httpmethod, postdata, "pycurl", ranges, buffersize, sleep, timeout)
     return returnval


-def download_from_url_file_with_pycurl2(
-        httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None,
-        httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None],
-        buffersize=524288, sleep=-1, timeout=10):
-    returnval = download_from_url_file(
-        httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod,
-        postdata, "pycurl2", ranges, buffersize, sleep, timeout)
+def download_from_url_file_with_pycurl2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
+    returnval = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer,
+                                       httpcookie, httpmethod, postdata, "pycurl2", ranges, buffersize, sleep, timeout)
     return returnval


-def download_from_url_file_with_pycurl3(
-        httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None,
-        httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None],
-        buffersize=524288, sleep=-1, timeout=10):
-    returnval = download_from_url_file(
-        httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod,
-        postdata, "pycurl3", ranges, buffersize, sleep, timeout)
+def download_from_url_file_with_pycurl3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
+    returnval = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer,
+                                       httpcookie, httpmethod, postdata, "pycurl3", ranges, buffersize, sleep, timeout)
     return returnval


-def download_from_url_file_with_ftp(
-        httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None,
-        httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None],
-        buffersize=524288, sleep=-1, timeout=10):
-    returnval = download_from_url_file(
-        httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod,
-        postdata, "ftp", ranges, buffersize, sleep, timeout)
+def download_from_url_file_with_ftp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
+    returnval = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer,
+                                       httpcookie, httpmethod, postdata, "ftp", ranges, buffersize, sleep, timeout)
     return returnval


-def download_from_url_file_with_sftp(
-        httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None,
-        httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None],
-        buffersize=524288, sleep=-1, timeout=10):
-    returnval = download_from_url_file(
-        httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod,
-        postdata, "sftp", ranges, buffersize, sleep, timeout)
+def download_from_url_file_with_sftp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
+    returnval = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer,
+                                       httpcookie, httpmethod, postdata, "sftp", ranges, buffersize, sleep, timeout)
     return returnval


-def download_from_url_file_with_pysftp(
-        httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None,
-        httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None],
-        buffersize=524288, sleep=-1, timeout=10):
-    returnval = download_from_url_file(
-        httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod,
-        postdata, "pysftp", ranges, buffersize, sleep, timeout)
+def download_from_url_file_with_pysftp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
+    returnval = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer,
+                                       httpcookie, httpmethod, postdata, "pysftp", ranges, buffersize, sleep, timeout)
     return returnval


-def download_from_url_to_file_with_urllib(
-        httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None,
-        httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-",
-        outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288],
-        sleep=-1, timeout=10):
-    returnval = download_from_url_to_file(
-        httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod,
-        postdata, "urllib", outfile, outpath, ranges, buffersize, sleep, timeout)
+def download_from_url_to_file_with_urllib(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10):
+    returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie,
+                                          httpmethod, postdata, "urllib", outfile, outpath, ranges, buffersize, sleep, timeout)
     return returnval
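One reading aid for the `*_to_file_with_<lib>` family that begins here: `ranges` is forwarded as a `[start, end]` pair (`[None, None]` meaning the whole resource), and `buffersize` is a two-element list whose first element feeds the network download (`buffersize[0]` above); the second element presumably sizes the local copy loop, though that is an inference, not stated by this patch. A hypothetical call:

    res = download_from_url_to_file_with_requests(
        "https://example.com/big.bin", outfile="big.bin", outpath="/tmp",
        ranges=[None, None], buffersize=[524288, 524288])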
"request", outfile, outpath, ranges, buffersize, sleep, timeout) return returnval -def download_from_url_to_file_with_request3( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - outfile="-", - outpath=os.getcwd(), - ranges=[ - None, - None], - buffersize=[ - 524288, - 524288], - sleep=-1, - timeout=10): - returnval = download_from_url_to_file( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - "urllib", - outfile, - outpath, - ranges, - buffersize, - sleep, - timeout) +def download_from_url_to_file_with_request3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10): + returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, + httpmethod, postdata, "urllib", outfile, outpath, ranges, buffersize, sleep, timeout) return returnval -def download_from_url_to_file_with_httplib( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - outfile="-", - outpath=os.getcwd(), - ranges=[ - None, - None], - buffersize=[ - 524288, - 524288], - sleep=-1, - timeout=10): - returnval = download_from_url_to_file( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - "httplib", - outfile, - outpath, - ranges, - buffersize, - sleep, - timeout) +def download_from_url_to_file_with_httplib(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10): + returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, + httpmethod, postdata, "httplib", outfile, outpath, ranges, buffersize, sleep, timeout) return returnval -def download_from_url_to_file_with_httplib2( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - outfile="-", - outpath=os.getcwd(), - ranges=[ - None, - None], - buffersize=[ - 524288, - 524288], - sleep=-1, - timeout=10): - returnval = download_from_url_to_file( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - "httplib2", - outfile, - outpath, - ranges, - buffersize, - sleep, - timeout) +def download_from_url_to_file_with_httplib2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10): + returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, + httpmethod, postdata, "httplib2", outfile, outpath, ranges, buffersize, sleep, timeout) return returnval -def download_from_url_to_file_with_urllib3( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - outfile="-", - outpath=os.getcwd(), - ranges=[ - None, - None], - buffersize=[ - 524288, - 524288], - sleep=-1, - timeout=10): - returnval = download_from_url_to_file( - httpurl, - 
httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - "urllib3", - outfile, - outpath, - ranges, - buffersize, - sleep, - timeout) +def download_from_url_to_file_with_urllib3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10): + returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, + httpmethod, postdata, "urllib3", outfile, outpath, ranges, buffersize, sleep, timeout) return returnval -def download_from_url_to_file_with_requests( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - outfile="-", - outpath=os.getcwd(), - ranges=[ - None, - None], - buffersize=[ - 524288, - 524288], - sleep=-1, - timeout=10): - returnval = download_from_url_to_file( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - "requests", - outfile, - outpath, - ranges, - buffersize, - sleep, - timeout) +def download_from_url_to_file_with_requests(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10): + returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, + httpmethod, postdata, "requests", outfile, outpath, ranges, buffersize, sleep, timeout) return returnval -def download_from_url_to_file_with_aiohttp( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - outfile="-", - outpath=os.getcwd(), - ranges=[ - None, - None], - buffersize=[ - 524288, - 524288], - sleep=-1, - timeout=10): - returnval = download_from_url_to_file( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - "aiohttp", - outfile, - outpath, - ranges, - buffersize, - sleep, - timeout) +def download_from_url_to_file_with_aiohttp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10): + returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, + httpmethod, postdata, "aiohttp", outfile, outpath, ranges, buffersize, sleep, timeout) return returnval -def download_from_url_to_file_with_httpx( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - outfile="-", - outpath=os.getcwd(), - ranges=[ - None, - None], - buffersize=[ - 524288, - 524288], - sleep=-1, - timeout=10): - returnval = download_from_url_to_file( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - "httpx", - outfile, - outpath, - ranges, - buffersize, - sleep, - timeout) +def download_from_url_to_file_with_httpx(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10): + returnval = 
download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, + httpmethod, postdata, "httpx", outfile, outpath, ranges, buffersize, sleep, timeout) return returnval -def download_from_url_to_file_with_httpx2( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - outfile="-", - outpath=os.getcwd(), - ranges=[ - None, - None], - buffersize=[ - 524288, - 524288], - sleep=-1, - timeout=10): - returnval = download_from_url_to_file( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - "httpx2", - outfile, - outpath, - ranges, - buffersize, - sleep, - timeout) +def download_from_url_to_file_with_httpx2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10): + returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, + httpmethod, postdata, "httpx2", outfile, outpath, ranges, buffersize, sleep, timeout) return returnval -def download_from_url_to_file_with_httpcore( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - outfile="-", - outpath=os.getcwd(), - ranges=[ - None, - None], - buffersize=[ - 524288, - 524288], - sleep=-1, - timeout=10): - returnval = download_from_url_to_file( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - "httpcore", - outfile, - outpath, - ranges, - buffersize, - sleep, - timeout) +def download_from_url_to_file_with_httpcore(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10): + returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, + httpmethod, postdata, "httpcore", outfile, outpath, ranges, buffersize, sleep, timeout) return returnval -def download_from_url_to_file_with_httpcore2( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - outfile="-", - outpath=os.getcwd(), - ranges=[ - None, - None], - buffersize=[ - 524288, - 524288], - sleep=-1, - timeout=10): - returnval = download_from_url_to_file( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - "httpcore2", - outfile, - outpath, - ranges, - buffersize, - sleep, - timeout) +def download_from_url_to_file_with_httpcore2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10): + returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, + httpmethod, postdata, "httpcore2", outfile, outpath, ranges, buffersize, sleep, timeout) return returnval -def download_from_url_to_file_with_mechanize( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - outfile="-", - outpath=os.getcwd(), - ranges=[ - None, - None], - 
buffersize=[ - 524288, - 524288], - sleep=-1, - timeout=10): - returnval = download_from_url_to_file( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - "mechanize", - outfile, - outpath, - ranges, - buffersize, - sleep, - timeout) +def download_from_url_to_file_with_mechanize(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10): + returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, + httpmethod, postdata, "mechanize", outfile, outpath, ranges, buffersize, sleep, timeout) return returnval -def download_from_url_to_file_with_pycurl( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - outfile="-", - outpath=os.getcwd(), - ranges=[ - None, - None], - buffersize=[ - 524288, - 524288], - sleep=-1, - timeout=10): - returnval = download_from_url_to_file( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - "pycurl", - outfile, - outpath, - ranges, - buffersize, - sleep, - timeout) +def download_from_url_to_file_with_pycurl(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10): + returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, + httpmethod, postdata, "pycurl", outfile, outpath, ranges, buffersize, sleep, timeout) return returnval -def download_from_url_to_file_with_pycurl2( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - outfile="-", - outpath=os.getcwd(), - ranges=[ - None, - None], - buffersize=[ - 524288, - 524288], - sleep=-1, - timeout=10): - returnval = download_from_url_to_file( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - "pycurl2", - outfile, - outpath, - ranges, - buffersize, - sleep, - timeout) +def download_from_url_to_file_with_pycurl2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10): + returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, + httpmethod, postdata, "pycurl2", outfile, outpath, ranges, buffersize, sleep, timeout) return returnval -def download_from_url_to_file_with_pycurl3( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - outfile="-", - outpath=os.getcwd(), - ranges=[ - None, - None], - buffersize=[ - 524288, - 524288], - sleep=-1, - timeout=10): - returnval = download_from_url_to_file( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - "pycurl3", - outfile, - outpath, - ranges, - buffersize, - sleep, - timeout) +def download_from_url_to_file_with_pycurl3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, 
outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10): + returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, + httpmethod, postdata, "pycurl3", outfile, outpath, ranges, buffersize, sleep, timeout) return returnval -def download_from_url_to_file_with_ftp( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - outfile="-", - outpath=os.getcwd(), - ranges=[ - None, - None], - buffersize=[ - 524288, - 524288], - sleep=-1, - timeout=10): - returnval = download_from_url_to_file( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - "ftp", - outfile, - outpath, - ranges, - buffersize, - sleep, - timeout) +def download_from_url_to_file_with_ftp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10): + returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, + httpmethod, postdata, "ftp", outfile, outpath, ranges, buffersize, sleep, timeout) return returnval -def download_from_url_to_file_with_sftp( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - outfile="-", - outpath=os.getcwd(), - ranges=[ - None, - None], - buffersize=[ - 524288, - 524288], - sleep=-1, - timeout=10): - returnval = download_from_url_to_file( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - "sftp", - outfile, - outpath, - ranges, - buffersize, - sleep, - timeout) +def download_from_url_to_file_with_sftp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10): + returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, + httpmethod, postdata, "sftp", outfile, outpath, ranges, buffersize, sleep, timeout) return returnval -def download_from_url_to_file_with_pysftp( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - outfile="-", - outpath=os.getcwd(), - ranges=[ - None, - None], - buffersize=[ - 524288, - 524288], - sleep=-1, - timeout=10): - returnval = download_from_url_to_file( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - "pysftp", - outfile, - outpath, - ranges, - buffersize, - sleep, - timeout) +def download_from_url_to_file_with_pysftp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10): + returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, + httpmethod, postdata, "pysftp", outfile, outpath, ranges, buffersize, sleep, timeout) return returnval @@ -4505,41 +2392,41 @@ def download_file_from_ftp_file(url): urlparts = urlparse.urlparse(url) file_name = os.path.basename(urlparts.path) file_dir = os.path.dirname(urlparts.path) - 
if (urlparts.username is not None): + if(urlparts.username is not None): ftp_username = urlparts.username else: ftp_username = "anonymous" - if (urlparts.password is not None): + if(urlparts.password is not None): ftp_password = urlparts.password - elif (urlparts.password is None and urlparts.username == "anonymous"): + elif(urlparts.password is None and urlparts.username == "anonymous"): ftp_password = "anonymous" else: ftp_password = "" - if (urlparts.scheme == "ftp"): + if(urlparts.scheme == "ftp"): ftp = FTP() - elif (urlparts.scheme == "ftps"): + elif(urlparts.scheme == "ftps"): ftp = FTP_TLS() else: return False - if (urlparts.scheme == "http" or urlparts.scheme == "https"): + if(urlparts.scheme == "http" or urlparts.scheme == "https"): return False ftp_port = urlparts.port - if (urlparts.port is None): + if(urlparts.port is None): ftp_port = 21 try: ftp.connect(urlparts.hostname, ftp_port) except socket.gaierror: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False except socket.timeout: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False ftp.login(urlparts.username, urlparts.password) - if (urlparts.scheme == "ftps"): + if(urlparts.scheme == "ftps"): ftp.prot_p() ftpfile = BytesIO() - ftp.retrbinary("RETR " + urlparts.path, ftpfile.write) - # ftp.storbinary("STOR "+urlparts.path, ftpfile.write); + ftp.retrbinary("RETR "+urlparts.path, ftpfile.write) + #ftp.storbinary("STOR "+urlparts.path, ftpfile.write); ftp.close() ftpfile.seek(0, 0) return ftpfile @@ -4554,39 +2441,39 @@ def upload_file_to_ftp_file(ftpfile, url): urlparts = urlparse.urlparse(url) file_name = os.path.basename(urlparts.path) file_dir = os.path.dirname(urlparts.path) - if (urlparts.username is not None): + if(urlparts.username is not None): ftp_username = urlparts.username else: ftp_username = "anonymous" - if (urlparts.password is not None): + if(urlparts.password is not None): ftp_password = urlparts.password - elif (urlparts.password is None and urlparts.username == "anonymous"): + elif(urlparts.password is None and urlparts.username == "anonymous"): ftp_password = "anonymous" else: ftp_password = "" - if (urlparts.scheme == "ftp"): + if(urlparts.scheme == "ftp"): ftp = FTP() - elif (urlparts.scheme == "ftps"): + elif(urlparts.scheme == "ftps"): ftp = FTP_TLS() else: return False - if (urlparts.scheme == "http" or urlparts.scheme == "https"): + if(urlparts.scheme == "http" or urlparts.scheme == "https"): return False ftp_port = urlparts.port - if (urlparts.port is None): + if(urlparts.port is None): ftp_port = 21 try: ftp.connect(urlparts.hostname, ftp_port) except socket.gaierror: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False except socket.timeout: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False ftp.login(urlparts.username, urlparts.password) - if (urlparts.scheme == "ftps"): + if(urlparts.scheme == "ftps"): ftp.prot_p() - ftp.storbinary("STOR " + urlparts.path, ftpfile) + ftp.storbinary("STOR "+urlparts.path, ftpfile) ftp.close() ftpfile.seek(0, 0) return ftpfile @@ -4599,29 +2486,29 @@ def upload_file_to_ftp_string(ftpstring, url): return ftpfile -if (haveparamiko): +if(haveparamiko): def download_file_from_sftp_file(url): urlparts = urlparse.urlparse(url) file_name = os.path.basename(urlparts.path) file_dir = os.path.dirname(urlparts.path) - if (urlparts.scheme == "http" or urlparts.scheme == "https"): + if(urlparts.scheme == "http" or 
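# --- Editor's sketch (illustrative, not part of the patch) -------------------
# The core of download_file_from_ftp_file() above is ftplib's retrbinary()
# writing each block into an in-memory BytesIO, which is then rewound for the
# caller. Stand-alone version with assumed host/credential parameters.
# (Note: the patch's except handlers log an undefined name httpurl; the
# function's parameter is url.)
from ftplib import FTP
from io import BytesIO

def fetch_ftp_to_memory(host, path, user="anonymous", password="anonymous",
                        port=21, timeout=10):
    buf = BytesIO()
    ftp = FTP()
    ftp.connect(host, port, timeout=timeout)
    ftp.login(user, password)
    ftp.retrbinary("RETR " + path, buf.write)  # callback gets each data block
    ftp.quit()
    buf.seek(0, 0)  # rewind so the caller reads from the start
    return buf
# e.g. data = fetch_ftp_to_memory("ftp.example.com", "/pub/file.txt").read()
# ------------------------------------------------------------------------------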
urlparts.scheme == "https"): return False sftp_port = urlparts.port - if (urlparts.port is None): + if(urlparts.port is None): sftp_port = 22 else: sftp_port = urlparts.port - if (urlparts.username is not None): + if(urlparts.username is not None): sftp_username = urlparts.username else: sftp_username = "anonymous" - if (urlparts.password is not None): + if(urlparts.password is not None): sftp_password = urlparts.password - elif (urlparts.password is None and urlparts.username == "anonymous"): + elif(urlparts.password is None and urlparts.username == "anonymous"): sftp_password = "anonymous" else: sftp_password = "" - if (urlparts.scheme != "sftp"): + if(urlparts.scheme != "sftp"): return False ssh = paramiko.SSHClient() ssh.load_system_host_keys() @@ -4632,10 +2519,10 @@ def download_file_from_sftp_file(url): except paramiko.ssh_exception.SSHException: return False except socket.gaierror: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False except socket.timeout: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False sftp = ssh.open_sftp() sftpfile = BytesIO() @@ -4648,7 +2535,7 @@ def download_file_from_sftp_file(url): def download_file_from_sftp_file(url): return False -if (haveparamiko): +if(haveparamiko): def download_file_from_sftp_string(url): sftpfile = download_file_from_sftp_file(url) return sftpfile.read() @@ -4656,29 +2543,29 @@ def download_file_from_sftp_string(url): def download_file_from_ftp_string(url): return False -if (haveparamiko): +if(haveparamiko): def upload_file_to_sftp_file(sftpfile, url): urlparts = urlparse.urlparse(url) file_name = os.path.basename(urlparts.path) file_dir = os.path.dirname(urlparts.path) sftp_port = urlparts.port - if (urlparts.scheme == "http" or urlparts.scheme == "https"): + if(urlparts.scheme == "http" or urlparts.scheme == "https"): return False - if (urlparts.port is None): + if(urlparts.port is None): sftp_port = 22 else: sftp_port = urlparts.port - if (urlparts.username is not None): + if(urlparts.username is not None): sftp_username = urlparts.username else: sftp_username = "anonymous" - if (urlparts.password is not None): + if(urlparts.password is not None): sftp_password = urlparts.password - elif (urlparts.password is None and urlparts.username == "anonymous"): + elif(urlparts.password is None and urlparts.username == "anonymous"): sftp_password = "anonymous" else: sftp_password = "" - if (urlparts.scheme != "sftp"): + if(urlparts.scheme != "sftp"): return False ssh = paramiko.SSHClient() ssh.load_system_host_keys() @@ -4689,10 +2576,10 @@ def upload_file_to_sftp_file(sftpfile, url): except paramiko.ssh_exception.SSHException: return False except socket.gaierror: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False except socket.timeout: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False sftp = ssh.open_sftp() sftp.putfo(sftpfile, urlparts.path) @@ -4704,7 +2591,7 @@ def upload_file_to_sftp_file(sftpfile, url): def upload_file_to_sftp_file(sftpfile, url): return False -if (haveparamiko): +if(haveparamiko): def upload_file_to_sftp_string(sftpstring, url): sftpfileo = BytesIO(sftpstring) sftpfile = upload_file_to_sftp_files(ftpfileo, url) @@ -4715,43 +2602,40 @@ def upload_file_to_sftp_string(url): return False -if (havepysftp): +if(havepysftp): def download_file_from_pysftp_file(url): urlparts = urlparse.urlparse(url) file_name = os.path.basename(urlparts.path) file_dir = 
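# --- Editor's sketch (illustrative) -------------------------------------------
# The paramiko path above reduces to SSHClient -> open_sftp() -> getfo() into
# a BytesIO. Credentials here are assumed parameters for the sketch; the
# patch relies on load_system_host_keys() for host verification.
import paramiko
from io import BytesIO

def fetch_sftp_to_memory(host, path, username, password, port=22, timeout=10):
    ssh = paramiko.SSHClient()
    ssh.load_system_host_keys()
    ssh.connect(host, port=port, username=username,
                password=password, timeout=timeout)
    sftp = ssh.open_sftp()
    buf = BytesIO()
    sftp.getfo(path, buf)  # stream the remote file into the buffer
    sftp.close()
    ssh.close()
    buf.seek(0, 0)
    return buf
# ------------------------------------------------------------------------------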
os.path.dirname(urlparts.path) - if (urlparts.scheme == "http" or urlparts.scheme == "https"): + if(urlparts.scheme == "http" or urlparts.scheme == "https"): return False sftp_port = urlparts.port - if (urlparts.port is None): + if(urlparts.port is None): sftp_port = 22 else: sftp_port = urlparts.port - if (urlparts.username is not None): + if(urlparts.username is not None): sftp_username = urlparts.username else: sftp_username = "anonymous" - if (urlparts.password is not None): + if(urlparts.password is not None): sftp_password = urlparts.password - elif (urlparts.password is None and urlparts.username == "anonymous"): + elif(urlparts.password is None and urlparts.username == "anonymous"): sftp_password = "anonymous" else: sftp_password = "" - if (urlparts.scheme != "sftp"): + if(urlparts.scheme != "sftp"): return False try: - pysftp.Connection( - urlparts.hostname, - port=sftp_port, - username=urlparts.username, - password=urlparts.password) + pysftp.Connection(urlparts.hostname, port=sftp_port, + username=urlparts.username, password=urlparts.password) except paramiko.ssh_exception.SSHException: return False except socket.gaierror: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False except socket.timeout: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False sftp = ssh.open_sftp() sftpfile = BytesIO() @@ -4764,7 +2648,7 @@ def download_file_from_pysftp_file(url): def download_file_from_pysftp_file(url): return False -if (havepysftp): +if(havepysftp): def download_file_from_pysftp_string(url): sftpfile = download_file_from_pysftp_file(url) return sftpfile.read() @@ -4772,43 +2656,40 @@ def download_file_from_pysftp_string(url): def download_file_from_ftp_string(url): return False -if (havepysftp): +if(havepysftp): def upload_file_to_pysftp_file(sftpfile, url): urlparts = urlparse.urlparse(url) file_name = os.path.basename(urlparts.path) file_dir = os.path.dirname(urlparts.path) sftp_port = urlparts.port - if (urlparts.scheme == "http" or urlparts.scheme == "https"): + if(urlparts.scheme == "http" or urlparts.scheme == "https"): return False - if (urlparts.port is None): + if(urlparts.port is None): sftp_port = 22 else: sftp_port = urlparts.port - if (urlparts.username is not None): + if(urlparts.username is not None): sftp_username = urlparts.username else: sftp_username = "anonymous" - if (urlparts.password is not None): + if(urlparts.password is not None): sftp_password = urlparts.password - elif (urlparts.password is None and urlparts.username == "anonymous"): + elif(urlparts.password is None and urlparts.username == "anonymous"): sftp_password = "anonymous" else: sftp_password = "" - if (urlparts.scheme != "sftp"): + if(urlparts.scheme != "sftp"): return False try: - pysftp.Connection( - urlparts.hostname, - port=sftp_port, - username=urlparts.username, - password=urlparts.password) + pysftp.Connection(urlparts.hostname, port=sftp_port, + username=urlparts.username, password=urlparts.password) except paramiko.ssh_exception.SSHException: return False except socket.gaierror: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False except socket.timeout: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False sftp = ssh.open_sftp() sftp.putfo(sftpfile, urlparts.path) @@ -4820,7 +2701,7 @@ def upload_file_to_pysftp_file(sftpfile, url): def upload_file_to_pysftp_file(sftpfile, url): return False -if (havepysftp): +if(havepysftp): def 
upload_file_to_pysftp_string(sftpstring, url): sftpfileo = BytesIO(sftpstring) sftpfile = upload_file_to_pysftp_files(ftpfileo, url) diff --git a/pywwwgetold-dl.py b/pywwwgetold-dl.py index 124b536..807eff8 100644 --- a/pywwwgetold-dl.py +++ b/pywwwgetold-dl.py @@ -15,15 +15,13 @@ $FileInfo: pywwwget-dl.py - Last Update: 10/5/2023 Ver. 2.0.2 RC 1 - Author: cooldude2k $ ''' -from __future__ import absolute_import, division, print_function - -import argparse -import logging as log -import os +from __future__ import division, absolute_import, print_function import re +import os import sys - import pywwwgetold +import argparse +import logging as log __project__ = pywwwget.__project__ __program_name__ = pywwwget.__program_name__ @@ -67,17 +65,12 @@ geturls_download_sleep = pywwwget.geturls_download_sleep parser = argparse.ArgumentParser( - description="Python libary/module to download files.", - conflict_handler="resolve", - add_help=True) + description="Python libary/module to download files.", conflict_handler="resolve", add_help=True) parser.add_argument("url", help="enter a url") parser.add_argument("-V", "--version", action="version", - version=__program_name__ + " " + __version__) -parser.add_argument( - "-u", - "--update", - action="store_true", - help="update this program to latest version. Make sure that you have sufficient permissions (run with sudo if needed)") + version=__program_name__+" "+__version__) +parser.add_argument("-u", "--update", action="store_true", + help="update this program to latest version. Make sure that you have sufficient permissions (run with sudo if needed)") parser.add_argument("-d", "--dump-user-agent", action="store_true", help="display the current browser identification") parser.add_argument("-u", "--user-agent", default=geturls_ua_firefox_windows7, @@ -88,17 +81,10 @@ help="specify a file name for output") parser.add_argument("-o", "--output-directory", default=os.path.realpath( os.getcwd()), help="specify a directory to output file to") -parser.add_argument( - "-l", - "--use-httplib", - default="urllib", - help="select library to download file can be urllib or requests or mechanize") -parser.add_argument( - "-b", - "--set-buffersize", - default=524288, - type=int, - help="set how big buffersize is in bytes. how much it will download") +parser.add_argument("-l", "--use-httplib", default="urllib", + help="select library to download file can be urllib or requests or mechanize") +parser.add_argument("-b", "--set-buffersize", default=524288, type=int, + help="set how big buffersize is in bytes. 
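# --- Editor's note, as code ----------------------------------------------------
# In the pysftp branches above, the pysftp.Connection(...) result is never
# bound, after which ssh/sftp are used without being defined, and the *_string
# wrappers pass ftpfileo where sftpfileo was just created. A minimal working
# shape for the download path (illustrative only):
import pysftp
from io import BytesIO

def fetch_pysftp_to_memory(host, path, username, password, port=22):
    with pysftp.Connection(host, port=port, username=username,
                           password=password) as sftp:
        buf = BytesIO()
        sftp.getfo(path, buf)  # pysftp proxies paramiko's SFTPClient.getfo
        buf.seek(0, 0)
        return buf
# ------------------------------------------------------------------------------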
how much it will download") parser.add_argument("-t", "--timeout", default=10, type=int, help="set timeout time for http request") parser.add_argument("-s", "--sleep", default=10, type=int, @@ -107,74 +93,32 @@ help="print various debugging information") getargs = parser.parse_args() -if (not pywwwget.check_httplib_support(getargs.use_httplib)): +if(not pywwwget.check_httplib_support(getargs.use_httplib)): getargs.use_httplib = "urllib" getargs_cj = geturls_cj -getargs_headers = { - 'Referer': getargs.referer, - 'User-Agent': getargs.user_agent, - 'Accept-Encoding': "gzip, deflate", - 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", - 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", - 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", - 'Connection': "close"} +getargs_headers = {'Referer': getargs.referer, 'User-Agent': getargs.user_agent, 'Accept-Encoding': "gzip, deflate", 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", + 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"} getargs.output_directory = os.path.realpath(getargs.output_directory) -if (getargs.verbose): +if(getargs.verbose == True): log.basicConfig(format="%(levelname)s: %(message)s", level=log.DEBUG) -if (getargs.dump_user_agent): +if(getargs.dump_user_agent == True): print(getargs.user_agent) sys.exit() -if (getargs.output_document == "-"): - if (sys.version[0] == "2"): - precontstr = pywwwget.download_from_url_to_file( - getargs.url, - getargs_headers, - getargs.user_agent, - getargs.referer, - geturls_cj, - httplibuse=getargs.use_httplib, - buffersize=[ - getargs.set_buffersize, - getargs.set_buffersize], - outfile=getargs.output_document, - outpath=os.getcwd(), - sleep=getargs.sleep, - timeout=getargs.timeout) +if(getargs.output_document == "-"): + if(sys.version[0] == "2"): + precontstr = pywwwget.download_from_url_to_file(getargs.url, getargs_headers, getargs.user_agent, getargs.referer, geturls_cj, httplibuse=getargs.use_httplib, buffersize=[ + getargs.set_buffersize, getargs.set_buffersize], outfile=getargs.output_document, outpath=os.getcwd(), sleep=getargs.sleep, timeout=getargs.timeout) print(precontstr['Content']) - if (sys.version[0] >= "3"): - precontstr = pywwwget.download_from_url_to_file( - getargs.url, - getargs_headers, - getargs.user_agent, - getargs.referer, - geturls_cj, - httplibuse=getargs.use_httplib, - buffersize=[ - getargs.set_buffersize, - getargs.set_buffersize], - outfile=getargs.output_document, - outpath=os.getcwd(), - sleep=getargs.sleep, - timeout=getargs.timeout) + if(sys.version[0] >= "3"): + precontstr = pywwwget.download_from_url_to_file(getargs.url, getargs_headers, getargs.user_agent, getargs.referer, geturls_cj, httplibuse=getargs.use_httplib, buffersize=[ + getargs.set_buffersize, getargs.set_buffersize], outfile=getargs.output_document, outpath=os.getcwd(), sleep=getargs.sleep, timeout=getargs.timeout) print(precontstr['Content'].decode('ascii', 'replace')) -if (getargs.output_document != "-"): - pywwwget.download_from_url_to_file( - getargs.url, - getargs_headers, - getargs.user_agent, - getargs.referer, - geturls_cj, - httplibuse=getargs.use_httplib, - buffersize=[ - getargs.set_buffersize, - getargs.set_buffersize], - outfile=getargs.output_document, - outpath=getargs.output_directory, - sleep=getargs.sleep, - timeout=getargs.timeout) +if(getargs.output_document != "-"): + 
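# --- Editor's sketch ------------------------------------------------------------
# The stdout branch above gates on sys.version[0], the first character of a
# human-readable string. sys.version_info compares numerically and reads more
# directly; an equivalent of the script's print logic:
import sys

PY2 = sys.version_info[0] == 2

def emit(content):
    if PY2:
        print(content)  # Python 2: bytes are already str
    else:
        print(content.decode("ascii", "replace"))  # as the script decodes
# --------------------------------------------------------------------------------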
pywwwget.download_from_url_to_file(getargs.url, getargs_headers, getargs.user_agent, getargs.referer, geturls_cj, httplibuse=getargs.use_httplib, buffersize=[ + getargs.set_buffersize, getargs.set_buffersize], outfile=getargs.output_document, outpath=getargs.output_directory, sleep=getargs.sleep, timeout=getargs.timeout) diff --git a/pywwwgetold.py b/pywwwgetold.py index 6c39772..6539d5b 100755 --- a/pywwwgetold.py +++ b/pywwwgetold.py @@ -15,28 +15,26 @@ $FileInfo: pywwwgetold.py - Last Update: 10/5/2023 Ver. 2.0.2 RC 1 - Author: cooldude2k $ ''' -from __future__ import absolute_import, division, print_function - -import argparse -import bz2 -import datetime -import email.utils -import hashlib -import logging as log -import os -import platform +from __future__ import division, absolute_import, print_function import re -import shutil -import socket -import subprocess +import os import sys +import hashlib +import shutil +import platform import tempfile -import time import urllib import zlib -from base64 import b64encode +import time +import argparse +import subprocess +import socket +import email.utils +import datetime +import time +import logging as log from ftplib import FTP, FTP_TLS - +from base64 import b64encode try: from cgi import parse_qsl except ImportError: @@ -128,9 +126,9 @@ havelzma = True except ImportError: havelzma = False -if (sys.version[0] == "2"): +if(sys.version[0] == "2"): try: - from io import BytesIO, StringIO + from io import StringIO, BytesIO except ImportError: try: from cStringIO import StringIO @@ -139,26 +137,22 @@ from StringIO import StringIO from StringIO import StringIO as BytesIO # From http://python-future.org/compatible_idioms.html + from urlparse import urlparse, urlunparse, urlsplit, urlunsplit, urljoin from urllib import urlencode from urllib import urlopen as urlopenalt - - import cookielib + from urllib2 import urlopen, Request, install_opener, HTTPError, URLError, build_opener, HTTPCookieProcessor import urlparse + import cookielib from httplib import HTTPConnection, HTTPSConnection - from urllib2 import (HTTPCookieProcessor, HTTPError, Request, URLError, - build_opener, install_opener, urlopen) - from urlparse import urljoin, urlparse, urlsplit, urlunparse, urlunsplit -if (sys.version[0] >= "3"): - import http.cookiejar as cookielib +if(sys.version[0] >= "3"): + from io import StringIO, BytesIO + # From http://python-future.org/compatible_idioms.html + from urllib.parse import urlparse, urlunparse, urlsplit, urlunsplit, urljoin, urlencode + from urllib.request import urlopen, Request, install_opener, build_opener, HTTPCookieProcessor + from urllib.error import HTTPError, URLError import urllib.parse as urlparse + import http.cookiejar as cookielib from http.client import HTTPConnection, HTTPSConnection - from io import BytesIO, StringIO - from urllib.error import HTTPError, URLError - # From http://python-future.org/compatible_idioms.html - from urllib.parse import (urlencode, urljoin, urlparse, urlsplit, - urlunparse, urlunsplit) - from urllib.request import (HTTPCookieProcessor, Request, build_opener, - install_opener, urlopen) __program_name__ = "PyWWW-Get" __program_alt_name__ = "PyWWWGet" @@ -167,53 +161,50 @@ __project_url__ = "https://github.com/GameMaker2k/PyWWW-Get" __version_info__ = (2, 0, 2, "RC 1", 1) __version_date_info__ = (2023, 10, 5, "RC 1", 1) -__version_date__ = str(__version_date_info__[0]) + "." + str(__version_date_info__[ - 1]).zfill(2) + "." 
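# --- Editor's sketch ------------------------------------------------------------
# The import block above selects Python 2 or 3 names by inspecting
# sys.version[0]; try/except feature detection is the equivalent idiom,
# trimmed here to a few of the names the module actually uses:
try:
    from urllib.parse import urlparse, urlencode      # Python 3
    from urllib.request import urlopen, Request
    import http.cookiejar as cookielib
except ImportError:
    from urlparse import urlparse                     # Python 2
    from urllib import urlencode
    from urllib2 import urlopen, Request
    import cookielib
# --------------------------------------------------------------------------------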
+ str(__version_date_info__[2]).zfill(2) +__version_date__ = str(__version_date_info__[0])+"."+str(__version_date_info__[ + 1]).zfill(2)+"."+str(__version_date_info__[2]).zfill(2) __revision__ = __version_info__[3] __revision_id__ = "$Id$" -if (__version_info__[4] is not None): +if(__version_info__[4] is not None): __version_date_plusrc__ = __version_date__ + \ - "-" + str(__version_date_info__[4]) -if (__version_info__[4] is None): + "-"+str(__version_date_info__[4]) +if(__version_info__[4] is None): __version_date_plusrc__ = __version_date__ -if (__version_info__[3] is not None): - __version__ = str(__version_info__[0]) + "." + str(__version_info__[1]) + "." + str( - __version_info__[2]) + " " + str(__version_info__[3]) -if (__version_info__[3] is None): - __version__ = str(__version_info__[ - 0]) + "." + str(__version_info__[1]) + "." + str(__version_info__[2]) +if(__version_info__[3] is not None): + __version__ = str(__version_info__[0])+"."+str(__version_info__[1])+"."+str( + __version_info__[2])+" "+str(__version_info__[3]) +if(__version_info__[3] is None): + __version__ = str( + __version_info__[0])+"."+str(__version_info__[1])+"."+str(__version_info__[2]) tmpfileprefix = "py" + \ - str(sys.version_info[0]) + __program_small_name__ + \ - str(__version_info__[0]) + "-" + str(sys.version_info[0])+__program_small_name__ + \ + str(__version_info__[0])+"-" tmpfilesuffix = "-" pytempdir = tempfile.gettempdir() PyBitness = platform.architecture() -if (PyBitness == "32bit" or PyBitness == "32"): +if(PyBitness == "32bit" or PyBitness == "32"): PyBitness = "32" -elif (PyBitness == "64bit" or PyBitness == "64"): +elif(PyBitness == "64bit" or PyBitness == "64"): PyBitness = "64" else: PyBitness = "32" compression_supported_list = ['identity', 'gzip', 'deflate', 'bzip2'] -if (havebrotli): +if(havebrotli): compression_supported_list.append('br') -if (havezstd): +if(havezstd): compression_supported_list.append('zstd') -if (havelzma): +if(havelzma): compression_supported_list.append('lzma') compression_supported_list.append('xz') compression_supported = ', '.join(compression_supported_list) geturls_cj = cookielib.CookieJar() windowsNT4_ua_string = "Windows NT 4.0" -windowsNT4_ua_addon = { - 'SEC-CH-UA-PLATFORM': "Windows", - 'SEC-CH-UA-ARCH': "x86", - 'SEC-CH-UA-BITNESS': "32", - 'SEC-CH-UA-PLATFORM': "4.0.0"} +windowsNT4_ua_addon = {'SEC-CH-UA-PLATFORM': "Windows", 'SEC-CH-UA-ARCH': "x86", + 'SEC-CH-UA-BITNESS': "32", 'SEC-CH-UA-PLATFORM': "4.0.0"} windows2k_ua_string = "Windows NT 5.0" windows2k_ua_addon = {'SEC-CH-UA-PLATFORM': "Windows", 'SEC-CH-UA-ARCH': "x86", 'SEC-CH-UA-BITNESS': "32", 'SEC-CH-UA-PLATFORM': "5.0.0"} @@ -221,11 +212,8 @@ windowsXP_ua_addon = {'SEC-CH-UA-PLATFORM': "Windows", 'SEC-CH-UA-ARCH': "x86", 'SEC-CH-UA-BITNESS': "32", 'SEC-CH-UA-PLATFORM': "5.1.0"} windowsXP64_ua_string = "Windows NT 5.2; Win64; x64" -windowsXP64_ua_addon = { - 'SEC-CH-UA-PLATFORM': "Windows", - 'SEC-CH-UA-ARCH': "x86", - 'SEC-CH-UA-BITNESS': "64", - 'SEC-CH-UA-PLATFORM': "5.1.0"} +windowsXP64_ua_addon = {'SEC-CH-UA-PLATFORM': "Windows", 'SEC-CH-UA-ARCH': "x86", + 'SEC-CH-UA-BITNESS': "64", 'SEC-CH-UA-PLATFORM': "5.1.0"} windows7_ua_string = "Windows NT 6.1; Win64; x64" windows7_ua_addon = {'SEC-CH-UA-PLATFORM': "Windows", 'SEC-CH-UA-ARCH': "x86", 'SEC-CH-UA-BITNESS': "64", 'SEC-CH-UA-PLATFORM': "6.1.0"} @@ -236,230 +224,110 @@ windows81_ua_addon = {'SEC-CH-UA-PLATFORM': "Windows", 'SEC-CH-UA-ARCH': "x86", 'SEC-CH-UA-BITNESS': "64", 'SEC-CH-UA-PLATFORM': "6.3.0"} windows10_ua_string = "Windows NT 
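# --- Editor's sketch ------------------------------------------------------------
# The chained str()+ concatenations above build "X.Y.Z" plus an optional
# suffix from __version_info__; a compact equivalent:
def format_version(info):
    # info mirrors __version_info__, e.g. (2, 0, 2, "RC 1", 1)
    base = "{0}.{1}.{2}".format(info[0], info[1], info[2])
    return base + " " + str(info[3]) if info[3] is not None else base
# format_version((2, 0, 2, "RC 1", 1)) -> '2.0.2 RC 1'
# --------------------------------------------------------------------------------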
10.0; Win64; x64" -windows10_ua_addon = { - 'SEC-CH-UA-PLATFORM': "Windows", - 'SEC-CH-UA-ARCH': "x86", - 'SEC-CH-UA-BITNESS': "64", - 'SEC-CH-UA-PLATFORM': "10.0.0"} +windows10_ua_addon = {'SEC-CH-UA-PLATFORM': "Windows", 'SEC-CH-UA-ARCH': "x86", + 'SEC-CH-UA-BITNESS': "64", 'SEC-CH-UA-PLATFORM': "10.0.0"} windows11_ua_string = "Windows NT 11.0; Win64; x64" -windows11_ua_addon = { - 'SEC-CH-UA-PLATFORM': "Windows", - 'SEC-CH-UA-ARCH': "x86", - 'SEC-CH-UA-BITNESS': "64", - 'SEC-CH-UA-PLATFORM': "11.0.0"} -geturls_ua_firefox_windows7 = "Mozilla/5.0 (" + windows7_ua_string + \ +windows11_ua_addon = {'SEC-CH-UA-PLATFORM': "Windows", 'SEC-CH-UA-ARCH': "x86", + 'SEC-CH-UA-BITNESS': "64", 'SEC-CH-UA-PLATFORM': "11.0.0"} +geturls_ua_firefox_windows7 = "Mozilla/5.0 ("+windows7_ua_string + \ "; rv:109.0) Gecko/20100101 Firefox/117.0" -geturls_ua_seamonkey_windows7 = "Mozilla/5.0 (" + windows7_ua_string + \ +geturls_ua_seamonkey_windows7 = "Mozilla/5.0 ("+windows7_ua_string + \ "; rv:91.0) Gecko/20100101 Firefox/91.0 SeaMonkey/2.53.17" -geturls_ua_chrome_windows7 = "Mozilla/5.0 (" + windows7_ua_string + \ +geturls_ua_chrome_windows7 = "Mozilla/5.0 ("+windows7_ua_string + \ ") AppleWebKit/537.36 (KHTML, like Gecko) Chrome/117.0.0.0 Safari/537.36" -geturls_ua_chromium_windows7 = "Mozilla/5.0 (" + windows7_ua_string + \ +geturls_ua_chromium_windows7 = "Mozilla/5.0 ("+windows7_ua_string + \ ") AppleWebKit/537.36 (KHTML, like Gecko) Chromium/117.0.0.0 Chrome/117.0.0.0 Safari/537.36" -geturls_ua_palemoon_windows7 = "Mozilla/5.0 (" + windows7_ua_string + \ +geturls_ua_palemoon_windows7 = "Mozilla/5.0 ("+windows7_ua_string + \ "; rv:102.0) Gecko/20100101 Goanna/6.3 Firefox/102.0 PaleMoon/32.4.0.1" -geturls_ua_opera_windows7 = "Mozilla/5.0 (" + windows7_ua_string + \ +geturls_ua_opera_windows7 = "Mozilla/5.0 ("+windows7_ua_string + \ ") AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.0.0 Safari/537.36 OPR/102.0.0.0" -geturls_ua_vivaldi_windows7 = "Mozilla/5.0 (" + windows7_ua_string + \ +geturls_ua_vivaldi_windows7 = "Mozilla/5.0 ("+windows7_ua_string + \ ") AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.0.0 Safari/537.36 Vivaldi/6.2.3105.48" geturls_ua_internet_explorer_windows7 = "Mozilla/5.0 (" + \ - windows7_ua_string + "; Trident/7.0; rv:11.0) like Gecko" -geturls_ua_microsoft_edge_windows7 = "Mozilla/5.0 (" + windows7_ua_string + \ + windows7_ua_string+"; Trident/7.0; rv:11.0) like Gecko" +geturls_ua_microsoft_edge_windows7 = "Mozilla/5.0 ("+windows7_ua_string + \ ") AppleWebKit/537.36 (KHTML, like Gecko) Chrome/117.0.0.0 Safari/537.36 Edg/117.0.2045.31" geturls_ua_pywwwget_python = "Mozilla/5.0 (compatible; {proname}/{prover}; +{prourl})".format( proname=__project__, prover=__version__, prourl=__project_url__) -if (platform.python_implementation() != ""): +if(platform.python_implementation() != ""): py_implementation = platform.python_implementation() -if (platform.python_implementation() == ""): +if(platform.python_implementation() == ""): py_implementation = "Python" -geturls_ua_pywwwget_python_alt = "Mozilla/5.0 ({osver}; {archtype}; +{prourl}) {pyimp}/{pyver} (KHTML, like Gecko) {proname}/{prover}".format( - osver=platform.system() + - " " + - platform.release(), - archtype=platform.machine(), - prourl=__project_url__, - pyimp=py_implementation, - pyver=platform.python_version(), - proname=__project__, - prover=__version__) +geturls_ua_pywwwget_python_alt = "Mozilla/5.0 ({osver}; {archtype}; +{prourl}) {pyimp}/{pyver} (KHTML, like Gecko) {proname}/{prover}".format(osver=platform.system( 
+)+" "+platform.release(), archtype=platform.machine(), prourl=__project_url__, pyimp=py_implementation, pyver=platform.python_version(), proname=__project__, prover=__version__) geturls_ua_googlebot_google = "Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)" geturls_ua_googlebot_google_old = "Googlebot/2.1 (+http://www.google.com/bot.html)" geturls_ua = geturls_ua_firefox_windows7 -geturls_headers_firefox_windows7 = { - 'Referer': "http://google.com/", - 'User-Agent': geturls_ua_firefox_windows7, - 'Accept-Encoding': compression_supported, - 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", - 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", - 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", - 'Connection': "close"} -geturls_headers_seamonkey_windows7 = { - 'Referer': "http://google.com/", - 'User-Agent': geturls_ua_seamonkey_windows7, - 'Accept-Encoding': compression_supported, - 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", - 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", - 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", - 'Connection': "close"} -geturls_headers_chrome_windows7 = { - 'Referer': "http://google.com/", - 'User-Agent': geturls_ua_chrome_windows7, - 'Accept-Encoding': compression_supported, - 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", - 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", - 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", - 'Connection': "close", - 'SEC-CH-UA': "\"Google Chrome\";v=\"117\", \"Not;A=Brand\";v=\"8\", \"Chromium\";v=\"117\"", - 'SEC-CH-UA-FULL-VERSION': "117.0.5938.63"} +geturls_headers_firefox_windows7 = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_firefox_windows7, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", + 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"} +geturls_headers_seamonkey_windows7 = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_seamonkey_windows7, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", + 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"} +geturls_headers_chrome_windows7 = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_chrome_windows7, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", + 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close", 'SEC-CH-UA': "\"Google Chrome\";v=\"117\", \"Not;A=Brand\";v=\"8\", \"Chromium\";v=\"117\"", 'SEC-CH-UA-FULL-VERSION': "117.0.5938.63"} geturls_headers_chrome_windows7.update(windows7_ua_addon) -geturls_headers_chromium_windows7 = { - 'Referer': "http://google.com/", - 'User-Agent': geturls_ua_chromium_windows7, - 'Accept-Encoding': compression_supported, - 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", - 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", - 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", - 'Connection': "close", - 'SEC-CH-UA': "\"Chromium\";v=\"117\", \"Not;A=Brand\";v=\"24\"", - 'SEC-CH-UA-FULL-VERSION': "117.0.5938.63"} 
+geturls_headers_chromium_windows7 = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_chromium_windows7, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", + 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close", 'SEC-CH-UA': "\"Chromium\";v=\"117\", \"Not;A=Brand\";v=\"24\"", 'SEC-CH-UA-FULL-VERSION': "117.0.5938.63"} geturls_headers_chromium_windows7.update(windows7_ua_addon) -geturls_headers_palemoon_windows7 = { - 'Referer': "http://google.com/", - 'User-Agent': geturls_ua_palemoon_windows7, - 'Accept-Encoding': compression_supported, - 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", - 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", - 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", - 'Connection': "close"} -geturls_headers_opera_windows7 = { - 'Referer': "http://google.com/", - 'User-Agent': geturls_ua_opera_windows7, - 'Accept-Encoding': compression_supported, - 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", - 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", - 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", - 'Connection': "close", - 'SEC-CH-UA': "\"Chromium\";v=\"116\", \"Not;A=Brand\";v=\"8\", \"Opera\";v=\"102\"", - 'SEC-CH-UA-FULL-VERSION': "102.0.4880.56"} +geturls_headers_palemoon_windows7 = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_palemoon_windows7, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", + 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"} +geturls_headers_opera_windows7 = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_opera_windows7, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", + 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close", 'SEC-CH-UA': "\"Chromium\";v=\"116\", \"Not;A=Brand\";v=\"8\", \"Opera\";v=\"102\"", 'SEC-CH-UA-FULL-VERSION': "102.0.4880.56"} geturls_headers_opera_windows7.update(windows7_ua_addon) -geturls_headers_vivaldi_windows7 = { - 'Referer': "http://google.com/", - 'User-Agent': geturls_ua_vivaldi_windows7, - 'Accept-Encoding': compression_supported, - 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", - 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", - 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", - 'Connection': "close", - 'SEC-CH-UA': "\"Google Chrome\";v=\"117\", \"Not;A=Brand\";v=\"8\", \"Vivaldi\";v=\"6.2\"", - 'SEC-CH-UA-FULL-VERSION': "6.2.3105.48"} +geturls_headers_vivaldi_windows7 = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_vivaldi_windows7, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", + 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close", 'SEC-CH-UA': "\"Google Chrome\";v=\"117\", \"Not;A=Brand\";v=\"8\", \"Vivaldi\";v=\"6.2\"", 'SEC-CH-UA-FULL-VERSION': "6.2.3105.48"} geturls_headers_vivaldi_windows7.update(windows7_ua_addon) -geturls_headers_internet_explorer_windows7 = { - 'Referer': 
"http://google.com/", - 'User-Agent': geturls_ua_internet_explorer_windows7, - 'Accept-Encoding': compression_supported, - 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", - 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", - 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", - 'Connection': "close"} -geturls_headers_microsoft_edge_windows7 = { - 'Referer': "http://google.com/", - 'User-Agent': geturls_ua_microsoft_edge_windows7, - 'Accept-Encoding': compression_supported, - 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", - 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", - 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", - 'Connection': "close", - 'SEC-CH-UA': "\"Microsoft Edge\";v=\"117\", \"Not;A=Brand\";v=\"8\", \"Chromium\";v=\"117\"", - 'SEC-CH-UA-FULL-VERSION': "117.0.2045.31"} +geturls_headers_internet_explorer_windows7 = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_internet_explorer_windows7, 'Accept-Encoding': compression_supported, 'Accept-Language': + "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"} +geturls_headers_microsoft_edge_windows7 = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_microsoft_edge_windows7, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", + 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close", 'SEC-CH-UA': "\"Microsoft Edge\";v=\"117\", \"Not;A=Brand\";v=\"8\", \"Chromium\";v=\"117\"", 'SEC-CH-UA-FULL-VERSION': "117.0.2045.31"} geturls_headers_microsoft_edge_windows7.update(windows7_ua_addon) -geturls_headers_pywwwget_python = { - 'Referer': "http://google.com/", - 'User-Agent': geturls_ua_pywwwget_python, - 'Accept-Encoding': "none", - 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", - 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", - 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", - 'Connection': "close", - 'SEC-CH-UA': "\"" + __project__ + "\";v=\"" + str(__version__) + "\", \"Not;A=Brand\";v=\"8\", \"" + py_implementation + "\";v=\"" + str( - platform.release()) + "\"", - 'SEC-CH-UA-FULL-VERSION': str(__version__), - 'SEC-CH-UA-PLATFORM': "" + py_implementation + "", - 'SEC-CH-UA-ARCH': "" + platform.machine() + "", - 'SEC-CH-UA-PLATFORM': str(__version__), - 'SEC-CH-UA-BITNESS': str(PyBitness)} -geturls_headers_pywwwget_python_alt = { - 'Referer': "http://google.com/", - 'User-Agent': geturls_ua_pywwwget_python_alt, - 'Accept-Encoding': "none", - 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", - 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", - 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", - 'Connection': "close", - 'SEC-CH-UA': "\"" + __project__ + "\";v=\"" + str(__version__) + "\", \"Not;A=Brand\";v=\"8\", \"" + py_implementation + "\";v=\"" + str( - platform.release()) + "\"", - 'SEC-CH-UA-FULL-VERSION': str(__version__), - 'SEC-CH-UA-PLATFORM': "" + py_implementation + "", - 'SEC-CH-UA-ARCH': "" + platform.machine() + "", - 'SEC-CH-UA-PLATFORM': str(__version__), - 'SEC-CH-UA-BITNESS': str(PyBitness)} -geturls_headers_googlebot_google = { - 'Referer': "http://google.com/", - 'User-Agent': geturls_ua_googlebot_google, - 
'Accept-Encoding': "none", - 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", - 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", - 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", - 'Connection': "close"} -geturls_headers_googlebot_google_old = { - 'Referer': "http://google.com/", - 'User-Agent': geturls_ua_googlebot_google_old, - 'Accept-Encoding': "none", - 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", - 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", - 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", - 'Connection': "close"} +geturls_headers_pywwwget_python = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_pywwwget_python, 'Accept-Encoding': "none", 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close", + 'SEC-CH-UA': "\""+__project__+"\";v=\""+str(__version__)+"\", \"Not;A=Brand\";v=\"8\", \""+py_implementation+"\";v=\""+str(platform.release())+"\"", 'SEC-CH-UA-FULL-VERSION': str(__version__), 'SEC-CH-UA-PLATFORM': ""+py_implementation+"", 'SEC-CH-UA-ARCH': ""+platform.machine()+"", 'SEC-CH-UA-PLATFORM': str(__version__), 'SEC-CH-UA-BITNESS': str(PyBitness)} +geturls_headers_pywwwget_python_alt = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_pywwwget_python_alt, 'Accept-Encoding': "none", 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close", + 'SEC-CH-UA': "\""+__project__+"\";v=\""+str(__version__)+"\", \"Not;A=Brand\";v=\"8\", \""+py_implementation+"\";v=\""+str(platform.release())+"\"", 'SEC-CH-UA-FULL-VERSION': str(__version__), 'SEC-CH-UA-PLATFORM': ""+py_implementation+"", 'SEC-CH-UA-ARCH': ""+platform.machine()+"", 'SEC-CH-UA-PLATFORM': str(__version__), 'SEC-CH-UA-BITNESS': str(PyBitness)} +geturls_headers_googlebot_google = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_googlebot_google, 'Accept-Encoding': "none", 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", + 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"} +geturls_headers_googlebot_google_old = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_googlebot_google_old, 'Accept-Encoding': "none", 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", + 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"} geturls_headers = geturls_headers_firefox_windows7 geturls_download_sleep = 0 def verbose_printout(dbgtxt, outtype="log", dbgenable=True, dgblevel=20): - if (outtype == "print" and dbgenable): + if(outtype == "print" and dbgenable): print(dbgtxt) return True - elif (outtype == "log" and dbgenable): + elif(outtype == "log" and dbgenable): logging.info(dbgtxt) return True - elif (outtype == "warning" and dbgenable): + elif(outtype == "warning" and dbgenable): logging.warning(dbgtxt) return True - elif (outtype == "error" and dbgenable): + elif(outtype == "error" and dbgenable): logging.error(dbgtxt) return True - elif (outtype == "critical" and dbgenable): + elif(outtype == "critical" and dbgenable): logging.critical(dbgtxt) return 
True - elif (outtype == "exception" and dbgenable): + elif(outtype == "exception" and dbgenable): logging.exception(dbgtxt) return True - elif (outtype == "logalt" and dbgenable): + elif(outtype == "logalt" and dbgenable): logging.log(dgblevel, dbgtxt) return True - elif (outtype == "debug" and dbgenable): + elif(outtype == "debug" and dbgenable): logging.debug(dbgtxt) return True - elif (not dbgenable): + elif(not dbgenable): return True else: return False return False -def verbose_printout_return( - dbgtxt, - outtype="log", - dbgenable=True, - dgblevel=20): +def verbose_printout_return(dbgtxt, outtype="log", dbgenable=True, dgblevel=20): dbgout = verbose_printout(dbgtxt, outtype, dbgenable, dgblevel) - if (not dbgout): + if(not dbgout): return False return dbgtxt @@ -490,7 +358,7 @@ def listize(varlist): newlistreg = {} newlistrev = {} newlistfull = {} - while (il < ix): + while(il < ix): newlistreg.update({ilx: varlist[il]}) newlistrev.update({varlist[il]: ilx}) ilx = ilx + 1 @@ -509,7 +377,7 @@ def twolistize(varlist): newlistdescreg = {} newlistdescrev = {} newlistfull = {} - while (il < ix): + while(il < ix): newlistnamereg.update({ilx: varlist[il][0].strip()}) newlistnamerev.update({varlist[il][0].strip(): ilx}) newlistdescreg.update({ilx: varlist[il][1].strip()}) @@ -530,7 +398,7 @@ def arglistize(proexec, *varlist): ix = len(varlist) ilx = 1 newarglist = [proexec] - while (il < ix): + while(il < ix): if varlist[il][0] is not None: newarglist.append(varlist[il][0]) if varlist[il][1] is not None: @@ -540,9 +408,9 @@ def arglistize(proexec, *varlist): def fix_header_names(header_dict): - if (sys.version[0] == "2"): + if(sys.version[0] == "2"): header_dict = {k.title(): v for k, v in header_dict.iteritems()} - if (sys.version[0] >= "3"): + if(sys.version[0] >= "3"): header_dict = {k.title(): v for k, v in header_dict.items()} return header_dict @@ -562,13 +430,13 @@ def hms_string(sec_elapsed): def get_readable_size(bytes, precision=1, unit="IEC"): unit = unit.upper() - if (unit != "IEC" and unit != "SI"): + if(unit != "IEC" and unit != "SI"): unit = "IEC" - if (unit == "IEC"): + if(unit == "IEC"): units = [" B", " KiB", " MiB", " GiB", " TiB", " PiB", " EiB", " ZiB"] unitswos = ["B", "KiB", "MiB", "GiB", "TiB", "PiB", "EiB", "ZiB"] unitsize = 1024.0 - if (unit == "SI"): + if(unit == "SI"): units = [" B", " kB", " MB", " GB", " TB", " PB", " EB", " ZB"] unitswos = ["B", "kB", "MB", "GB", "TB", "PB", "EB", "ZB"] unitsize = 1000.0 @@ -576,50 +444,39 @@ def get_readable_size(bytes, precision=1, unit="IEC"): orgbytes = bytes for unit in units: if abs(bytes) < unitsize: - strformat = "%3." + str(precision) + "f%s" + strformat = "%3."+str(precision)+"f%s" pre_return_val = (strformat % (bytes, unit)) pre_return_val = re.sub( r"([0]+) ([A-Za-z]+)", r" \2", pre_return_val) pre_return_val = re.sub(r"\. ([A-Za-z]+)", r" \1", pre_return_val) alt_return_val = pre_return_val.split() - return_val = { - 'Bytes': orgbytes, - 'ReadableWithSuffix': pre_return_val, - 'ReadableWithoutSuffix': alt_return_val[0], - 'ReadableSuffix': alt_return_val[1]} + return_val = {'Bytes': orgbytes, 'ReadableWithSuffix': pre_return_val, + 'ReadableWithoutSuffix': alt_return_val[0], 'ReadableSuffix': alt_return_val[1]} return return_val bytes /= unitsize - strformat = "%." + str(precision) + "f%s" + strformat = "%."+str(precision)+"f%s" pre_return_val = (strformat % (bytes, "YiB")) pre_return_val = re.sub(r"([0]+) ([A-Za-z]+)", r" \2", pre_return_val) pre_return_val = re.sub(r"\. 
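# --- Editor's sketch --------------------------------------------------------------
# verbose_printout() above maps an outtype string onto a logging call through
# an if/elif ladder; a lookup table expresses the same dispatch (outtype names
# taken from the patch):
import logging

_OUTTYPES = {"log": logging.info, "warning": logging.warning,
             "error": logging.error, "critical": logging.critical,
             "exception": logging.exception, "debug": logging.debug}

def verbose_printout_table(dbgtxt, outtype="log", dbgenable=True, dgblevel=20):
    if not dbgenable:
        return True
    if outtype == "print":
        print(dbgtxt)
        return True
    if outtype == "logalt":
        logging.log(dgblevel, dbgtxt)
        return True
    fn = _OUTTYPES.get(outtype)
    if fn is None:
        return False
    fn(dbgtxt)
    return True
# -----------------------------------------------------------------------------------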
([A-Za-z]+)", r" \1", pre_return_val) alt_return_val = pre_return_val.split() - return_val = { - 'Bytes': orgbytes, - 'ReadableWithSuffix': pre_return_val, - 'ReadableWithoutSuffix': alt_return_val[0], - 'ReadableSuffix': alt_return_val[1]} + return_val = {'Bytes': orgbytes, 'ReadableWithSuffix': pre_return_val, + 'ReadableWithoutSuffix': alt_return_val[0], 'ReadableSuffix': alt_return_val[1]} return return_val -def get_readable_size_from_file( - infile, - precision=1, - unit="IEC", - usehashes=False, - usehashtypes="md5,sha1"): +def get_readable_size_from_file(infile, precision=1, unit="IEC", usehashes=False, usehashtypes="md5,sha1"): unit = unit.upper() usehashtypes = usehashtypes.lower() getfilesize = os.path.getsize(infile) return_val = get_readable_size(getfilesize, precision, unit) - if (usehashes): + if(usehashes): hashtypelist = usehashtypes.split(",") openfile = open(infile, "rb") filecontents = openfile.read() openfile.close() listnumcount = 0 listnumend = len(hashtypelist) - while (listnumcount < listnumend): + while(listnumcount < listnumend): hashtypelistlow = hashtypelist[listnumcount].strip() hashtypelistup = hashtypelistlow.upper() filehash = hashlib.new(hashtypelistup) @@ -630,27 +487,22 @@ def get_readable_size_from_file( return return_val -def get_readable_size_from_string( - instring, - precision=1, - unit="IEC", - usehashes=False, - usehashtypes="md5,sha1"): +def get_readable_size_from_string(instring, precision=1, unit="IEC", usehashes=False, usehashtypes="md5,sha1"): unit = unit.upper() usehashtypes = usehashtypes.lower() getfilesize = len(instring) return_val = get_readable_size(getfilesize, precision, unit) - if (usehashes): + if(usehashes): hashtypelist = usehashtypes.split(",") listnumcount = 0 listnumend = len(hashtypelist) - while (listnumcount < listnumend): + while(listnumcount < listnumend): hashtypelistlow = hashtypelist[listnumcount].strip() hashtypelistup = hashtypelistlow.upper() filehash = hashlib.new(hashtypelistup) - if (sys.version[0] == "2"): + if(sys.version[0] == "2"): filehash.update(instring) - if (sys.version[0] >= "3"): + if(sys.version[0] >= "3"): filehash.update(instring.encode('utf-8')) filegethash = filehash.hexdigest() return_val.update({hashtypelistup: filegethash}) @@ -763,7 +615,8 @@ def ftp_status_to_reason(code): 550: 'Requested action not taken. File unavailable', 551: 'Requested action aborted. Page type unknown', 552: 'Requested file action aborted. Exceeded storage allocation', - 553: 'Requested action not taken. File name not allowed'} + 553: 'Requested action not taken. 
File name not allowed' + } return reasons.get(code, 'Unknown Status Code') @@ -782,21 +635,13 @@ def sftp_status_to_reason(code): return reasons.get(code, 'Unknown Status Code') -def make_http_headers_from_dict_to_list( - headers={ - 'Referer': "http://google.com/", - 'User-Agent': geturls_ua, - 'Accept-Encoding': compression_supported, - 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", - 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", - 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", - 'Connection': "close"}): +def make_http_headers_from_dict_to_list(headers={'Referer': "http://google.com/", 'User-Agent': geturls_ua, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"}): if isinstance(headers, dict): returnval = [] - if (sys.version[0] == "2"): + if(sys.version[0] == "2"): for headkey, headvalue in headers.iteritems(): returnval.append((headkey, headvalue)) - if (sys.version[0] >= "3"): + if(sys.version[0] >= "3"): for headkey, headvalue in headers.items(): returnval.append((headkey, headvalue)) elif isinstance(headers, list): @@ -806,23 +651,15 @@ def make_http_headers_from_dict_to_list( return returnval -def make_http_headers_from_dict_to_pycurl( - headers={ - 'Referer': "http://google.com/", - 'User-Agent': geturls_ua, - 'Accept-Encoding': compression_supported, - 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", - 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", - 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", - 'Connection': "close"}): +def make_http_headers_from_dict_to_pycurl(headers={'Referer': "http://google.com/", 'User-Agent': geturls_ua, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"}): if isinstance(headers, dict): returnval = [] - if (sys.version[0] == "2"): + if(sys.version[0] == "2"): for headkey, headvalue in headers.iteritems(): - returnval.append(headkey + ": " + headvalue) - if (sys.version[0] >= "3"): + returnval.append(headkey+": "+headvalue) + if(sys.version[0] >= "3"): for headkey, headvalue in headers.items(): - returnval.append(headkey + ": " + headvalue) + returnval.append(headkey+": "+headvalue) elif isinstance(headers, list): returnval = headers else: @@ -835,33 +672,18 @@ def make_http_headers_from_pycurl_to_dict(headers): headers = headers.strip().split('\r\n') for header in headers: parts = header.split(': ', 1) - if (len(parts) == 2): + if(len(parts) == 2): key, value = parts header_dict[key.title()] = value return header_dict -def make_http_headers_from_list_to_dict( - headers=[ - ("Referer", - "http://google.com/"), - ("User-Agent", - geturls_ua), - ("Accept-Encoding", - compression_supported), - ("Accept-Language", - "en-US,en;q=0.8,en-CA,en-GB;q=0.6"), - ("Accept-Charset", - "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7"), - ("Accept", - "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"), - ("Connection", - "close")]): +def make_http_headers_from_list_to_dict(headers=[("Referer", "http://google.com/"), ("User-Agent", geturls_ua), ("Accept-Encoding", compression_supported), ("Accept-Language", 
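# --- Editor's note, as code ----------------------------------------------------------
# The make_http_headers_from_* helpers above take mutable dict/list defaults;
# Python evaluates a default argument once at def time, so every call shares
# that one object. Harmless here because the helpers only read it. Round-trip
# of the dict<->pycurl-line conversions they implement:
headers = {'Referer': "http://google.com/", 'Connection': "close"}
as_pycurl = [k + ": " + v for k, v in headers.items()]    # dict_to_pycurl shape
back = dict(line.split(": ", 1) for line in as_pycurl)    # pycurl_to_dict shape
assert back == headers
# --------------------------------------------------------------------------------------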
"en-US,en;q=0.8,en-CA,en-GB;q=0.6"), ("Accept-Charset", "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7"), ("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"), ("Connection", "close")]): if isinstance(headers, list): returnval = {} mli = 0 mlil = len(headers) - while (mli < mlil): + while(mli < mlil): returnval.update({headers[mli][0]: headers[mli][1]}) mli = mli + 1 elif isinstance(headers, dict): @@ -876,38 +698,38 @@ def get_httplib_support(checkvalue=None): returnval = [] returnval.append("ftp") returnval.append("httplib") - if (havehttplib2): + if(havehttplib2): returnval.append("httplib2") returnval.append("urllib") - if (haveurllib3): + if(haveurllib3): returnval.append("urllib3") returnval.append("request3") returnval.append("request") - if (haverequests): + if(haverequests): returnval.append("requests") - if (haveaiohttp): + if(haveaiohttp): returnval.append("aiohttp") - if (havehttpx): + if(havehttpx): returnval.append("httpx") returnval.append("httpx2") - if (havemechanize): + if(havemechanize): returnval.append("mechanize") - if (havepycurl): + if(havepycurl): returnval.append("pycurl") - if (hasattr(pycurl, "CURL_HTTP_VERSION_2_0")): + if(hasattr(pycurl, "CURL_HTTP_VERSION_2_0")): returnval.append("pycurl2") - if (hasattr(pycurl, "CURL_HTTP_VERSION_3_0")): + if(hasattr(pycurl, "CURL_HTTP_VERSION_3_0")): returnval.append("pycurl3") - if (haveparamiko): + if(haveparamiko): returnval.append("sftp") - if (havepysftp): + if(havepysftp): returnval.append("pysftp") - if (checkvalue is not None): - if (checkvalue == "urllib1" or checkvalue == "urllib2"): + if(not checkvalue is None): + if(checkvalue == "urllib1" or checkvalue == "urllib2"): checkvalue = "urllib" - if (checkvalue == "httplib1"): + if(checkvalue == "httplib1"): checkvalue = "httplib" - if (checkvalue in returnval): + if(checkvalue in returnval): returnval = True else: returnval = False @@ -915,9 +737,9 @@ def get_httplib_support(checkvalue=None): def check_httplib_support(checkvalue="urllib"): - if (checkvalue == "urllib1" or checkvalue == "urllib2"): + if(checkvalue == "urllib1" or checkvalue == "urllib2"): checkvalue = "urllib" - if (checkvalue == "httplib1"): + if(checkvalue == "httplib1"): checkvalue = "httplib" returnval = get_httplib_support(checkvalue) return returnval @@ -928,1155 +750,429 @@ def get_httplib_support_list(): return returnval -def download_from_url( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - httplibuse="urllib", - buffersize=524288, - sleep=-1, - timeout=10): +def download_from_url(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, httplibuse="urllib", buffersize=524288, sleep=-1, timeout=10): global geturls_download_sleep, havezstd, havebrotli, haveaiohttp, haverequests, havemechanize, havepycurl, havehttplib2, haveurllib3, havehttpx, havehttpcore, haveparamiko, havepysftp - if (sleep < 0): + if(sleep < 0): sleep = geturls_download_sleep - if (timeout <= 0): + if(timeout <= 0): timeout = 10 - if (httplibuse == "urllib1" or httplibuse == - "urllib2" or httplibuse == "request"): + if(httplibuse == "urllib1" or httplibuse == "urllib2" or httplibuse == "request"): httplibuse = "urllib" - if (httplibuse == "httplib1"): + if(httplibuse == "httplib1"): httplibuse = "httplib" - if (not haverequests and httplibuse == "requests"): + if(not haverequests and httplibuse == "requests"): httplibuse = 
"urllib" - if (not haveaiohttp and httplibuse == "aiohttp"): + if(not haveaiohttp and httplibuse == "aiohttp"): httplibuse = "urllib" - if (not havehttpx and httplibuse == "httpx"): + if(not havehttpx and httplibuse == "httpx"): httplibuse = "urllib" - if (not havehttpx and httplibuse == "httpx2"): + if(not havehttpx and httplibuse == "httpx2"): httplibuse = "urllib" - if (not havehttpcore and httplibuse == "httpcore"): + if(not havehttpcore and httplibuse == "httpcore"): httplibuse = "urllib" - if (not havehttpcore and httplibuse == "httpcore2"): + if(not havehttpcore and httplibuse == "httpcore2"): httplibuse = "urllib" - if (not havemechanize and httplibuse == "mechanize"): + if(not havemechanize and httplibuse == "mechanize"): httplibuse = "urllib" - if (not havepycurl and httplibuse == "pycurl"): + if(not havepycurl and httplibuse == "pycurl"): httplibuse = "urllib" - if (not havepycurl and httplibuse == "pycurl2"): + if(not havepycurl and httplibuse == "pycurl2"): httplibuse = "urllib" - if (havepycurl and httplibuse == "pycurl2" and not hasattr( - pycurl, "CURL_HTTP_VERSION_2_0")): + if(havepycurl and httplibuse == "pycurl2" and not hasattr(pycurl, "CURL_HTTP_VERSION_2_0")): httplibuse = "pycurl" - if (not havepycurl and httplibuse == "pycurl3"): + if(not havepycurl and httplibuse == "pycurl3"): httplibuse = "urllib" - if (havepycurl and httplibuse == "pycurl3" and not hasattr( - pycurl, "CURL_HTTP_VERSION_3_0") and hasattr(pycurl, "CURL_HTTP_VERSION_2_0")): + if(havepycurl and httplibuse == "pycurl3" and not hasattr(pycurl, "CURL_HTTP_VERSION_3_0") and hasattr(pycurl, "CURL_HTTP_VERSION_2_0")): httplibuse = "pycurl2" - if (havepycurl and httplibuse == "pycurl3" and not hasattr(pycurl, - "CURL_HTTP_VERSION_3_0") and not hasattr(pycurl, - "CURL_HTTP_VERSION_2_0")): + if(havepycurl and httplibuse == "pycurl3" and not hasattr(pycurl, "CURL_HTTP_VERSION_3_0") and not hasattr(pycurl, "CURL_HTTP_VERSION_2_0")): httplibuse = "pycurl" - if (not havehttplib2 and httplibuse == "httplib2"): + if(not havehttplib2 and httplibuse == "httplib2"): httplibuse = "httplib" - if (not haveparamiko and httplibuse == "sftp"): + if(not haveparamiko and httplibuse == "sftp"): httplibuse = "ftp" - if (not havepysftp and httplibuse == "pysftp"): + if(not havepysftp and httplibuse == "pysftp"): httplibuse = "ftp" - if (httplibuse == "urllib" or httplibuse == "request"): + if(httplibuse == "urllib" or httplibuse == "request"): returnval = download_from_url_with_urllib( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - sleep, - timeout) - elif (httplibuse == "request"): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep, timeout) + elif(httplibuse == "request"): returnval = download_from_url_with_request( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - sleep, - timeout) - elif (httplibuse == "request3"): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep, timeout) + elif(httplibuse == "request3"): returnval = download_from_url_with_request3( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - sleep, - timeout) - elif (httplibuse == "httplib"): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep, timeout) + elif(httplibuse == "httplib"): returnval = 
download_from_url_with_httplib( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - sleep, - timeout) - elif (httplibuse == "httplib2"): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep, timeout) + elif(httplibuse == "httplib2"): returnval = download_from_url_with_httplib2( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - sleep, - timeout) - elif (httplibuse == "urllib3" or httplibuse == "request3"): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep, timeout) + elif(httplibuse == "urllib3" or httplibuse == "request3"): returnval = download_from_url_with_urllib3( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - sleep, - timeout) - elif (httplibuse == "requests"): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep, timeout) + elif(httplibuse == "requests"): returnval = download_from_url_with_requests( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - sleep, - timeout) - elif (httplibuse == "aiohttp"): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep, timeout) + elif(httplibuse == "aiohttp"): returnval = download_from_url_with_aiohttp( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - sleep, - timeout) - elif (httplibuse == "httpx"): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep, timeout) + elif(httplibuse == "httpx"): returnval = download_from_url_with_httpx( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - sleep, - timeout) - elif (httplibuse == "httpx2"): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep, timeout) + elif(httplibuse == "httpx2"): returnval = download_from_url_with_httpx2( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - sleep, - timeout) - elif (httplibuse == "httpcore"): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep, timeout) + elif(httplibuse == "httpcore"): returnval = download_from_url_with_httpcore( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - sleep, - timeout) - elif (httplibuse == "httpcore2"): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep, timeout) + elif(httplibuse == "httpcore2"): returnval = download_from_url_with_httpcore2( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - sleep, - timeout) - elif (httplibuse == "mechanize"): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep, timeout) + elif(httplibuse == "mechanize"): returnval = download_from_url_with_mechanize( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - sleep, - timeout) - elif (httplibuse == "pycurl"): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, 
postdata, buffersize, sleep, timeout) + elif(httplibuse == "pycurl"): returnval = download_from_url_with_pycurl( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - sleep, - timeout) - elif (httplibuse == "pycurl2"): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep, timeout) + elif(httplibuse == "pycurl2"): returnval = download_from_url_with_pycurl2( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - sleep, - timeout) - elif (httplibuse == "pycurl3"): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep, timeout) + elif(httplibuse == "pycurl3"): returnval = download_from_url_with_pycurl3( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - sleep, - timeout) - elif (httplibuse == "ftp"): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep, timeout) + elif(httplibuse == "ftp"): returnval = download_from_url_with_ftp( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - sleep, - timeout) - elif (httplibuse == "sftp"): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep, timeout) + elif(httplibuse == "sftp"): returnval = download_from_url_with_sftp( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - sleep, - timeout) - elif (httplibuse == "pysftp"): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep, timeout) + elif(httplibuse == "pysftp"): returnval = download_from_url_with_pysftp( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep, timeout) else: returnval = False return returnval -def download_from_url_from_list( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - httplibuse="urllib", - buffersize=524288, - sleep=-1, - timeout=10): - if (isinstance(httpurl, list)): +def download_from_url_from_list(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, httplibuse="urllib", buffersize=524288, sleep=-1, timeout=10): + if(isinstance(httpurl, list)): pass - elif (isinstance(httpurl, tuple)): + elif(isinstance(httpurl, tuple)): pass - elif (isinstance(httpurl, dict)): + elif(isinstance(httpurl, dict)): httpurl = httpurl.values() else: httpurl = [httpurl] listsize = len(httpurl) listcount = 0 returnval = [] - while (listcount < listsize): - ouputval = download_from_url( - httpurl[listcount], - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - httplibuse, - buffersize, - sleep, - timeout) + while(listcount < listsize): + ouputval = download_from_url(httpurl[listcount], httpheaders, httpuseragent, httpreferer, + httpcookie, httpmethod, postdata, httplibuse, buffersize, sleep, timeout) returnval.append(ouputval) listcount += 1 return returnval -def download_from_url_file( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - 
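download_from_url() is the top-level dispatcher: it normalizes httplibuse, silently downgrades to "urllib" (or "ftp") when the requested library is absent, and returns False for an unknown backend. A minimal calling sketch; the URL is a placeholder, and the 'Code'/'Content' keys come from the returnval dicts built by the per-backend functions later in this patch:

    from pywwwget import download_from_url

    result = download_from_url("http://example.com/", httplibuse="urllib",
                               timeout=10)
    if result:
        print(result['Code'], len(result['Content']))
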
httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - httplibuse="urllib", - ranges=[ - None, - None], - buffersize=524288, - sleep=-1, - timeout=10): +def download_from_url_file(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, httplibuse="urllib", ranges=[None, None], buffersize=524288, sleep=-1, timeout=10): global geturls_download_sleep, havezstd, havebrotli, haveaiohttp, haverequests, havemechanize, havepycurl, havehttplib2, haveurllib3, havehttpx, havehttpcore, haveparamiko, havepysftp - if (sleep < 0): + if(sleep < 0): sleep = geturls_download_sleep - if (timeout <= 0): + if(timeout <= 0): timeout = 10 - if (httplibuse == "urllib1" or httplibuse == - "urllib2" or httplibuse == "request"): + if(httplibuse == "urllib1" or httplibuse == "urllib2" or httplibuse == "request"): httplibuse = "urllib" - if (httplibuse == "httplib1"): + if(httplibuse == "httplib1"): httplibuse = "httplib" - if (not haverequests and httplibuse == "requests"): + if(not haverequests and httplibuse == "requests"): httplibuse = "urllib" - if (not haveaiohttp and httplibuse == "aiohttp"): + if(not haveaiohttp and httplibuse == "aiohttp"): httplibuse = "urllib" - if (not havehttpx and httplibuse == "httpx"): + if(not havehttpx and httplibuse == "httpx"): httplibuse = "urllib" - if (not havehttpx and httplibuse == "httpx2"): + if(not havehttpx and httplibuse == "httpx2"): httplibuse = "urllib" - if (not havehttpcore and httplibuse == "httpcore"): + if(not havehttpcore and httplibuse == "httpcore"): httplibuse = "urllib" - if (not havehttpcore and httplibuse == "httpcore2"): + if(not havehttpcore and httplibuse == "httpcore2"): httplibuse = "urllib" - if (not havemechanize and httplibuse == "mechanize"): + if(not havemechanize and httplibuse == "mechanize"): httplibuse = "urllib" - if (not havepycurl and httplibuse == "pycurl"): + if(not havepycurl and httplibuse == "pycurl"): httplibuse = "urllib" - if (not havepycurl and httplibuse == "pycurl2"): + if(not havepycurl and httplibuse == "pycurl2"): httplibuse = "urllib" - if (havepycurl and httplibuse == "pycurl2" and not hasattr( - pycurl, "CURL_HTTP_VERSION_2_0")): + if(havepycurl and httplibuse == "pycurl2" and not hasattr(pycurl, "CURL_HTTP_VERSION_2_0")): httplibuse = "pycurl" - if (not havepycurl and httplibuse == "pycurl3"): + if(not havepycurl and httplibuse == "pycurl3"): httplibuse = "urllib" - if (havepycurl and httplibuse == "pycurl3" and not hasattr( - pycurl, "CURL_HTTP_VERSION_3_0") and hasattr(pycurl, "CURL_HTTP_VERSION_2_0")): + if(havepycurl and httplibuse == "pycurl3" and not hasattr(pycurl, "CURL_HTTP_VERSION_3_0") and hasattr(pycurl, "CURL_HTTP_VERSION_2_0")): httplibuse = "pycurl2" - if (havepycurl and httplibuse == "pycurl3" and not hasattr(pycurl, - "CURL_HTTP_VERSION_3_0") and not hasattr(pycurl, - "CURL_HTTP_VERSION_2_0")): + if(havepycurl and httplibuse == "pycurl3" and not hasattr(pycurl, "CURL_HTTP_VERSION_3_0") and not hasattr(pycurl, "CURL_HTTP_VERSION_2_0")): httplibuse = "pycurl" - if (not havehttplib2 and httplibuse == "httplib2"): + if(not havehttplib2 and httplibuse == "httplib2"): httplibuse = "httplib" - if (not haveparamiko and httplibuse == "sftp"): + if(not haveparamiko and httplibuse == "sftp"): httplibuse = "ftp" - if (not haveparamiko and httplibuse == "pysftp"): + if(not haveparamiko and httplibuse == "pysftp"): httplibuse = "ftp" - if (httplibuse == "urllib" or httplibuse == "request"): + if(httplibuse == "urllib" 
or httplibuse == "request"): returnval = download_from_url_file_with_urllib( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize, - sleep, - timeout) - elif (httplibuse == "request"): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize, sleep, timeout) + elif(httplibuse == "request"): returnval = download_from_url_file_with_request( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize, - sleep, - timeout) - elif (httplibuse == "request3"): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize, sleep, timeout) + elif(httplibuse == "request3"): returnval = download_from_url_file_with_request3( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize, - sleep, - timeout) - elif (httplibuse == "httplib"): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize, sleep, timeout) + elif(httplibuse == "httplib"): returnval = download_from_url_file_with_httplib( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize, - sleep, - timeout) - elif (httplibuse == "httplib2"): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize, sleep, timeout) + elif(httplibuse == "httplib2"): returnval = download_from_url_file_with_httplib2( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize, - sleep, - timeout) - elif (httplibuse == "urllib3" or httplibuse == "request3"): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize, sleep, timeout) + elif(httplibuse == "urllib3" or httplibuse == "request3"): returnval = download_from_url_file_with_urllib3( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize, - sleep, - timeout) - elif (httplibuse == "requests"): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize, sleep, timeout) + elif(httplibuse == "requests"): returnval = download_from_url_file_with_requests( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize, - sleep, - timeout) - elif (httplibuse == "aiohttp"): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize, sleep, timeout) + elif(httplibuse == "aiohttp"): returnval = download_from_url_file_with_aiohttp( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize, - sleep, - timeout) - elif (httplibuse == "httpx"): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize, sleep, timeout) + elif(httplibuse == "httpx"): returnval = download_from_url_file_with_httpx( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize, - sleep, - timeout) - elif (httplibuse == "httpx2"): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize, sleep, timeout) + elif(httplibuse == "httpx2"): returnval = download_from_url_file_with_httpx2( - 
httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize, - sleep, - timeout) - elif (httplibuse == "httpcore"): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize, sleep, timeout) + elif(httplibuse == "httpcore"): returnval = download_from_url_file_with_httpcore( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize, - sleep, - timeout) - elif (httplibuse == "httpcore2"): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize, sleep, timeout) + elif(httplibuse == "httpcore2"): returnval = download_from_url_file_with_httpcore2( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize, - sleep, - timeout) - elif (httplibuse == "mechanize"): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize, sleep, timeout) + elif(httplibuse == "mechanize"): returnval = download_from_url_file_with_mechanize( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize, - sleep, - timeout) - elif (httplibuse == "pycurl"): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize, sleep, timeout) + elif(httplibuse == "pycurl"): returnval = download_from_url_file_with_pycurl( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize, - sleep, - timeout) - elif (httplibuse == "pycurl2"): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize, sleep, timeout) + elif(httplibuse == "pycurl2"): returnval = download_from_url_file_with_pycurl2( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize, - sleep, - timeout) - elif (httplibuse == "pycurl3"): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize, sleep, timeout) + elif(httplibuse == "pycurl3"): returnval = download_from_url_file_with_pycurl3( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize, - sleep, - timeout) - elif (httplibuse == "ftp"): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize, sleep, timeout) + elif(httplibuse == "ftp"): returnval = download_from_url_file_with_ftp( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize, - sleep, - timeout) - elif (httplibuse == "sftp"): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize, sleep, timeout) + elif(httplibuse == "sftp"): returnval = download_from_url_file_with_sftp( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize, - sleep, - timeout) - elif (httplibuse == "pysftp"): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize, sleep, timeout) + elif(httplibuse == "pysftp"): returnval = download_from_url_file_with_pysftp( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize, - sleep, - timeout) + 
httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize, sleep, timeout) else: returnval = False return returnval -def download_from_url_file_with_list( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - httplibuse="urllib", - ranges=[ - None, - None], - buffersize=524288, - sleep=-1, - timeout=10): - if (isinstance(httpurl, list)): +def download_from_url_file_with_list(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, httplibuse="urllib", ranges=[None, None], buffersize=524288, sleep=-1, timeout=10): + if(isinstance(httpurl, list)): pass - elif (isinstance(httpurl, tuple)): + elif(isinstance(httpurl, tuple)): pass - elif (isinstance(httpurl, dict)): + elif(isinstance(httpurl, dict)): httpurl = httpurl.values() else: httpurl = [httpurl] listsize = len(httpurl) listcount = 0 returnval = [] - while (listcount < listsize): - ouputval = download_from_url_file( - httpurl[listcount], - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - httplibuse, - ranges, - buffersize, - sleep, - timeout) + while(listcount < listsize): + ouputval = download_from_url_file(httpurl[listcount], httpheaders, httpuseragent, httpreferer, + httpcookie, httpmethod, postdata, httplibuse, ranges, buffersize, sleep, timeout) returnval.append(ouputval) listcount += 1 return returnval -def download_from_url_to_file( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - httplibuse="urllib", - outfile="-", - outpath=os.getcwd(), - ranges=[ - None, - None], - buffersize=[ - 524288, - 524288], - sleep=-1, - timeout=10): +def download_from_url_to_file(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, httplibuse="urllib", outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10): global geturls_download_sleep, havezstd, havebrotli, haveaiohttp, haverequests, havemechanize, havepycurl, havehttplib2, haveurllib3, havehttpx, havehttpcore, haveparamiko, havepysftp - if (sleep < 0): + if(sleep < 0): sleep = geturls_download_sleep - if (timeout <= 0): + if(timeout <= 0): timeout = 10 - if (httplibuse == "urllib1" or httplibuse == - "urllib2" or httplibuse == "request"): + if(httplibuse == "urllib1" or httplibuse == "urllib2" or httplibuse == "request"): httplibuse = "urllib" - if (httplibuse == "httplib1"): + if(httplibuse == "httplib1"): httplibuse = "httplib" - if (not haverequests and httplibuse == "requests"): + if(not haverequests and httplibuse == "requests"): httplibuse = "urllib" - if (not haveaiohttp and httplibuse == "aiohttp"): + if(not haveaiohttp and httplibuse == "aiohttp"): httplibuse = "urllib" - if (not havehttpx and httplibuse == "httpx"): + if(not havehttpx and httplibuse == "httpx"): httplibuse = "urllib" - if (not havehttpx and httplibuse == "httpx2"): + if(not havehttpx and httplibuse == "httpx2"): httplibuse = "urllib" - if (not havehttpcore and httplibuse == "httpcore"): + if(not havehttpcore and httplibuse == "httpcore"): httplibuse = "urllib" - if (not havehttpcore and httplibuse == "httpcore2"): + if(not havehttpcore and httplibuse == "httpcore2"): httplibuse = "urllib" - if (not havemechanize and httplibuse == "mechanize"): + 
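download_from_url_file() runs the same dispatch but lands the body in a temporary file and returns a dict whose 'Filename' points at it. A sketch, with cleanup left to the caller since the temp file is created with delete=False:

    import os
    from pywwwget import download_from_url_file

    result = download_from_url_file("http://example.com/file.bin",
                                    httplibuse="urllib")
    if result:
        print(result['Filename'], result['Filesize'])
        os.remove(result['Filename'])  # caller owns the temp file
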
if(not havemechanize and httplibuse == "mechanize"): httplibuse = "urllib" - if (not havepycurl and httplibuse == "pycurl"): + if(not havepycurl and httplibuse == "pycurl"): httplibuse = "urllib" - if (not havepycurl and httplibuse == "pycurl2"): + if(not havepycurl and httplibuse == "pycurl2"): httplibuse = "urllib" - if (havepycurl and httplibuse == "pycurl2" and not hasattr( - pycurl, "CURL_HTTP_VERSION_2_0")): + if(havepycurl and httplibuse == "pycurl2" and not hasattr(pycurl, "CURL_HTTP_VERSION_2_0")): httplibuse = "pycurl" - if (not havepycurl and httplibuse == "pycurl3"): + if(not havepycurl and httplibuse == "pycurl3"): httplibuse = "urllib" - if (havepycurl and httplibuse == "pycurl3" and not hasattr( - pycurl, "CURL_HTTP_VERSION_3_0") and hasattr(pycurl, "CURL_HTTP_VERSION_2_0")): + if(havepycurl and httplibuse == "pycurl3" and not hasattr(pycurl, "CURL_HTTP_VERSION_3_0") and hasattr(pycurl, "CURL_HTTP_VERSION_2_0")): httplibuse = "pycurl2" - if (havepycurl and httplibuse == "pycurl3" and not hasattr(pycurl, - "CURL_HTTP_VERSION_3_0") and not hasattr(pycurl, - "CURL_HTTP_VERSION_2_0")): + if(havepycurl and httplibuse == "pycurl3" and not hasattr(pycurl, "CURL_HTTP_VERSION_3_0") and not hasattr(pycurl, "CURL_HTTP_VERSION_2_0")): httplibuse = "pycurl" - if (not havehttplib2 and httplibuse == "httplib2"): + if(not havehttplib2 and httplibuse == "httplib2"): httplibuse = "httplib" - if (not haveparamiko and httplibuse == "sftp"): + if(not haveparamiko and httplibuse == "sftp"): httplibuse = "ftp" - if (not havepysftp and httplibuse == "pysftp"): + if(not havepysftp and httplibuse == "pysftp"): httplibuse = "ftp" - if (httplibuse == "urllib" or httplibuse == "request"): + if(httplibuse == "urllib" or httplibuse == "request"): returnval = download_from_url_to_file_with_urllib( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - outfile, - outpath, - ranges, - buffersize, - sleep, - timeout) - elif (httplibuse == "request"): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, outfile, outpath, ranges, buffersize, sleep, timeout) + elif(httplibuse == "request"): returnval = download_from_url_to_file_with_request( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - outfile, - outpath, - ranges, - buffersize, - sleep, - timeout) - elif (httplibuse == "request3"): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, outfile, outpath, ranges, buffersize, sleep, timeout) + elif(httplibuse == "request3"): returnval = download_from_url_to_file_with_request3( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - outfile, - outpath, - ranges, - buffersize, - sleep, - timeout) - elif (httplibuse == "httplib"): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, outfile, outpath, ranges, buffersize, sleep, timeout) + elif(httplibuse == "httplib"): returnval = download_from_url_to_file_with_httplib( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - outfile, - outpath, - ranges, - buffersize, - sleep, - timeout) - elif (httplibuse == "httplib2"): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, outfile, outpath, ranges, buffersize, sleep, timeout) + elif(httplibuse == "httplib2"): returnval = download_from_url_to_file_with_httplib2( - httpurl, - httpheaders, - httpuseragent, - 
httpreferer, - httpcookie, - httpmethod, - postdata, - outfile, - outpath, - ranges, - buffersize, - sleep, - timeout) - elif (httplibuse == "urllib3" or httplibuse == "request3"): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, outfile, outpath, ranges, buffersize, sleep, timeout) + elif(httplibuse == "urllib3" or httplibuse == "request3"): returnval = download_from_url_to_file_with_urllib3( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - outfile, - outpath, - ranges, - buffersize, - sleep, - timeout) - elif (httplibuse == "requests"): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, outfile, outpath, ranges, buffersize, sleep, timeout) + elif(httplibuse == "requests"): returnval = download_from_url_to_file_with_requests( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - outfile, - outpath, - ranges, - buffersize, - sleep, - timeout) - elif (httplibuse == "aiohttp"): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, outfile, outpath, ranges, buffersize, sleep, timeout) + elif(httplibuse == "aiohttp"): returnval = download_from_url_to_file_with_aiohttp( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - outfile, - outpath, - ranges, - buffersize, - sleep, - timeout) - elif (httplibuse == "httpx"): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, outfile, outpath, ranges, buffersize, sleep, timeout) + elif(httplibuse == "httpx"): returnval = download_from_url_to_file_with_httpx( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize, - sleep, - timeout) - elif (httplibuse == "httpx2"): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize, sleep, timeout) + elif(httplibuse == "httpx2"): returnval = download_from_url_to_file_with_httpx2( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize, - sleep, - timeout) - elif (httplibuse == "httpcore"): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize, sleep, timeout) + elif(httplibuse == "httpcore"): returnval = download_from_url_to_file_with_httpcore( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize, - sleep, - timeout) - elif (httplibuse == "httpcore2"): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize, sleep, timeout) + elif(httplibuse == "httpcore2"): returnval = download_from_url_to_file_with_httpcore2( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize, - sleep, - timeout) - elif (httplibuse == "mechanize"): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize, sleep, timeout) + elif(httplibuse == "mechanize"): returnval = download_from_url_to_file_with_mechanize( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - outfile, - outpath, - ranges, - buffersize, - sleep, - timeout) - elif (httplibuse == "pycurl"): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, outfile, outpath, ranges, 
buffersize, sleep, timeout) + elif(httplibuse == "pycurl"): returnval = download_from_url_to_file_with_pycurl( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - outfile, - outpath, - ranges, - buffersize, - sleep, - timeout) - elif (httplibuse == "pycurl2"): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, outfile, outpath, ranges, buffersize, sleep, timeout) + elif(httplibuse == "pycurl2"): returnval = download_from_url_to_file_with_pycurl2( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - outfile, - outpath, - ranges, - buffersize, - sleep, - timeout) - elif (httplibuse == "pycurl3"): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, outfile, outpath, ranges, buffersize, sleep, timeout) + elif(httplibuse == "pycurl3"): returnval = download_from_url_to_file_with_pycurl3( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - outfile, - outpath, - ranges, - buffersize, - sleep, - timeout) - elif (httplibuse == "ftp"): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, outfile, outpath, ranges, buffersize, sleep, timeout) + elif(httplibuse == "ftp"): returnval = download_from_url_to_file_with_ftp( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - outfile, - outpath, - ranges, - buffersize, - sleep, - timeout) - elif (httplibuse == "sftp"): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, outfile, outpath, ranges, buffersize, sleep, timeout) + elif(httplibuse == "sftp"): returnval = download_from_url_to_file_with_sftp( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - outfile, - outpath, - ranges, - buffersize, - sleep, - timeout) - elif (httplibuse == "pysftp"): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, outfile, outpath, ranges, buffersize, sleep, timeout) + elif(httplibuse == "pysftp"): returnval = download_from_url_to_file_with_pysftp( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - outfile, - outpath, - ranges, - buffersize, - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, outfile, outpath, ranges, buffersize, sleep, timeout) else: returnval = False return returnval -def download_from_url_to_file_with_list( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - httplibuse="urllib", - outfile="-", - outpath=os.getcwd(), - ranges=[ - None, - None], - buffersize=[ - 524288, - 524288], - sleep=-1, - timeout=10): - if (isinstance(httpurl, list)): +def download_from_url_to_file_with_list(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, httplibuse="urllib", outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10): + if(isinstance(httpurl, list)): pass - elif (isinstance(httpurl, tuple)): + elif(isinstance(httpurl, tuple)): pass - elif (isinstance(httpurl, dict)): + elif(isinstance(httpurl, dict)): httpurl = httpurl.values() else: httpurl = [httpurl] listsize = len(httpurl) listcount = 0 returnval = [] - while (listcount < listsize): - 
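download_from_url_to_file() adds the final placement step: it moves the finished temp file into outpath/outfile, or, when outfile is "-", reads the content back and returns it inline. A sketch with placeholder paths:

    from pywwwget import download_from_url_to_file

    result = download_from_url_to_file("http://example.com/file.bin",
                                       outfile="file.bin", outpath="/tmp",
                                       httplibuse="urllib")
    if result:
        print(result['Filename'])  # e.g. /tmp/file.bin
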
ouputval = download_from_url_to_file( - httpurl[listcount], - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - httplibuse, - outfile, - outpath, - ranges, - buffersize, - sleep, - timeout) + while(listcount < listsize): + ouputval = download_from_url_to_file(httpurl[listcount], httpheaders, httpuseragent, httpreferer, + httpcookie, httpmethod, postdata, httplibuse, outfile, outpath, ranges, buffersize, sleep, timeout) returnval.append(ouputval) listcount += 1 return returnval -def download_from_url_with_urllib( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - buffersize=524288, - sleep=-1, - timeout=10): +def download_from_url_with_urllib(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10): global geturls_download_sleep, havezstd, havebrotli - if (sleep < 0): + if(sleep < 0): sleep = geturls_download_sleep - if (timeout <= 0): + if(timeout <= 0): timeout = 10 urlparts = urlparse.urlparse(httpurl) - if (isinstance(httpheaders, list)): + if(isinstance(httpheaders, list)): httpheaders = make_http_headers_from_list_to_dict(httpheaders) httpheaders = fix_header_names(httpheaders) - if (httpuseragent is not None): - if ('User-Agent' in httpheaders): + if(httpuseragent is not None): + if('User-Agent' in httpheaders): httpheaders['User-Agent'] = httpuseragent else: httpuseragent.update({'User-Agent': httpuseragent}) - if (httpreferer is not None): - if ('Referer' in httpheaders): + if(httpreferer is not None): + if('Referer' in httpheaders): httpheaders['Referer'] = httpreferer else: httpuseragent.update({'Referer': httpreferer}) - if (urlparts.username is not None or urlparts.password is not None): - if (sys.version[0] == "2"): + if(urlparts.username is not None or urlparts.password is not None): + if(sys.version[0] == "2"): inurlencode = b64encode( - str(urlparts.username + ":" + urlparts.password)) - if (sys.version[0] >= "3"): + str(urlparts.username+":"+urlparts.password)) + if(sys.version[0] >= "3"): inurlencode = b64encode( - str(urlparts.username + ":" + urlparts.password).encode()).decode("UTF-8") - httpheaders.update({'Authorization': "Basic " + inurlencode}) + str(urlparts.username+":"+urlparts.password).encode()).decode("UTF-8") + httpheaders.update({'Authorization': "Basic "+inurlencode}) geturls_opener = build_opener(HTTPCookieProcessor(httpcookie)) - if (isinstance(httpheaders, dict)): + if(isinstance(httpheaders, dict)): httpheaders = make_http_headers_from_dict_to_list(httpheaders) geturls_opener.addheaders = httpheaders time.sleep(sleep) - if (postdata is not None and not isinstance(postdata, dict)): + if(postdata is not None and not isinstance(postdata, dict)): postdata = urlencode(postdata) try: geturls_request = Request(httpurl) - if (httpmethod == "GET"): + if(httpmethod == "GET"): geturls_text = geturls_opener.open(geturls_request) - elif (httpmethod == "POST"): + elif(httpmethod == "POST"): geturls_text = geturls_opener.open(geturls_request, data=postdata) else: geturls_text = geturls_opener.open(geturls_request) except HTTPError as geturls_text_error: geturls_text = geturls_text_error - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) except URLError: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False except socket.timeout: - log.info("Error With URL " + 
httpurl) + log.info("Error With URL "+httpurl) return False httpcodeout = geturls_text.getcode() try: @@ -2091,35 +1187,35 @@ def download_from_url_with_urllib( httpurlout = geturls_text.geturl() httpheaderout = geturls_text.info() httpheadersentout = httpheaders - if (isinstance(httpheaderout, list)): + if(isinstance(httpheaderout, list)): httpheaderout = dict( make_http_headers_from_list_to_dict(httpheaderout)) httpheaderout = fix_header_names(httpheaderout) - if (sys.version[0] == "2"): + if(sys.version[0] == "2"): try: prehttpheaderout = httpheaderout httpheaderkeys = httpheaderout.keys() imax = len(httpheaderkeys) ic = 0 httpheaderout = {} - while (ic < imax): + while(ic < imax): httpheaderout.update( {httpheaderkeys[ic]: prehttpheaderout[httpheaderkeys[ic]]}) ic += 1 except AttributeError: pass - if (isinstance(httpheadersentout, list)): + if(isinstance(httpheadersentout, list)): httpheadersentout = dict( make_http_headers_from_list_to_dict(httpheadersentout)) httpheadersentout = fix_header_names(httpheadersentout) downloadsize = httpheaderout.get('Content-Length') - if (downloadsize is not None): + if(downloadsize is not None): downloadsize = int(downloadsize) if downloadsize is None: downloadsize = 0 fulldatasize = 0 prevdownsize = 0 - log.info("Downloading URL " + httpurl) + log.info("Downloading URL "+httpurl) with BytesIO() as strbuf: while True: databytes = geturls_text.read(buffersize) @@ -2128,331 +1224,145 @@ def download_from_url_with_urllib( datasize = len(databytes) fulldatasize = datasize + fulldatasize percentage = "" - if (downloadsize > 0): - percentage = str( - "{0:.2f}".format( - float( - float( - fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.') + "%" + if(downloadsize > 0): + percentage = str("{0:.2f}".format( + float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%" downloaddiff = fulldatasize - prevdownsize - log.info( - "Downloading " + - get_readable_size( - fulldatasize, - 2, - "SI")['ReadableWithSuffix'] + - " / " + - get_readable_size( - downloadsize, - 2, - "SI")['ReadableWithSuffix'] + - " " + - str(percentage) + - " / Downloaded " + - get_readable_size( - downloaddiff, - 2, - "IEC")['ReadableWithSuffix']) + log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")[ + 'ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']) prevdownsize = fulldatasize strbuf.write(databytes) strbuf.seek(0) returnval_content = strbuf.read() - if (httpheaderout.get("Content-Encoding") == "gzip"): + if(httpheaderout.get("Content-Encoding") == "gzip"): try: returnval_content = zlib.decompress( - returnval_content, 16 + zlib.MAX_WBITS) + returnval_content, 16+zlib.MAX_WBITS) except zlib.error: pass - elif (httpheaderout.get("Content-Encoding") == "deflate"): + elif(httpheaderout.get("Content-Encoding") == "deflate"): try: returnval_content = zlib.decompress(returnval_content) except zlib.error: pass - elif (httpheaderout.get("Content-Encoding") == "br" and havebrotli): + elif(httpheaderout.get("Content-Encoding") == "br" and havebrotli): try: returnval_content = brotli.decompress(returnval_content) except brotli.error: pass - elif (httpheaderout.get("Content-Encoding") == "zstd" and havezstd): + elif(httpheaderout.get("Content-Encoding") == "zstd" and havezstd): try: returnval_content = zstandard.decompress(returnval_content) except zstandard.error: pass - elif ((httpheaderout.get("Content-Encoding") == 
"lzma" or httpheaderout.get("Content-Encoding") == "xz") and havelzma): + elif((httpheaderout.get("Content-Encoding") == "lzma" or httpheaderout.get("Content-Encoding") == "xz") and havelzma): try: returnval_content = lzma.decompress(returnval_content) except zstandard.error: pass - elif (httpheaderout.get("Content-Encoding") == "bzip2"): + elif(httpheaderout.get("Content-Encoding") == "bzip2"): try: returnval_content = bz2.decompress(returnval_content) except zstandard.error: pass - returnval = { - 'Type': "Content", - 'Content': returnval_content, - 'Contentsize': fulldatasize, - 'ContentsizeAlt': { - 'IEC': get_readable_size( - fulldatasize, - 2, - "IEC"), - 'SI': get_readable_size( - fulldatasize, - 2, - "SI")}, - 'Headers': httpheaderout, - 'Version': httpversionout, - 'Method': httpmethodout, - 'HeadersSent': httpheadersentout, - 'URL': httpurlout, - 'Code': httpcodeout, - 'Reason': httpcodereason, - 'HTTPLib': "urllib"} + returnval = {'Type': "Content", 'Content': returnval_content, 'Contentsize': fulldatasize, 'ContentsizeAlt': {'IEC': get_readable_size(fulldatasize, 2, "IEC"), 'SI': get_readable_size( + fulldatasize, 2, "SI")}, 'Headers': httpheaderout, 'Version': httpversionout, 'Method': httpmethodout, 'HeadersSent': httpheadersentout, 'URL': httpurlout, 'Code': httpcodeout, 'Reason': httpcodereason, 'HTTPLib': "urllib"} geturls_text.close() return returnval -def download_from_url_file_with_urllib( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - ranges=[ - None, - None], - buffersize=524288, - sleep=-1, - timeout=10): +def download_from_url_file_with_urllib(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10): global geturls_download_sleep, havezstd, havebrotli, tmpfileprefix, tmpfilesuffix exec_time_start = time.time() myhash = hashlib.new("sha1") - if (sys.version[0] == "2"): + if(sys.version[0] == "2"): myhash.update(httpurl) myhash.update(str(buffersize)) myhash.update(str(exec_time_start)) - if (sys.version[0] >= "3"): + if(sys.version[0] >= "3"): myhash.update(httpurl.encode('utf-8')) myhash.update(str(buffersize).encode('utf-8')) myhash.update(str(exec_time_start).encode('utf-8')) newtmpfilesuffix = tmpfilesuffix + str(myhash.hexdigest()) - if (sleep < 0): + if(sleep < 0): sleep = geturls_download_sleep - if (timeout <= 0): + if(timeout <= 0): timeout = 10 pretmpfilename = download_from_url_with_urllib( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - sleep, - timeout) - if (not pretmpfilename): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep, timeout) + if(not pretmpfilename): return False with tempfile.NamedTemporaryFile('wb+', prefix=tmpfileprefix, suffix=newtmpfilesuffix, delete=False) as f: tmpfilename = f.name try: - os.utime( - tmpfilename, - (time.mktime( - email.utils.parsedate_to_datetime( - pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), - time.mktime( - email.utils.parsedate_to_datetime( - pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) + os.utime(tmpfilename, (time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), + 
time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) except AttributeError: try: - os.utime( - tmpfilename, - (time.mktime( - datetime.datetime.strptime( - pretmpfilename.get('Headers').get('Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()), - time.mktime( - datetime.datetime.strptime( - pretmpfilename.get('Headers').get('Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) + os.utime(tmpfilename, (time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()), + time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) except ValueError: pass except ValueError: pass - returnval = { - 'Type': "File", - 'Filename': tmpfilename, - 'Filesize': pretmpfilename.get('Contentsize'), - 'FilesizeAlt': { - 'IEC': get_readable_size( - pretmpfilename.get('Contentsize'), - 2, - "IEC"), - 'SI': get_readable_size( - pretmpfilename.get('Contentsize'), - 2, - "SI")}, - 'Headers': pretmpfilename.get('Headers'), - 'Version': pretmpfilename.get('Version'), - 'Method': pretmpfilename.get('Method'), - 'HeadersSent': pretmpfilename.get('HeadersSent'), - 'URL': pretmpfilename.get('URL'), - 'Code': pretmpfilename.get('Code'), - 'Reason': pretmpfilename.get('Reason'), - 'HTTPLib': pretmpfilename.get('HTTPLib')} + returnval = {'Type': "File", 'Filename': tmpfilename, 'Filesize': pretmpfilename.get('Contentsize'), 'FilesizeAlt': {'IEC': get_readable_size(pretmpfilename.get('Contentsize'), 2, "IEC"), 'SI': get_readable_size(pretmpfilename.get('Contentsize'), 2, "SI")}, 'Headers': pretmpfilename.get( + 'Headers'), 'Version': pretmpfilename.get('Version'), 'Method': pretmpfilename.get('Method'), 'HeadersSent': pretmpfilename.get('HeadersSent'), 'URL': pretmpfilename.get('URL'), 'Code': pretmpfilename.get('Code'), 'Reason': pretmpfilename.get('Reason'), 'HTTPLib': pretmpfilename.get('HTTPLib')} f.write(pretmpfilename.get('Content')) f.close() exec_time_end = time.time() - log.info("It took " + hms_string(exec_time_start - - exec_time_end) + " to download file.") - returnval.update( - { - 'Filesize': os.path.getsize(tmpfilename), - 'FilesizeAlt': { - 'IEC': get_readable_size( - os.path.getsize(tmpfilename), - 2, - "IEC"), - 'SI': get_readable_size( - os.path.getsize(tmpfilename), - 2, - "SI")}, - 'DownloadTime': float( - exec_time_start - - exec_time_end), - 'DownloadTimeReadable': hms_string( - exec_time_start - - exec_time_end)}) + log.info("It took "+hms_string(exec_time_start - + exec_time_end)+" to download file.") + returnval.update({'Filesize': os.path.getsize(tmpfilename), 'FilesizeAlt': {'IEC': get_readable_size(os.path.getsize(tmpfilename), 2, "IEC"), 'SI': get_readable_size( + os.path.getsize(tmpfilename), 2, "SI")}, 'DownloadTime': float(exec_time_start - exec_time_end), 'DownloadTimeReadable': hms_string(exec_time_start - exec_time_end)}) return returnval -def download_from_url_to_file_with_urllib( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - outfile="-", - outpath=os.getcwd(), - ranges=[ - None, - None], - buffersize=[ - 524288, - 524288], - sleep=-1, - timeout=10): +def download_from_url_to_file_with_urllib(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, 
None], buffersize=[524288, 524288], sleep=-1, timeout=10): global geturls_download_sleep, havezstd, havebrotli - if (sleep < 0): + if(sleep < 0): sleep = geturls_download_sleep - if (timeout <= 0): + if(timeout <= 0): timeout = 10 - if (not outfile == "-"): + if(not outfile == "-"): outpath = outpath.rstrip(os.path.sep) - filepath = os.path.realpath(outpath + os.path.sep + outfile) - if (not os.path.exists(outpath)): + filepath = os.path.realpath(outpath+os.path.sep+outfile) + if(not os.path.exists(outpath)): os.makedirs(outpath) - if (os.path.exists(outpath) and os.path.isfile(outpath)): + if(os.path.exists(outpath) and os.path.isfile(outpath)): return False - if (os.path.exists(filepath) and os.path.isdir(filepath)): + if(os.path.exists(filepath) and os.path.isdir(filepath)): return False pretmpfilename = download_from_url_file_with_urllib( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize[0], - sleep, - timeout) - if (not pretmpfilename): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize[0], sleep, timeout) + if(not pretmpfilename): return False tmpfilename = pretmpfilename.get('Filename') downloadsize = int(os.path.getsize(tmpfilename)) fulldatasize = 0 - log.info("Moving file " + tmpfilename + " to " + filepath) + log.info("Moving file "+tmpfilename+" to "+filepath) exec_time_start = time.time() shutil.move(tmpfilename, filepath) try: - os.utime( - filepath, - (time.mktime( - email.utils.parsedate_to_datetime( - pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), - time.mktime( - email.utils.parsedate_to_datetime( - pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) + os.utime(filepath, (time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), + time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) except AttributeError: try: - os.utime( - filepath, - (time.mktime( - datetime.datetime.strptime( - pretmpfilename.get('Headers').get('Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()), - time.mktime( - datetime.datetime.strptime( - pretmpfilename.get('Headers').get('Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) + os.utime(filepath, (time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()), + time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) except ValueError: pass except ValueError: pass exec_time_end = time.time() - log.info("It took " + hms_string(exec_time_start - - exec_time_end) + " to move file.") - if (os.path.exists(tmpfilename)): + log.info("It took "+hms_string(exec_time_start - + exec_time_end)+" to move file.") + if(os.path.exists(tmpfilename)): os.remove(tmpfilename) - returnval = { - 'Type': "File", - 'Filename': filepath, - 'Filesize': downloadsize, - 'FilesizeAlt': { - 'IEC': get_readable_size( - downloadsize, - 2, - "IEC"), - 'SI': get_readable_size( - downloadsize, - 2, - "SI")}, - 'DownloadTime': pretmpfilename.get('DownloadTime'), - 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), - 'MoveFileTime': float( - exec_time_start - exec_time_end), - 'MoveFileTimeReadable': hms_string( - exec_time_start - exec_time_end), - 'Headers': pretmpfilename.get('Headers'), - 'Version': pretmpfilename.get('Version'), - 
'Method': pretmpfilename.get('Method'), - 'Method': httpmethod, - 'HeadersSent': pretmpfilename.get('HeadersSent'), - 'URL': pretmpfilename.get('URL'), - 'Code': pretmpfilename.get('Code'), - 'Reason': pretmpfilename.get('Reason'), - 'HTTPLib': pretmpfilename.get('HTTPLib')} - if (outfile == "-"): + returnval = {'Type': "File", 'Filename': filepath, 'Filesize': downloadsize, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename.get('DownloadTime'), 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), 'MoveFileTime': float(exec_time_start - exec_time_end), 'MoveFileTimeReadable': hms_string( + exec_time_start - exec_time_end), 'Headers': pretmpfilename.get('Headers'), 'Version': pretmpfilename.get('Version'), 'Method': pretmpfilename.get('Method'), 'Method': httpmethod, 'HeadersSent': pretmpfilename.get('HeadersSent'), 'URL': pretmpfilename.get('URL'), 'Code': pretmpfilename.get('Code'), 'Reason': pretmpfilename.get('Reason'), 'HTTPLib': pretmpfilename.get('HTTPLib')} + if(outfile == "-"): pretmpfilename = download_from_url_file_with_urllib( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize[0], - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize[0], sleep, timeout) tmpfilename = pretmpfilename.get('Filename') downloadsize = int(os.path.getsize(tmpfilename)) fulldatasize = 0 @@ -2467,31 +1377,12 @@ def download_from_url_to_file_with_urllib( datasize = len(databytes) fulldatasize = datasize + fulldatasize percentage = "" - if (downloadsize > 0): - percentage = str( - "{0:.2f}".format( - float( - float( - fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.') + "%" + if(downloadsize > 0): + percentage = str("{0:.2f}".format( + float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%" downloaddiff = fulldatasize - prevdownsize - log.info( - "Copying " + - get_readable_size( - fulldatasize, - 2, - "SI")['ReadableWithSuffix'] + - " / " + - get_readable_size( - downloadsize, - 2, - "SI")['ReadableWithSuffix'] + - " " + - str(percentage) + - " / Copied " + - get_readable_size( - downloaddiff, - 2, - "IEC")['ReadableWithSuffix']) + log.info("Copying "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")[ + 'ReadableWithSuffix']+" "+str(percentage)+" / Copied "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']) prevdownsize = fulldatasize f.write(databytes) f.seek(0) @@ -2500,109 +1391,73 @@ def download_from_url_to_file_with_urllib( ft.close() os.remove(tmpfilename) exec_time_end = time.time() - log.info("It took " + hms_string(exec_time_start - - exec_time_end) + " to copy file.") - returnval = { - 'Type': "Content", - 'Content': fdata, - 'Contentsize': downloadsize, - 'ContentsizeAlt': { - 'IEC': get_readable_size( - downloadsize, - 2, - "IEC"), - 'SI': get_readable_size( - downloadsize, - 2, - "SI")}, - 'DownloadTime': pretmpfilename.get('DownloadTime'), - 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), - 'MoveFileTime': float( - exec_time_start - exec_time_end), - 'MoveFileTimeReadable': hms_string( - exec_time_start - exec_time_end), - 'Headers': pretmpfilename.get('Headers'), - 'Version': pretmpfilename.get('Version'), - 'Method': pretmpfilename.get('Method'), - 'Method': httpmethod, - 'HeadersSent': 
pretmpfilename.get('HeadersSent'), - 'URL': pretmpfilename.get('URL'), - 'Code': pretmpfilename.get('Code'), - 'Reason': pretmpfilename.get('Reason'), - 'HTTPLib': pretmpfilename.get('HTTPLib')} + log.info("It took "+hms_string(exec_time_start - + exec_time_end)+" to copy file.") + returnval = {'Type': "Content", 'Content': fdata, 'Contentsize': downloadsize, 'ContentsizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename.get('DownloadTime'), 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), 'MoveFileTime': float(exec_time_start - exec_time_end), 'MoveFileTimeReadable': hms_string( + exec_time_start - exec_time_end), 'Headers': pretmpfilename.get('Headers'), 'Version': pretmpfilename.get('Version'), 'Method': pretmpfilename.get('Method'), 'Method': httpmethod, 'HeadersSent': pretmpfilename.get('HeadersSent'), 'URL': pretmpfilename.get('URL'), 'Code': pretmpfilename.get('Code'), 'Reason': pretmpfilename.get('Reason'), 'HTTPLib': pretmpfilename.get('HTTPLib')} return returnval -def download_from_url_with_httplib( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - buffersize=524288, - sleep=-1, - timeout=10): +def download_from_url_with_httplib(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10): global geturls_download_sleep, havezstd, havebrotli, havezstd, havebrotli - if (sleep < 0): + if(sleep < 0): sleep = geturls_download_sleep - if (timeout <= 0): + if(timeout <= 0): timeout = 10 urlparts = urlparse.urlparse(httpurl) - if (isinstance(httpheaders, list)): + if(isinstance(httpheaders, list)): httpheaders = make_http_headers_from_list_to_dict(httpheaders) httpheaders = fix_header_names(httpheaders) - if (httpuseragent is not None): - if ('User-Agent' in httpheaders): + if(httpuseragent is not None): + if('User-Agent' in httpheaders): httpheaders['User-Agent'] = httpuseragent else: httpuseragent.update({'User-Agent': httpuseragent}) - if (httpreferer is not None): - if ('Referer' in httpheaders): + if(httpreferer is not None): + if('Referer' in httpheaders): httpheaders['Referer'] = httpreferer else: httpuseragent.update({'Referer': httpreferer}) - if (urlparts.username is not None or urlparts.password is not None): - if (sys.version[0] == "2"): + if(urlparts.username is not None or urlparts.password is not None): + if(sys.version[0] == "2"): inurlencode = b64encode( - str(urlparts.username + ":" + urlparts.password)) - if (sys.version[0] >= "3"): + str(urlparts.username+":"+urlparts.password)) + if(sys.version[0] >= "3"): inurlencode = b64encode( - str(urlparts.username + ":" + urlparts.password).encode()).decode("UTF-8") - httpheaders.update({'Authorization': "Basic " + inurlencode}) + str(urlparts.username+":"+urlparts.password).encode()).decode("UTF-8") + httpheaders.update({'Authorization': "Basic "+inurlencode}) geturls_opener = build_opener(HTTPCookieProcessor(httpcookie)) geturls_opener.addheaders = httpheaders time.sleep(sleep) - if (urlparts[0] == "http"): + if(urlparts[0] == "http"): httpconn = HTTPConnection(urlparts[1], timeout=timeout) - elif (urlparts[0] == "https"): + elif(urlparts[0] == "https"): httpconn = HTTPSConnection(urlparts[1], timeout=timeout) else: return False - if (postdata is not None and not isinstance(postdata, dict)): + if(postdata is not 
None and not isinstance(postdata, dict)): postdata = urlencode(postdata) try: - if (httpmethod == "GET"): + if(httpmethod == "GET"): httpconn.request("GET", urlparts[2], headers=httpheaders) - elif (httpmethod == "POST"): + elif(httpmethod == "POST"): httpconn.request( "GET", urlparts[2], body=postdata, headers=httpheaders) else: httpconn.request("GET", urlparts[2], headers=httpheaders) except socket.timeout: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False except socket.gaierror: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False except BlockingIOError: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False geturls_text = httpconn.getresponse() httpcodeout = geturls_text.status httpcodereason = geturls_text.reason - if (geturls_text.version == "10"): + if(geturls_text.version == "10"): httpversionout = "1.0" else: httpversionout = "1.1" @@ -2610,35 +1465,35 @@ def download_from_url_with_httplib( httpurlout = httpurl httpheaderout = geturls_text.getheaders() httpheadersentout = httpheaders - if (isinstance(httpheaderout, list)): + if(isinstance(httpheaderout, list)): httpheaderout = dict( make_http_headers_from_list_to_dict(httpheaderout)) - if (sys.version[0] == "2"): + if(sys.version[0] == "2"): try: prehttpheaderout = httpheaderout httpheaderkeys = httpheaderout.keys() imax = len(httpheaderkeys) ic = 0 httpheaderout = {} - while (ic < imax): + while(ic < imax): httpheaderout.update( {httpheaderkeys[ic]: prehttpheaderout[httpheaderkeys[ic]]}) ic += 1 except AttributeError: pass httpheaderout = fix_header_names(httpheaderout) - if (isinstance(httpheadersentout, list)): + if(isinstance(httpheadersentout, list)): httpheadersentout = dict( make_http_headers_from_list_to_dict(httpheadersentout)) httpheadersentout = fix_header_names(httpheadersentout) downloadsize = httpheaderout.get('Content-Length') - if (downloadsize is not None): + if(downloadsize is not None): downloadsize = int(downloadsize) if downloadsize is None: downloadsize = 0 fulldatasize = 0 prevdownsize = 0 - log.info("Downloading URL " + httpurl) + log.info("Downloading URL "+httpurl) with BytesIO() as strbuf: while True: databytes = geturls_text.read(buffersize) @@ -2647,331 +1502,145 @@ def download_from_url_with_httplib( datasize = len(databytes) fulldatasize = datasize + fulldatasize percentage = "" - if (downloadsize > 0): - percentage = str( - "{0:.2f}".format( - float( - float( - fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.') + "%" + if(downloadsize > 0): + percentage = str("{0:.2f}".format( + float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%" downloaddiff = fulldatasize - prevdownsize - log.info( - "Downloading " + - get_readable_size( - fulldatasize, - 2, - "SI")['ReadableWithSuffix'] + - " / " + - get_readable_size( - downloadsize, - 2, - "SI")['ReadableWithSuffix'] + - " " + - str(percentage) + - " / Downloaded " + - get_readable_size( - downloaddiff, - 2, - "IEC")['ReadableWithSuffix']) + log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")[ + 'ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']) prevdownsize = fulldatasize strbuf.write(databytes) strbuf.seek(0) returnval_content = strbuf.read() - if (httpheaderout.get("Content-Encoding") == "gzip"): + if(httpheaderout.get("Content-Encoding") == "gzip"): try: 
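                # wbits = 16 + zlib.MAX_WBITS makes zlib expect a gzip container
                # (header plus CRC32 trailer) around the DEFLATE data; bare
                # MAX_WBITS would only accept a zlib-wrapped stream, and a
                # negative value raw DEFLATE.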
returnval_content = zlib.decompress( - returnval_content, 16 + zlib.MAX_WBITS) + returnval_content, 16+zlib.MAX_WBITS) except zlib.error: pass - elif (httpheaderout.get("Content-Encoding") == "deflate"): + elif(httpheaderout.get("Content-Encoding") == "deflate"): try: returnval_content = zlib.decompress(returnval_content) except zlib.error: pass - elif (httpheaderout.get("Content-Encoding") == "br" and havebrotli): + elif(httpheaderout.get("Content-Encoding") == "br" and havebrotli): try: returnval_content = brotli.decompress(returnval_content) except brotli.error: pass - elif (httpheaderout.get("Content-Encoding") == "zstd" and havezstd): + elif(httpheaderout.get("Content-Encoding") == "zstd" and havezstd): try: returnval_content = zstandard.decompress(returnval_content) except zstandard.error: pass - elif ((httpheaderout.get("Content-Encoding") == "lzma" or httpheaderout.get("Content-Encoding") == "xz") and havelzma): + elif((httpheaderout.get("Content-Encoding") == "lzma" or httpheaderout.get("Content-Encoding") == "xz") and havelzma): try: returnval_content = lzma.decompress(returnval_content) except zstandard.error: pass - elif (httpheaderout.get("Content-Encoding") == "bzip2"): + elif(httpheaderout.get("Content-Encoding") == "bzip2"): try: returnval_content = bz2.decompress(returnval_content) except zstandard.error: pass - returnval = { - 'Type': "Content", - 'Content': returnval_content, - 'Contentsize': fulldatasize, - 'ContentsizeAlt': { - 'IEC': get_readable_size( - fulldatasize, - 2, - "IEC"), - 'SI': get_readable_size( - fulldatasize, - 2, - "SI")}, - 'Headers': httpheaderout, - 'Version': httpversionout, - 'Method': httpmethodout, - 'HeadersSent': httpheadersentout, - 'URL': httpurlout, - 'Code': httpcodeout, - 'Reason': httpcodereason, - 'HTTPLib': "httplib"} + returnval = {'Type': "Content", 'Content': returnval_content, 'Contentsize': fulldatasize, 'ContentsizeAlt': {'IEC': get_readable_size(fulldatasize, 2, "IEC"), 'SI': get_readable_size( + fulldatasize, 2, "SI")}, 'Headers': httpheaderout, 'Version': httpversionout, 'Method': httpmethodout, 'HeadersSent': httpheadersentout, 'URL': httpurlout, 'Code': httpcodeout, 'Reason': httpcodereason, 'HTTPLib': "httplib"} geturls_text.close() return returnval -def download_from_url_file_with_httplib( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - ranges=[ - None, - None], - buffersize=524288, - sleep=-1, - timeout=10): +def download_from_url_file_with_httplib(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10): global geturls_download_sleep, havezstd, havebrotli, tmpfileprefix, tmpfilesuffix exec_time_start = time.time() myhash = hashlib.new("sha1") - if (sys.version[0] == "2"): + if(sys.version[0] == "2"): myhash.update(httpurl) myhash.update(str(buffersize)) myhash.update(str(exec_time_start)) - if (sys.version[0] >= "3"): + if(sys.version[0] >= "3"): myhash.update(httpurl.encode('utf-8')) myhash.update(str(buffersize).encode('utf-8')) myhash.update(str(exec_time_start).encode('utf-8')) newtmpfilesuffix = tmpfilesuffix + str(myhash.hexdigest()) - if (sleep < 0): + if(sleep < 0): sleep = geturls_download_sleep - if (timeout <= 0): + if(timeout <= 0): timeout = 10 pretmpfilename = download_from_url_with_urllib( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - 
httpmethod, - postdata, - buffersize, - sleep, - timeout) - if (not pretmpfilename): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep, timeout) + if(not pretmpfilename): return False with tempfile.NamedTemporaryFile('wb+', prefix=tmpfileprefix, suffix=newtmpfilesuffix, delete=False) as f: tmpfilename = f.name try: - os.utime( - tmpfilename, - (time.mktime( - email.utils.parsedate_to_datetime( - pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), - time.mktime( - email.utils.parsedate_to_datetime( - pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) + os.utime(tmpfilename, (time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), + time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) except AttributeError: try: - os.utime( - tmpfilename, - (time.mktime( - datetime.datetime.strptime( - pretmpfilename.get('Headers').get('Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()), - time.mktime( - datetime.datetime.strptime( - pretmpfilename.get('Headers').get('Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) + os.utime(tmpfilename, (time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()), + time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) except ValueError: pass except ValueError: pass - returnval = { - 'Type': "File", - 'Filename': tmpfilename, - 'Filesize': pretmpfilename.get('Contentsize'), - 'FilesizeAlt': { - 'IEC': get_readable_size( - pretmpfilename.get('Contentsize'), - 2, - "IEC"), - 'SI': get_readable_size( - pretmpfilename.get('Contentsize'), - 2, - "SI")}, - 'Headers': pretmpfilename.get('Headers'), - 'Version': pretmpfilename.get('Version'), - 'Method': pretmpfilename.get('Method'), - 'HeadersSent': pretmpfilename.get('HeadersSent'), - 'URL': pretmpfilename.get('URL'), - 'Code': pretmpfilename.get('Code'), - 'Reason': pretmpfilename.get('Reason'), - 'HTTPLib': pretmpfilename.get('HTTPLib')} + returnval = {'Type': "File", 'Filename': tmpfilename, 'Filesize': pretmpfilename.get('Contentsize'), 'FilesizeAlt': {'IEC': get_readable_size(pretmpfilename.get('Contentsize'), 2, "IEC"), 'SI': get_readable_size(pretmpfilename.get('Contentsize'), 2, "SI")}, 'Headers': pretmpfilename.get( + 'Headers'), 'Version': pretmpfilename.get('Version'), 'Method': pretmpfilename.get('Method'), 'HeadersSent': pretmpfilename.get('HeadersSent'), 'URL': pretmpfilename.get('URL'), 'Code': pretmpfilename.get('Code'), 'Reason': pretmpfilename.get('Reason'), 'HTTPLib': pretmpfilename.get('HTTPLib')} f.write(pretmpfilename.get('Content')) f.close() exec_time_end = time.time() - log.info("It took " + hms_string(exec_time_start - - exec_time_end) + " to download file.") - returnval.update( - { - 'Filesize': os.path.getsize(tmpfilename), - 'FilesizeAlt': { - 'IEC': get_readable_size( - os.path.getsize(tmpfilename), - 2, - "IEC"), - 'SI': get_readable_size( - os.path.getsize(tmpfilename), - 2, - "SI")}, - 'DownloadTime': float( - exec_time_start - - exec_time_end), - 'DownloadTimeReadable': hms_string( - exec_time_start - - exec_time_end)}) + log.info("It took "+hms_string(exec_time_start - + exec_time_end)+" to download file.") + returnval.update({'Filesize': os.path.getsize(tmpfilename), 'FilesizeAlt': {'IEC': 
get_readable_size(os.path.getsize(tmpfilename), 2, "IEC"), 'SI': get_readable_size( + os.path.getsize(tmpfilename), 2, "SI")}, 'DownloadTime': float(exec_time_start - exec_time_end), 'DownloadTimeReadable': hms_string(exec_time_start - exec_time_end)}) return returnval -def download_from_url_to_file_with_httplib( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - outfile="-", - outpath=os.getcwd(), - ranges=[ - None, - None], - buffersize=[ - 524288, - 524288], - sleep=-1, - timeout=10): +def download_from_url_to_file_with_httplib(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10): global geturls_download_sleep, havezstd, havebrotli - if (sleep < 0): + if(sleep < 0): sleep = geturls_download_sleep - if (timeout <= 0): + if(timeout <= 0): timeout = 10 - if (not outfile == "-"): + if(not outfile == "-"): outpath = outpath.rstrip(os.path.sep) - filepath = os.path.realpath(outpath + os.path.sep + outfile) - if (not os.path.exists(outpath)): + filepath = os.path.realpath(outpath+os.path.sep+outfile) + if(not os.path.exists(outpath)): os.makedirs(outpath) - if (os.path.exists(outpath) and os.path.isfile(outpath)): + if(os.path.exists(outpath) and os.path.isfile(outpath)): return False - if (os.path.exists(filepath) and os.path.isdir(filepath)): + if(os.path.exists(filepath) and os.path.isdir(filepath)): return False pretmpfilename = download_from_url_file_with_httplib( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize[0], - sleep, - timeout) - if (not pretmpfilename): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize[0], sleep, timeout) + if(not pretmpfilename): return False tmpfilename = pretmpfilename.get('Filename') downloadsize = int(os.path.getsize(tmpfilename)) fulldatasize = 0 - log.info("Moving file " + tmpfilename + " to " + filepath) + log.info("Moving file "+tmpfilename+" to "+filepath) exec_time_start = time.time() shutil.move(tmpfilename, filepath) try: - os.utime( - filepath, - (time.mktime( - email.utils.parsedate_to_datetime( - pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), - time.mktime( - email.utils.parsedate_to_datetime( - pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) + os.utime(filepath, (time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), + time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) except AttributeError: try: - os.utime( - filepath, - (time.mktime( - datetime.datetime.strptime( - pretmpfilename.get('Headers').get('Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()), - time.mktime( - datetime.datetime.strptime( - pretmpfilename.get('Headers').get('Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) + os.utime(filepath, (time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()), + time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) except ValueError: pass except ValueError: pass exec_time_end = time.time() - log.info("It took " + 
hms_string(exec_time_start - - exec_time_end) + " to move file.") - if (os.path.exists(tmpfilename)): + log.info("It took "+hms_string(exec_time_start - + exec_time_end)+" to move file.") + if(os.path.exists(tmpfilename)): os.remove(tmpfilename) - returnval = { - 'Type': "File", - 'Filename': filepath, - 'Filesize': downloadsize, - 'FilesizeAlt': { - 'IEC': get_readable_size( - downloadsize, - 2, - "IEC"), - 'SI': get_readable_size( - downloadsize, - 2, - "SI")}, - 'DownloadTime': pretmpfilename.get('DownloadTime'), - 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), - 'MoveFileTime': float( - exec_time_start - exec_time_end), - 'MoveFileTimeReadable': hms_string( - exec_time_start - exec_time_end), - 'Headers': pretmpfilename.get('Headers'), - 'Version': pretmpfilename.get('Version'), - 'Method': pretmpfilename.get('Method'), - 'Method': httpmethod, - 'HeadersSent': pretmpfilename.get('HeadersSent'), - 'URL': pretmpfilename.get('URL'), - 'Code': pretmpfilename.get('Code'), - 'Reason': pretmpfilename.get('Reason'), - 'HTTPLib': pretmpfilename.get('HTTPLib')} - if (outfile == "-"): + returnval = {'Type': "File", 'Filename': filepath, 'Filesize': downloadsize, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename.get('DownloadTime'), 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), 'MoveFileTime': float(exec_time_start - exec_time_end), 'MoveFileTimeReadable': hms_string( + exec_time_start - exec_time_end), 'Headers': pretmpfilename.get('Headers'), 'Version': pretmpfilename.get('Version'), 'Method': pretmpfilename.get('Method'), 'Method': httpmethod, 'HeadersSent': pretmpfilename.get('HeadersSent'), 'URL': pretmpfilename.get('URL'), 'Code': pretmpfilename.get('Code'), 'Reason': pretmpfilename.get('Reason'), 'HTTPLib': pretmpfilename.get('HTTPLib')} + if(outfile == "-"): pretmpfilename = download_from_url_file_with_httplib( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize[0], - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize[0], sleep, timeout) tmpfilename = pretmpfilename.get('Filename') downloadsize = int(os.path.getsize(tmpfilename)) fulldatasize = 0 @@ -2986,31 +1655,12 @@ def download_from_url_to_file_with_httplib( datasize = len(databytes) fulldatasize = datasize + fulldatasize percentage = "" - if (downloadsize > 0): - percentage = str( - "{0:.2f}".format( - float( - float( - fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.') + "%" + if(downloadsize > 0): + percentage = str("{0:.2f}".format( + float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%" downloaddiff = fulldatasize - prevdownsize - log.info( - "Copying " + - get_readable_size( - fulldatasize, - 2, - "SI")['ReadableWithSuffix'] + - " / " + - get_readable_size( - downloadsize, - 2, - "SI")['ReadableWithSuffix'] + - " " + - str(percentage) + - " / Copied " + - get_readable_size( - downloaddiff, - 2, - "IEC")['ReadableWithSuffix']) + log.info("Copying "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")[ + 'ReadableWithSuffix']+" "+str(percentage)+" / Copied "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']) prevdownsize = fulldatasize f.write(databytes) f.seek(0) @@ -3019,110 +1669,74 @@ def download_from_url_to_file_with_httplib( ft.close() 
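        # The returnval dict built below repeats the 'Method' key, first as
        # pretmpfilename.get('Method') and then as httpmethod. In a Python dict
        # literal the last duplicate wins, so the reported method is always
        # httpmethod. A minimal sketch of that behavior, with hypothetical values:
        #
        #     d = {'Method': "from-response", 'Method': "from-argument"}
        #     assert d['Method'] == "from-argument"  # first value is silently dropped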
os.remove(tmpfilename) exec_time_end = time.time() - log.info("It took " + hms_string(exec_time_start - - exec_time_end) + " to copy file.") - returnval = { - 'Type': "Content", - 'Content': fdata, - 'Contentsize': downloadsize, - 'ContentsizeAlt': { - 'IEC': get_readable_size( - downloadsize, - 2, - "IEC"), - 'SI': get_readable_size( - downloadsize, - 2, - "SI")}, - 'DownloadTime': pretmpfilename.get('DownloadTime'), - 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), - 'MoveFileTime': float( - exec_time_start - exec_time_end), - 'MoveFileTimeReadable': hms_string( - exec_time_start - exec_time_end), - 'Headers': pretmpfilename.get('Headers'), - 'Version': pretmpfilename.get('Version'), - 'Method': pretmpfilename.get('Method'), - 'Method': httpmethod, - 'HeadersSent': pretmpfilename.get('HeadersSent'), - 'URL': pretmpfilename.get('URL'), - 'Code': pretmpfilename.get('Code'), - 'Reason': pretmpfilename.get('Reason'), - 'HTTPLib': pretmpfilename.get('HTTPLib')} + log.info("It took "+hms_string(exec_time_start - + exec_time_end)+" to copy file.") + returnval = {'Type': "Content", 'Content': fdata, 'Contentsize': downloadsize, 'ContentsizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename.get('DownloadTime'), 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), 'MoveFileTime': float(exec_time_start - exec_time_end), 'MoveFileTimeReadable': hms_string( + exec_time_start - exec_time_end), 'Headers': pretmpfilename.get('Headers'), 'Version': pretmpfilename.get('Version'), 'Method': pretmpfilename.get('Method'), 'Method': httpmethod, 'HeadersSent': pretmpfilename.get('HeadersSent'), 'URL': pretmpfilename.get('URL'), 'Code': pretmpfilename.get('Code'), 'Reason': pretmpfilename.get('Reason'), 'HTTPLib': pretmpfilename.get('HTTPLib')} return returnval -if (havehttplib2): - def download_from_url_with_httplib2( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - buffersize=524288, - sleep=-1, - timeout=10): +if(havehttplib2): + def download_from_url_with_httplib2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10): global geturls_download_sleep, havezstd, havebrotli - if (sleep < 0): + if(sleep < 0): sleep = geturls_download_sleep - if (timeout <= 0): + if(timeout <= 0): timeout = 10 urlparts = urlparse.urlparse(httpurl) - if (isinstance(httpheaders, list)): + if(isinstance(httpheaders, list)): httpheaders = make_http_headers_from_list_to_dict(httpheaders) httpheaders = fix_header_names(httpheaders) - if (httpuseragent is not None): - if ('User-Agent' in httpheaders): + if(httpuseragent is not None): + if('User-Agent' in httpheaders): httpheaders['User-Agent'] = httpuseragent else: httpuseragent.update({'User-Agent': httpuseragent}) - if (httpreferer is not None): - if ('Referer' in httpheaders): + if(httpreferer is not None): + if('Referer' in httpheaders): httpheaders['Referer'] = httpreferer else: httpuseragent.update({'Referer': httpreferer}) - if (urlparts.username is not None or urlparts.password is not None): - if (sys.version[0] == "2"): + if(urlparts.username is not None or urlparts.password is not None): + if(sys.version[0] == "2"): inurlencode = b64encode( - str(urlparts.username + ":" + urlparts.password)) - if (sys.version[0] >= "3"): + 
str(urlparts.username+":"+urlparts.password)) + if(sys.version[0] >= "3"): inurlencode = b64encode( - str(urlparts.username + ":" + urlparts.password).encode()).decode("UTF-8") - httpheaders.update({'Authorization': "Basic " + inurlencode}) + str(urlparts.username+":"+urlparts.password).encode()).decode("UTF-8") + httpheaders.update({'Authorization': "Basic "+inurlencode}) geturls_opener = build_opener(HTTPCookieProcessor(httpcookie)) geturls_opener.addheaders = httpheaders time.sleep(sleep) - if (urlparts[0] == "http"): + if(urlparts[0] == "http"): httpconn = HTTPConnectionWithTimeout(urlparts[1], timeout=timeout) - elif (urlparts[0] == "https"): + elif(urlparts[0] == "https"): httpconn = HTTPSConnectionWithTimeout(urlparts[1], timeout=timeout) else: return False - if (postdata is not None and not isinstance(postdata, dict)): + if(postdata is not None and not isinstance(postdata, dict)): postdata = urlencode(postdata) try: - if (httpmethod == "GET"): + if(httpmethod == "GET"): httpconn.request("GET", urlparts[2], headers=httpheaders) - elif (httpmethod == "POST"): + elif(httpmethod == "POST"): httpconn.request( "GET", urlparts[2], body=postdata, headers=httpheaders) else: httpconn.request("GET", urlparts[2], headers=httpheaders) except socket.timeout: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False except socket.gaierror: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False except BlockingIOError: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False geturls_text = httpconn.getresponse() httpcodeout = geturls_text.status httpcodereason = geturls_text.reason - if (geturls_text.version == "10"): + if(geturls_text.version == "10"): httpversionout = "1.0" else: httpversionout = "1.1" @@ -3130,35 +1744,35 @@ def download_from_url_with_httplib2( httpurlout = httpurl httpheaderout = geturls_text.getheaders() httpheadersentout = httpheaders - if (isinstance(httpheaderout, list)): + if(isinstance(httpheaderout, list)): httpheaderout = dict( make_http_headers_from_list_to_dict(httpheaderout)) - if (sys.version[0] == "2"): + if(sys.version[0] == "2"): try: prehttpheaderout = httpheaderout httpheaderkeys = httpheaderout.keys() imax = len(httpheaderkeys) ic = 0 httpheaderout = {} - while (ic < imax): + while(ic < imax): httpheaderout.update( {httpheaderkeys[ic]: prehttpheaderout[httpheaderkeys[ic]]}) ic += 1 except AttributeError: pass httpheaderout = fix_header_names(httpheaderout) - if (isinstance(httpheadersentout, list)): + if(isinstance(httpheadersentout, list)): httpheadersentout = dict( make_http_headers_from_list_to_dict(httpheadersentout)) httpheadersentout = fix_header_names(httpheadersentout) downloadsize = httpheaderout.get('Content-Length') - if (downloadsize is not None): + if(downloadsize is not None): downloadsize = int(downloadsize) if downloadsize is None: downloadsize = 0 fulldatasize = 0 prevdownsize = 0 - log.info("Downloading URL " + httpurl) + log.info("Downloading URL "+httpurl) with BytesIO() as strbuf: while True: databytes = geturls_text.read(buffersize) @@ -3167,389 +1781,157 @@ def download_from_url_with_httplib2( datasize = len(databytes) fulldatasize = datasize + fulldatasize percentage = "" - if (downloadsize > 0): - percentage = str( - "{0:.2f}".format( - float( - float( - fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.') + "%" + if(downloadsize > 0): + percentage = str("{0:.2f}".format( + float(float(fulldatasize / downloadsize) * 
100))).rstrip('0').rstrip('.')+"%" downloaddiff = fulldatasize - prevdownsize - log.info( - "Downloading " + - get_readable_size( - fulldatasize, - 2, - "SI")['ReadableWithSuffix'] + - " / " + - get_readable_size( - downloadsize, - 2, - "SI")['ReadableWithSuffix'] + - " " + - str(percentage) + - " / Downloaded " + - get_readable_size( - downloaddiff, - 2, - "IEC")['ReadableWithSuffix']) + log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")[ + 'ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']) prevdownsize = fulldatasize strbuf.write(databytes) strbuf.seek(0) returnval_content = strbuf.read() - if (httpheaderout.get("Content-Encoding") == "gzip"): + if(httpheaderout.get("Content-Encoding") == "gzip"): try: returnval_content = zlib.decompress( - returnval_content, 16 + zlib.MAX_WBITS) + returnval_content, 16+zlib.MAX_WBITS) except zlib.error: pass - elif (httpheaderout.get("Content-Encoding") == "deflate"): + elif(httpheaderout.get("Content-Encoding") == "deflate"): try: returnval_content = zlib.decompress(returnval_content) except zlib.error: pass - elif (httpheaderout.get("Content-Encoding") == "br" and havebrotli): + elif(httpheaderout.get("Content-Encoding") == "br" and havebrotli): try: returnval_content = brotli.decompress(returnval_content) except brotli.error: pass - elif (httpheaderout.get("Content-Encoding") == "zstd" and havezstd): + elif(httpheaderout.get("Content-Encoding") == "zstd" and havezstd): try: returnval_content = zstandard.decompress(returnval_content) except zstandard.error: pass - elif ((httpheaderout.get("Content-Encoding") == "lzma" or httpheaderout.get("Content-Encoding") == "xz") and havelzma): + elif((httpheaderout.get("Content-Encoding") == "lzma" or httpheaderout.get("Content-Encoding") == "xz") and havelzma): try: returnval_content = lzma.decompress(returnval_content) except zstandard.error: pass - elif (httpheaderout.get("Content-Encoding") == "bzip2"): + elif(httpheaderout.get("Content-Encoding") == "bzip2"): try: returnval_content = bz2.decompress(returnval_content) except zstandard.error: pass - returnval = { - 'Type': "Content", - 'Content': returnval_content, - 'Contentsize': fulldatasize, - 'ContentsizeAlt': { - 'IEC': get_readable_size( - fulldatasize, - 2, - "IEC"), - 'SI': get_readable_size( - fulldatasize, - 2, - "SI")}, - 'Headers': httpheaderout, - 'Version': httpversionout, - 'Method': httpmethodout, - 'HeadersSent': httpheadersentout, - 'URL': httpurlout, - 'Code': httpcodeout, - 'Reason': httpcodereason, - 'HTTPLib': "httplib2"} + returnval = {'Type': "Content", 'Content': returnval_content, 'Contentsize': fulldatasize, 'ContentsizeAlt': {'IEC': get_readable_size(fulldatasize, 2, "IEC"), 'SI': get_readable_size( + fulldatasize, 2, "SI")}, 'Headers': httpheaderout, 'Version': httpversionout, 'Method': httpmethodout, 'HeadersSent': httpheadersentout, 'URL': httpurlout, 'Code': httpcodeout, 'Reason': httpcodereason, 'HTTPLib': "httplib2"} geturls_text.close() return returnval -if (not havehttplib2): - def download_from_url_with_httplib2( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - buffersize=524288, - sleep=-1, - timeout=10): +if(not havehttplib2): + def download_from_url_with_httplib2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, 
httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10): returnval = download_from_url_with_urllib( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep, timeout) return returnval -if (havehttplib2): - def download_from_url_file_with_httplib2( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - ranges=[ - None, - None], - buffersize=524288, - sleep=-1, - timeout=10): +if(havehttplib2): + def download_from_url_file_with_httplib2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10): global geturls_download_sleep, havezstd, havebrotli, tmpfileprefix, tmpfilesuffix exec_time_start = time.time() myhash = hashlib.new("sha1") - if (sys.version[0] == "2"): + if(sys.version[0] == "2"): myhash.update(httpurl) myhash.update(str(buffersize)) myhash.update(str(exec_time_start)) - if (sys.version[0] >= "3"): + if(sys.version[0] >= "3"): myhash.update(httpurl.encode('utf-8')) myhash.update(str(buffersize).encode('utf-8')) myhash.update(str(exec_time_start).encode('utf-8')) newtmpfilesuffix = tmpfilesuffix + str(myhash.hexdigest()) - if (sleep < 0): + if(sleep < 0): sleep = geturls_download_sleep - if (timeout <= 0): + if(timeout <= 0): timeout = 10 pretmpfilename = download_from_url_with_httplib2( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - sleep, - timeout) - if (not pretmpfilename): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep, timeout) + if(not pretmpfilename): return False with tempfile.NamedTemporaryFile('wb+', prefix=tmpfileprefix, suffix=newtmpfilesuffix, delete=False) as f: tmpfilename = f.name try: - os.utime( - tmpfilename, - (time.mktime( - email.utils.parsedate_to_datetime( - pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), - time.mktime( - email.utils.parsedate_to_datetime( - pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) + os.utime(tmpfilename, (time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), + time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) except AttributeError: try: - os.utime( - tmpfilename, - (time.mktime( - datetime.datetime.strptime( - pretmpfilename.get('Headers').get( - 'Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()), - time.mktime( - datetime.datetime.strptime( - pretmpfilename.get('Headers').get( - 'Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) + os.utime(tmpfilename, (time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()), + time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) except ValueError: pass except ValueError: pass - returnval = { - 'Type': "File", - 'Filename': tmpfilename, - 'Filesize': pretmpfilename.get('Contentsize'), - 'FilesizeAlt': { - 'IEC': get_readable_size( - pretmpfilename.get('Contentsize'), - 2, - "IEC"), - 'SI': get_readable_size( - 
pretmpfilename.get('Contentsize'), - 2, - "SI")}, - 'Headers': pretmpfilename.get('Headers'), - 'Version': pretmpfilename.get('Version'), - 'Method': pretmpfilename.get('Method'), - 'HeadersSent': pretmpfilename.get('HeadersSent'), - 'URL': pretmpfilename.get('URL'), - 'Code': pretmpfilename.get('Code'), - 'Reason': pretmpfilename.get('Reason'), - 'HTTPLib': pretmpfilename.get('HTTPLib')} + returnval = {'Type': "File", 'Filename': tmpfilename, 'Filesize': pretmpfilename.get('Contentsize'), 'FilesizeAlt': {'IEC': get_readable_size(pretmpfilename.get('Contentsize'), 2, "IEC"), 'SI': get_readable_size(pretmpfilename.get('Contentsize'), 2, "SI")}, 'Headers': pretmpfilename.get( + 'Headers'), 'Version': pretmpfilename.get('Version'), 'Method': pretmpfilename.get('Method'), 'HeadersSent': pretmpfilename.get('HeadersSent'), 'URL': pretmpfilename.get('URL'), 'Code': pretmpfilename.get('Code'), 'Reason': pretmpfilename.get('Reason'), 'HTTPLib': pretmpfilename.get('HTTPLib')} f.write(pretmpfilename.get('Content')) f.close() exec_time_end = time.time() - log.info("It took " + hms_string(exec_time_start - - exec_time_end) + " to download file.") - returnval.update( - { - 'Filesize': os.path.getsize(tmpfilename), - 'FilesizeAlt': { - 'IEC': get_readable_size( - os.path.getsize(tmpfilename), - 2, - "IEC"), - 'SI': get_readable_size( - os.path.getsize(tmpfilename), - 2, - "SI")}, - 'DownloadTime': float( - exec_time_start - - exec_time_end), - 'DownloadTimeReadable': hms_string( - exec_time_start - - exec_time_end)}) + log.info("It took "+hms_string(exec_time_start - + exec_time_end)+" to download file.") + returnval.update({'Filesize': os.path.getsize(tmpfilename), 'FilesizeAlt': {'IEC': get_readable_size(os.path.getsize(tmpfilename), 2, "IEC"), 'SI': get_readable_size( + os.path.getsize(tmpfilename), 2, "SI")}, 'DownloadTime': float(exec_time_start - exec_time_end), 'DownloadTimeReadable': hms_string(exec_time_start - exec_time_end)}) return returnval -if (not havehttplib2): - def download_from_url_file_with_httplib2( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - ranges=[ - None, - None], - buffersize=524288, - sleep=-1, - timeout=10): +if(not havehttplib2): + def download_from_url_file_with_httplib2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10): returnval = download_from_url_file_with_urllib( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize, - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize, sleep, timeout) return returnval -if (havehttplib2): - def download_from_url_to_file_with_httplib2( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - outfile="-", - outpath=os.getcwd(), - ranges=[ - None, - None], - buffersize=[ - 524288, - 524288], - sleep=-1, - timeout=10): +if(havehttplib2): + def download_from_url_to_file_with_httplib2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10): global geturls_download_sleep, havezstd, havebrotli - 
if (sleep < 0): + if(sleep < 0): sleep = geturls_download_sleep - if (timeout <= 0): + if(timeout <= 0): timeout = 10 - if (not outfile == "-"): + if(not outfile == "-"): outpath = outpath.rstrip(os.path.sep) - filepath = os.path.realpath(outpath + os.path.sep + outfile) - if (not os.path.exists(outpath)): + filepath = os.path.realpath(outpath+os.path.sep+outfile) + if(not os.path.exists(outpath)): os.makedirs(outpath) - if (os.path.exists(outpath) and os.path.isfile(outpath)): + if(os.path.exists(outpath) and os.path.isfile(outpath)): return False - if (os.path.exists(filepath) and os.path.isdir(filepath)): + if(os.path.exists(filepath) and os.path.isdir(filepath)): return False pretmpfilename = download_from_url_file_with_httplib2( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize[0], - sleep, - timeout) - if (not pretmpfilename): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize[0], sleep, timeout) + if(not pretmpfilename): return False tmpfilename = pretmpfilename.get('Filename') downloadsize = int(os.path.getsize(tmpfilename)) fulldatasize = 0 - log.info("Moving file " + tmpfilename + " to " + filepath) + log.info("Moving file "+tmpfilename+" to "+filepath) exec_time_start = time.time() shutil.move(tmpfilename, filepath) try: - os.utime( - filepath, - (time.mktime( - email.utils.parsedate_to_datetime( - pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), - time.mktime( - email.utils.parsedate_to_datetime( - pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) + os.utime(filepath, (time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), + time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) except AttributeError: try: - os.utime( - filepath, - (time.mktime( - datetime.datetime.strptime( - pretmpfilename.get('Headers').get( - 'Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()), - time.mktime( - datetime.datetime.strptime( - pretmpfilename.get('Headers').get( - 'Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) + os.utime(filepath, (time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()), + time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) except ValueError: pass except ValueError: pass exec_time_end = time.time() - log.info("It took " + hms_string(exec_time_start - - exec_time_end) + " to move file.") - if (os.path.exists(tmpfilename)): + log.info("It took "+hms_string(exec_time_start - + exec_time_end)+" to move file.") + if(os.path.exists(tmpfilename)): os.remove(tmpfilename) - returnval = { - 'Type': "File", - 'Filename': filepath, - 'Filesize': downloadsize, - 'FilesizeAlt': { - 'IEC': get_readable_size( - downloadsize, - 2, - "IEC"), - 'SI': get_readable_size( - downloadsize, - 2, - "SI")}, - 'DownloadTime': pretmpfilename.get('DownloadTime'), - 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), - 'MoveFileTime': float( - exec_time_start - exec_time_end), - 'MoveFileTimeReadable': hms_string( - exec_time_start - exec_time_end), - 'Headers': pretmpfilename.get('Headers'), - 'Version': pretmpfilename.get('Version'), - 'Method': pretmpfilename.get('Method'), - 'Method': httpmethod, - 'HeadersSent': 
pretmpfilename.get('HeadersSent'), - 'URL': pretmpfilename.get('URL'), - 'Code': pretmpfilename.get('Code'), - 'Reason': pretmpfilename.get('Reason'), - 'HTTPLib': pretmpfilename.get('HTTPLib')} - if (outfile == "-"): + returnval = {'Type': "File", 'Filename': filepath, 'Filesize': downloadsize, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename.get('DownloadTime'), 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), 'MoveFileTime': float(exec_time_start - exec_time_end), 'MoveFileTimeReadable': hms_string( + exec_time_start - exec_time_end), 'Headers': pretmpfilename.get('Headers'), 'Version': pretmpfilename.get('Version'), 'Method': pretmpfilename.get('Method'), 'Method': httpmethod, 'HeadersSent': pretmpfilename.get('HeadersSent'), 'URL': pretmpfilename.get('URL'), 'Code': pretmpfilename.get('Code'), 'Reason': pretmpfilename.get('Reason'), 'HTTPLib': pretmpfilename.get('HTTPLib')} + if(outfile == "-"): pretmpfilename = download_from_url_file_with_httplib2( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize[0], - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize[0], sleep, timeout) tmpfilename = pretmpfilename.get('Filename') downloadsize = int(os.path.getsize(tmpfilename)) fulldatasize = 0 @@ -3564,31 +1946,12 @@ def download_from_url_to_file_with_httplib2( datasize = len(databytes) fulldatasize = datasize + fulldatasize percentage = "" - if (downloadsize > 0): - percentage = str( - "{0:.2f}".format( - float( - float( - fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.') + "%" + if(downloadsize > 0): + percentage = str("{0:.2f}".format( + float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%" downloaddiff = fulldatasize - prevdownsize - log.info( - "Copying " + - get_readable_size( - fulldatasize, - 2, - "SI")['ReadableWithSuffix'] + - " / " + - get_readable_size( - downloadsize, - 2, - "SI")['ReadableWithSuffix'] + - " " + - str(percentage) + - " / Copied " + - get_readable_size( - downloaddiff, - 2, - "IEC")['ReadableWithSuffix']) + log.info("Copying "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")[ + 'ReadableWithSuffix']+" "+str(percentage)+" / Copied "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']) prevdownsize = fulldatasize f.write(databytes) f.seek(0) @@ -3597,230 +1960,92 @@ def download_from_url_to_file_with_httplib2( ft.close() os.remove(tmpfilename) exec_time_end = time.time() - log.info("It took " + hms_string(exec_time_start - - exec_time_end) + " to copy file.") - returnval = { - 'Type': "Content", - 'Content': fdata, - 'Contentsize': downloadsize, - 'ContentsizeAlt': { - 'IEC': get_readable_size( - downloadsize, - 2, - "IEC"), - 'SI': get_readable_size( - downloadsize, - 2, - "SI")}, - 'DownloadTime': pretmpfilename.get('DownloadTime'), - 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), - 'MoveFileTime': float( - exec_time_start - exec_time_end), - 'MoveFileTimeReadable': hms_string( - exec_time_start - exec_time_end), - 'Headers': pretmpfilename.get('Headers'), - 'Version': pretmpfilename.get('Version'), - 'Method': pretmpfilename.get('Method'), - 'Method': httpmethod, - 'HeadersSent': pretmpfilename.get('HeadersSent'), - 'URL': pretmpfilename.get('URL'), - 'Code': 
pretmpfilename.get('Code'), - 'Reason': pretmpfilename.get('Reason'), - 'HTTPLib': pretmpfilename.get('HTTPLib')} + log.info("It took "+hms_string(exec_time_start - + exec_time_end)+" to copy file.") + returnval = {'Type': "Content", 'Content': fdata, 'Contentsize': downloadsize, 'ContentsizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename.get('DownloadTime'), 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), 'MoveFileTime': float(exec_time_start - exec_time_end), 'MoveFileTimeReadable': hms_string( + exec_time_start - exec_time_end), 'Headers': pretmpfilename.get('Headers'), 'Version': pretmpfilename.get('Version'), 'Method': pretmpfilename.get('Method'), 'Method': httpmethod, 'HeadersSent': pretmpfilename.get('HeadersSent'), 'URL': pretmpfilename.get('URL'), 'Code': pretmpfilename.get('Code'), 'Reason': pretmpfilename.get('Reason'), 'HTTPLib': pretmpfilename.get('HTTPLib')} return returnval -if (not havehttplib2): - def download_from_url_to_file_with_httplib2( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - outfile="-", - outpath=os.getcwd(), - ranges=[ - None, - None], - buffersize=[ - 524288, - 524288], - sleep=-1, - timeout=10): +if(not havehttplib2): + def download_from_url_to_file_with_httplib2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10): returnval = download_from_url_to_file_with_urllib( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - outfile, - outpath, - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, outfile, outpath, sleep, timeout) return returnval -def download_from_url_with_request( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - buffersize=524288, - sleep=-1, - timeout=10): +def download_from_url_with_request(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10): returnval = download_from_url_with_urllib( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep, timeout) return returnval -def download_from_url_file_with_request( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - ranges=[ - None, - None], - buffersize=524288, - sleep=-1, - timeout=10): +def download_from_url_file_with_request(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10): returnval = download_from_url_file_with_urllib( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize, - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, 
buffersize, sleep, timeout) return returnval -def download_from_url_to_file_with_request( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - outfile="-", - outpath=os.getcwd(), - ranges=[ - None, - None], - buffersize=[ - 524288, - 524288], - sleep=-1, - timeout=10): +def download_from_url_to_file_with_request(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10): returnval = download_from_url_to_file_with_urllib( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - outfile, - outpath, - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, outfile, outpath, sleep, timeout) return returnval -if (haverequests): - def download_from_url_with_requests( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - buffersize=524288, - sleep=-1, - timeout=10): +if(haverequests): + def download_from_url_with_requests(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10): global geturls_download_sleep, havezstd, havebrotli - if (sleep < 0): + if(sleep < 0): sleep = geturls_download_sleep - if (timeout <= 0): + if(timeout <= 0): timeout = 10 urlparts = urlparse.urlparse(httpurl) - if (isinstance(httpheaders, list)): + if(isinstance(httpheaders, list)): httpheaders = make_http_headers_from_list_to_dict(httpheaders) httpheaders = fix_header_names(httpheaders) - if (httpuseragent is not None): - if ('User-Agent' in httpheaders): + if(httpuseragent is not None): + if('User-Agent' in httpheaders): httpheaders['User-Agent'] = httpuseragent else: httpuseragent.update({'User-Agent': httpuseragent}) - if (httpreferer is not None): - if ('Referer' in httpheaders): + if(httpreferer is not None): + if('Referer' in httpheaders): httpheaders['Referer'] = httpreferer else: httpuseragent.update({'Referer': httpreferer}) - if (urlparts.username is not None or urlparts.password is not None): - if (sys.version[0] == "2"): + if(urlparts.username is not None or urlparts.password is not None): + if(sys.version[0] == "2"): inurlencode = b64encode( - str(urlparts.username + ":" + urlparts.password)) - if (sys.version[0] >= "3"): + str(urlparts.username+":"+urlparts.password)) + if(sys.version[0] >= "3"): inurlencode = b64encode( - str(urlparts.username + ":" + urlparts.password).encode()).decode("UTF-8") - httpheaders.update({'Authorization': "Basic " + inurlencode}) + str(urlparts.username+":"+urlparts.password).encode()).decode("UTF-8") + httpheaders.update({'Authorization': "Basic "+inurlencode}) time.sleep(sleep) - if (postdata is not None and not isinstance(postdata, dict)): + if(postdata is not None and not isinstance(postdata, dict)): postdata = urlencode(postdata) try: reqsession = requests.Session() - if (httpmethod == "GET"): + if(httpmethod == "GET"): geturls_text = reqsession.get( httpurl, headers=httpheaders, cookies=httpcookie, stream=True) - elif (httpmethod == "POST"): + elif(httpmethod == "POST"): geturls_text = reqsession.post( - httpurl, - data=postdata, - headers=httpheaders, - cookies=httpcookie, - 
stream=True) + httpurl, data=postdata, headers=httpheaders, cookies=httpcookie, stream=True) else: geturls_text = reqsession.get( httpurl, headers=httpheaders, cookies=httpcookie, stream=True) except requests.exceptions.ConnectTimeout: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False except requests.exceptions.ConnectError: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False except socket.timeout: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False httpcodeout = geturls_text.status_code httpcodereason = geturls_text.reason - if (geturls_text.raw.version == "10"): + if(geturls_text.raw.version == "10"): httpversionout = "1.0" else: httpversionout = "1.1" @@ -3828,35 +2053,35 @@ def download_from_url_with_requests( httpurlout = geturls_text.url httpheaderout = geturls_text.headers httpheadersentout = geturls_text.request.headers - if (isinstance(httpheaderout, list)): + if(isinstance(httpheaderout, list)): httpheaderout = dict( make_http_headers_from_list_to_dict(httpheaderout)) - if (sys.version[0] == "2"): + if(sys.version[0] == "2"): try: prehttpheaderout = httpheaderout httpheaderkeys = httpheaderout.keys() imax = len(httpheaderkeys) ic = 0 httpheaderout = {} - while (ic < imax): + while(ic < imax): httpheaderout.update( {httpheaderkeys[ic]: prehttpheaderout[httpheaderkeys[ic]]}) ic += 1 except AttributeError: pass httpheaderout = fix_header_names(httpheaderout) - if (isinstance(httpheadersentout, list)): + if(isinstance(httpheadersentout, list)): httpheadersentout = dict( make_http_headers_from_list_to_dict(httpheadersentout)) httpheadersentout = fix_header_names(httpheadersentout) downloadsize = httpheaderout.get('Content-Length') - if (downloadsize is not None): + if(downloadsize is not None): downloadsize = int(downloadsize) if downloadsize is None: downloadsize = 0 fulldatasize = 0 prevdownsize = 0 - log.info("Downloading URL " + httpurl) + log.info("Downloading URL "+httpurl) with BytesIO() as strbuf: while True: databytes = geturls_text.raw.read(buffersize) @@ -3865,389 +2090,157 @@ def download_from_url_with_requests( datasize = len(databytes) fulldatasize = datasize + fulldatasize percentage = "" - if (downloadsize > 0): - percentage = str( - "{0:.2f}".format( - float( - float( - fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.') + "%" + if(downloadsize > 0): + percentage = str("{0:.2f}".format( + float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%" downloaddiff = fulldatasize - prevdownsize - log.info( - "Downloading " + - get_readable_size( - fulldatasize, - 2, - "SI")['ReadableWithSuffix'] + - " / " + - get_readable_size( - downloadsize, - 2, - "SI")['ReadableWithSuffix'] + - " " + - str(percentage) + - " / Downloaded " + - get_readable_size( - downloaddiff, - 2, - "IEC")['ReadableWithSuffix']) + log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")[ + 'ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']) prevdownsize = fulldatasize strbuf.write(databytes) strbuf.seek(0) returnval_content = strbuf.read() - if (httpheaderout.get("Content-Encoding") == "gzip"): + if(httpheaderout.get("Content-Encoding") == "gzip"): try: returnval_content = zlib.decompress( - returnval_content, 16 + zlib.MAX_WBITS) + returnval_content, 16+zlib.MAX_WBITS) except zlib.error: pass - elif 
(httpheaderout.get("Content-Encoding") == "deflate"): + elif(httpheaderout.get("Content-Encoding") == "deflate"): try: returnval_content = zlib.decompress(returnval_content) except zlib.error: pass - elif (httpheaderout.get("Content-Encoding") == "br" and havebrotli): + elif(httpheaderout.get("Content-Encoding") == "br" and havebrotli): try: returnval_content = brotli.decompress(returnval_content) except brotli.error: pass - elif (httpheaderout.get("Content-Encoding") == "zstd" and havezstd): + elif(httpheaderout.get("Content-Encoding") == "zstd" and havezstd): try: returnval_content = zstandard.decompress(returnval_content) except zstandard.error: pass - elif ((httpheaderout.get("Content-Encoding") == "lzma" or httpheaderout.get("Content-Encoding") == "xz") and havelzma): + elif((httpheaderout.get("Content-Encoding") == "lzma" or httpheaderout.get("Content-Encoding") == "xz") and havelzma): try: returnval_content = lzma.decompress(returnval_content) except zstandard.error: pass - elif (httpheaderout.get("Content-Encoding") == "bzip2"): + elif(httpheaderout.get("Content-Encoding") == "bzip2"): try: returnval_content = bz2.decompress(returnval_content) except zstandard.error: pass - returnval = { - 'Type': "Content", - 'Content': returnval_content, - 'Contentsize': fulldatasize, - 'ContentsizeAlt': { - 'IEC': get_readable_size( - fulldatasize, - 2, - "IEC"), - 'SI': get_readable_size( - fulldatasize, - 2, - "SI")}, - 'Headers': httpheaderout, - 'Version': httpversionout, - 'Method': httpmethodout, - 'HeadersSent': httpheadersentout, - 'URL': httpurlout, - 'Code': httpcodeout, - 'Reason': httpcodereason, - 'HTTPLib': "requests"} + returnval = {'Type': "Content", 'Content': returnval_content, 'Contentsize': fulldatasize, 'ContentsizeAlt': {'IEC': get_readable_size(fulldatasize, 2, "IEC"), 'SI': get_readable_size( + fulldatasize, 2, "SI")}, 'Headers': httpheaderout, 'Version': httpversionout, 'Method': httpmethodout, 'HeadersSent': httpheadersentout, 'URL': httpurlout, 'Code': httpcodeout, 'Reason': httpcodereason, 'HTTPLib': "requests"} geturls_text.close() return returnval -if (not haverequests): - def download_from_url_with_requests( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - buffersize=524288, - sleep=-1, - timeout=10): +if(not haverequests): + def download_from_url_with_requests(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10): returnval = download_from_url_with_urllib( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep, timeout) return returnval -if (haverequests): - def download_from_url_file_with_requests( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod="GET", - postdata=None, - ranges=[ - None, - None], - buffersize=524288, - sleep=-1, - timeout=10): +if(haverequests): + def download_from_url_file_with_requests(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10): global geturls_download_sleep, havezstd, havebrotli, tmpfileprefix, tmpfilesuffix exec_time_start = time.time() myhash = hashlib.new("sha1") - if (sys.version[0] == "2"): + 
if(sys.version[0] == "2"): myhash.update(httpurl) myhash.update(str(buffersize)) myhash.update(str(exec_time_start)) - if (sys.version[0] >= "3"): + if(sys.version[0] >= "3"): myhash.update(httpurl.encode('utf-8')) myhash.update(str(buffersize).encode('utf-8')) myhash.update(str(exec_time_start).encode('utf-8')) newtmpfilesuffix = tmpfilesuffix + str(myhash.hexdigest()) - if (sleep < 0): + if(sleep < 0): sleep = geturls_download_sleep - if (timeout <= 0): + if(timeout <= 0): timeout = 10 pretmpfilename = download_from_url_with_requests( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - sleep, - timeout) - if (not pretmpfilename): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep, timeout) + if(not pretmpfilename): return False with tempfile.NamedTemporaryFile('wb+', prefix=tmpfileprefix, suffix=newtmpfilesuffix, delete=False) as f: tmpfilename = f.name try: - os.utime( - tmpfilename, - (time.mktime( - email.utils.parsedate_to_datetime( - pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), - time.mktime( - email.utils.parsedate_to_datetime( - pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) + os.utime(tmpfilename, (time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), + time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) except AttributeError: try: - os.utime( - tmpfilename, - (time.mktime( - datetime.datetime.strptime( - pretmpfilename.get('Headers').get( - 'Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()), - time.mktime( - datetime.datetime.strptime( - pretmpfilename.get('Headers').get( - 'Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) + os.utime(tmpfilename, (time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()), + time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) except ValueError: pass except ValueError: pass - returnval = { - 'Type': "File", - 'Filename': tmpfilename, - 'Filesize': pretmpfilename.get('Contentsize'), - 'FilesizeAlt': { - 'IEC': get_readable_size( - pretmpfilename.get('Contentsize'), - 2, - "IEC"), - 'SI': get_readable_size( - pretmpfilename.get('Contentsize'), - 2, - "SI")}, - 'Headers': pretmpfilename.get('Headers'), - 'Version': pretmpfilename.get('Version'), - 'Method': pretmpfilename.get('Method'), - 'HeadersSent': pretmpfilename.get('HeadersSent'), - 'URL': pretmpfilename.get('URL'), - 'Code': pretmpfilename.get('Code'), - 'Reason': pretmpfilename.get('Reason'), - 'HTTPLib': pretmpfilename.get('HTTPLib')} + returnval = {'Type': "File", 'Filename': tmpfilename, 'Filesize': pretmpfilename.get('Contentsize'), 'FilesizeAlt': {'IEC': get_readable_size(pretmpfilename.get('Contentsize'), 2, "IEC"), 'SI': get_readable_size(pretmpfilename.get('Contentsize'), 2, "SI")}, 'Headers': pretmpfilename.get( + 'Headers'), 'Version': pretmpfilename.get('Version'), 'Method': pretmpfilename.get('Method'), 'HeadersSent': pretmpfilename.get('HeadersSent'), 'URL': pretmpfilename.get('URL'), 'Code': pretmpfilename.get('Code'), 'Reason': pretmpfilename.get('Reason'), 'HTTPLib': pretmpfilename.get('HTTPLib')} f.write(pretmpfilename.get('Content')) f.close() exec_time_end = time.time() - log.info("It took " + hms_string(exec_time_start - - 
exec_time_end) + " to download file.") - returnval.update( - { - 'Filesize': os.path.getsize(tmpfilename), - 'FilesizeAlt': { - 'IEC': get_readable_size( - os.path.getsize(tmpfilename), - 2, - "IEC"), - 'SI': get_readable_size( - os.path.getsize(tmpfilename), - 2, - "SI")}, - 'DownloadTime': float( - exec_time_start - - exec_time_end), - 'DownloadTimeReadable': hms_string( - exec_time_start - - exec_time_end)}) + log.info("It took "+hms_string(exec_time_start - + exec_time_end)+" to download file.") + returnval.update({'Filesize': os.path.getsize(tmpfilename), 'FilesizeAlt': {'IEC': get_readable_size(os.path.getsize(tmpfilename), 2, "IEC"), 'SI': get_readable_size( + os.path.getsize(tmpfilename), 2, "SI")}, 'DownloadTime': float(exec_time_start - exec_time_end), 'DownloadTimeReadable': hms_string(exec_time_start - exec_time_end)}) return returnval -if (not haverequests): - def download_from_url_file_with_requests( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - ranges=[ - None, - None], - buffersize=524288, - sleep=-1, - timeout=10): +if(not haverequests): + def download_from_url_file_with_requests(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10): returnval = download_from_url_file_with_urllib( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize, - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize, sleep, timeout) return returnval -if (haverequests): - def download_from_url_to_file_with_requests( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - outfile="-", - outpath=os.getcwd(), - ranges=[ - None, - None], - buffersize=[ - 524288, - 524288], - sleep=-1, - timeout=10): +if(haverequests): + def download_from_url_to_file_with_requests(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10): global geturls_download_sleep, havezstd, havebrotli - if (sleep < 0): + if(sleep < 0): sleep = geturls_download_sleep - if (timeout <= 0): + if(timeout <= 0): timeout = 10 - if (not outfile == "-"): + if(not outfile == "-"): outpath = outpath.rstrip(os.path.sep) - filepath = os.path.realpath(outpath + os.path.sep + outfile) - if (not os.path.exists(outpath)): + filepath = os.path.realpath(outpath+os.path.sep+outfile) + if(not os.path.exists(outpath)): os.makedirs(outpath) - if (os.path.exists(outpath) and os.path.isfile(outpath)): + if(os.path.exists(outpath) and os.path.isfile(outpath)): return False - if (os.path.exists(filepath) and os.path.isdir(filepath)): + if(os.path.exists(filepath) and os.path.isdir(filepath)): return False pretmpfilename = download_from_url_file_with_requests( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize[0], - sleep, - timeout) - if (not pretmpfilename): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize[0], sleep, timeout) + if(not pretmpfilename): return False tmpfilename = 
pretmpfilename.get('Filename') downloadsize = int(os.path.getsize(tmpfilename)) fulldatasize = 0 - log.info("Moving file " + tmpfilename + " to " + filepath) + log.info("Moving file "+tmpfilename+" to "+filepath) exec_time_start = time.time() shutil.move(tmpfilename, filepath) try: - os.utime( - filepath, - (time.mktime( - email.utils.parsedate_to_datetime( - pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), - time.mktime( - email.utils.parsedate_to_datetime( - pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) + os.utime(filepath, (time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), + time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) except AttributeError: try: - os.utime( - filepath, - (time.mktime( - datetime.datetime.strptime( - pretmpfilename.get('Headers').get( - 'Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()), - time.mktime( - datetime.datetime.strptime( - pretmpfilename.get('Headers').get( - 'Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) + os.utime(filepath, (time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()), + time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) except ValueError: pass except ValueError: pass exec_time_end = time.time() - log.info("It took " + hms_string(exec_time_start - - exec_time_end) + " to move file.") - if (os.path.exists(tmpfilename)): + log.info("It took "+hms_string(exec_time_start - + exec_time_end)+" to move file.") + if(os.path.exists(tmpfilename)): os.remove(tmpfilename) - returnval = { - 'Type': "File", - 'Filename': filepath, - 'Filesize': downloadsize, - 'FilesizeAlt': { - 'IEC': get_readable_size( - downloadsize, - 2, - "IEC"), - 'SI': get_readable_size( - downloadsize, - 2, - "SI")}, - 'DownloadTime': pretmpfilename.get('DownloadTime'), - 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), - 'MoveFileTime': float( - exec_time_start - exec_time_end), - 'MoveFileTimeReadable': hms_string( - exec_time_start - exec_time_end), - 'Headers': pretmpfilename.get('Headers'), - 'Version': pretmpfilename.get('Version'), - 'Method': pretmpfilename.get('Method'), - 'Method': httpmethod, - 'HeadersSent': pretmpfilename.get('HeadersSent'), - 'URL': pretmpfilename.get('URL'), - 'Code': pretmpfilename.get('Code'), - 'Reason': pretmpfilename.get('Reason'), - 'HTTPLib': pretmpfilename.get('HTTPLib')} - if (outfile == "-"): + returnval = {'Type': "File", 'Filename': filepath, 'Filesize': downloadsize, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename.get('DownloadTime'), 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), 'MoveFileTime': float(exec_time_start - exec_time_end), 'MoveFileTimeReadable': hms_string( + exec_time_start - exec_time_end), 'Headers': pretmpfilename.get('Headers'), 'Version': pretmpfilename.get('Version'), 'Method': pretmpfilename.get('Method'), 'Method': httpmethod, 'HeadersSent': pretmpfilename.get('HeadersSent'), 'URL': pretmpfilename.get('URL'), 'Code': pretmpfilename.get('Code'), 'Reason': pretmpfilename.get('Reason'), 'HTTPLib': pretmpfilename.get('HTTPLib')} + if(outfile == "-"): pretmpfilename = download_from_url_file_with_requests( - httpurl, - httpheaders, - 
httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize[0], - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize[0], sleep, timeout) tmpfilename = pretmpfilename.get('Filename') downloadsize = int(os.path.getsize(tmpfilename)) fulldatasize = 0 @@ -4262,31 +2255,12 @@ def download_from_url_to_file_with_requests( datasize = len(databytes) fulldatasize = datasize + fulldatasize percentage = "" - if (downloadsize > 0): - percentage = str( - "{0:.2f}".format( - float( - float( - fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.') + "%" + if(downloadsize > 0): + percentage = str("{0:.2f}".format( + float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%" downloaddiff = fulldatasize - prevdownsize - log.info( - "Copying " + - get_readable_size( - fulldatasize, - 2, - "SI")['ReadableWithSuffix'] + - " / " + - get_readable_size( - downloadsize, - 2, - "SI")['ReadableWithSuffix'] + - " " + - str(percentage) + - " / Copied " + - get_readable_size( - downloaddiff, - 2, - "IEC")['ReadableWithSuffix']) + log.info("Copying "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")[ + 'ReadableWithSuffix']+" "+str(percentage)+" / Copied "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']) prevdownsize = fulldatasize f.write(databytes) f.seek(0) @@ -4295,136 +2269,67 @@ def download_from_url_to_file_with_requests( ft.close() os.remove(tmpfilename) exec_time_end = time.time() - log.info("It took " + hms_string(exec_time_start - - exec_time_end) + " to copy file.") - returnval = { - 'Type': "Content", - 'Content': fdata, - 'Contentsize': downloadsize, - 'ContentsizeAlt': { - 'IEC': get_readable_size( - downloadsize, - 2, - "IEC"), - 'SI': get_readable_size( - downloadsize, - 2, - "SI")}, - 'DownloadTime': pretmpfilename.get('DownloadTime'), - 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), - 'MoveFileTime': float( - exec_time_start - exec_time_end), - 'MoveFileTimeReadable': hms_string( - exec_time_start - exec_time_end), - 'Headers': pretmpfilename.get('Headers'), - 'Version': pretmpfilename.get('Version'), - 'Method': pretmpfilename.get('Method'), - 'Method': httpmethod, - 'HeadersSent': pretmpfilename.get('HeadersSent'), - 'URL': pretmpfilename.get('URL'), - 'Code': pretmpfilename.get('Code'), - 'Reason': pretmpfilename.get('Reason'), - 'HTTPLib': pretmpfilename.get('HTTPLib')} + log.info("It took "+hms_string(exec_time_start - + exec_time_end)+" to copy file.") + returnval = {'Type': "Content", 'Content': fdata, 'Contentsize': downloadsize, 'ContentsizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename.get('DownloadTime'), 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), 'MoveFileTime': float(exec_time_start - exec_time_end), 'MoveFileTimeReadable': hms_string( + exec_time_start - exec_time_end), 'Headers': pretmpfilename.get('Headers'), 'Version': pretmpfilename.get('Version'), 'Method': pretmpfilename.get('Method'), 'Method': httpmethod, 'HeadersSent': pretmpfilename.get('HeadersSent'), 'URL': pretmpfilename.get('URL'), 'Code': pretmpfilename.get('Code'), 'Reason': pretmpfilename.get('Reason'), 'HTTPLib': pretmpfilename.get('HTTPLib')} return returnval -if (not haverequests): - def download_from_url_to_file_with_requests( - httpurl, - httpheaders=geturls_headers, - 
httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - outfile="-", - outpath=os.getcwd(), - ranges=[ - None, - None], - buffersize=[ - 524288, - 524288], - sleep=-1, - timeout=10): +if(not haverequests): + def download_from_url_to_file_with_requests(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10): returnval = download_from_url_to_file_with_urllib( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - outfile, - outpath, - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, outfile, outpath, sleep, timeout) return returnval -if (haveaiohttp): - def download_from_url_with_aiohttp( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - buffersize=524288, - sleep=-1, - timeout=10): +if(haveaiohttp): + def download_from_url_with_aiohttp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10): global geturls_download_sleep, havezstd, havebrotli - if (sleep < 0): + if(sleep < 0): sleep = geturls_download_sleep - if (timeout <= 0): + if(timeout <= 0): timeout = 10 urlparts = urlparse.urlparse(httpurl) - if (isinstance(httpheaders, list)): + if(isinstance(httpheaders, list)): httpheaders = make_http_headers_from_list_to_dict(httpheaders) httpheaders = fix_header_names(httpheaders) - if (httpuseragent is not None): - if ('User-Agent' in httpheaders): + if(httpuseragent is not None): + if('User-Agent' in httpheaders): httpheaders['User-Agent'] = httpuseragent else: httpuseragent.update({'User-Agent': httpuseragent}) - if (httpreferer is not None): - if ('Referer' in httpheaders): + if(httpreferer is not None): + if('Referer' in httpheaders): httpheaders['Referer'] = httpreferer else: httpuseragent.update({'Referer': httpreferer}) - if (urlparts.username is not None or urlparts.password is not None): - if (sys.version[0] == "2"): + if(urlparts.username is not None or urlparts.password is not None): + if(sys.version[0] == "2"): inurlencode = b64encode( - str(urlparts.username + ":" + urlparts.password)) - if (sys.version[0] >= "3"): + str(urlparts.username+":"+urlparts.password)) + if(sys.version[0] >= "3"): inurlencode = b64encode( - str(urlparts.username + ":" + urlparts.password).encode()).decode("UTF-8") - httpheaders.update({'Authorization': "Basic " + inurlencode}) + str(urlparts.username+":"+urlparts.password).encode()).decode("UTF-8") + httpheaders.update({'Authorization': "Basic "+inurlencode}) time.sleep(sleep) - if (postdata is not None and not isinstance(postdata, dict)): + if(postdata is not None and not isinstance(postdata, dict)): postdata = urlencode(postdata) try: - reqsession = aiohttp.ClientSession( - cookie_jar=httpcookie, - headers=httpheaders, - timeout=timeout, - read_timeout=timeout, - conn_timeout=timeout, - read_bufsize=buffersize) - if (httpmethod == "GET"): + reqsession = aiohttp.ClientSession(cookie_jar=httpcookie, headers=httpheaders, + timeout=timeout, read_timeout=timeout, conn_timeout=timeout, read_bufsize=buffersize) + if(httpmethod == "GET"): geturls_text = reqsession.get(httpurl) - elif (httpmethod == 
"POST"): + elif(httpmethod == "POST"): geturls_text = reqsession.post(httpurl, data=postdata) else: geturls_text = reqsession.get(httpurl) except aiohttp.exceptions.ConnectTimeout: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False except aiohttp.exceptions.ConnectError: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False except socket.timeout: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False httpcodeout = geturls_text.status httpcodereason = geturls_text.reason @@ -4433,35 +2338,35 @@ def download_from_url_with_aiohttp( httpurlout = geturls_text.url httpheaderout = geturls_text.headers httpheadersentout = geturls_text.request_info.headers - if (isinstance(httpheaderout, list)): + if(isinstance(httpheaderout, list)): httpheaderout = dict( make_http_headers_from_list_to_dict(httpheaderout)) - if (sys.version[0] == "2"): + if(sys.version[0] == "2"): try: prehttpheaderout = httpheaderout httpheaderkeys = httpheaderout.keys() imax = len(httpheaderkeys) ic = 0 httpheaderout = {} - while (ic < imax): + while(ic < imax): httpheaderout.update( {httpheaderkeys[ic]: prehttpheaderout[httpheaderkeys[ic]]}) ic += 1 except AttributeError: pass httpheaderout = fix_header_names(httpheaderout) - if (isinstance(httpheadersentout, list)): + if(isinstance(httpheadersentout, list)): httpheadersentout = dict( make_http_headers_from_list_to_dict(httpheadersentout)) httpheadersentout = fix_header_names(httpheadersentout) downloadsize = httpheaderout.get('Content-Length') - if (downloadsize is not None): + if(downloadsize is not None): downloadsize = int(downloadsize) if downloadsize is None: downloadsize = 0 fulldatasize = 0 prevdownsize = 0 - log.info("Downloading URL " + httpurl) + log.info("Downloading URL "+httpurl) with BytesIO() as strbuf: while True: databytes = geturls_text.read(buffersize) @@ -4470,389 +2375,157 @@ def download_from_url_with_aiohttp( datasize = len(databytes) fulldatasize = datasize + fulldatasize percentage = "" - if (downloadsize > 0): - percentage = str( - "{0:.2f}".format( - float( - float( - fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.') + "%" + if(downloadsize > 0): + percentage = str("{0:.2f}".format( + float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%" downloaddiff = fulldatasize - prevdownsize - log.info( - "Downloading " + - get_readable_size( - fulldatasize, - 2, - "SI")['ReadableWithSuffix'] + - " / " + - get_readable_size( - downloadsize, - 2, - "SI")['ReadableWithSuffix'] + - " " + - str(percentage) + - " / Downloaded " + - get_readable_size( - downloaddiff, - 2, - "IEC")['ReadableWithSuffix']) + log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")[ + 'ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']) prevdownsize = fulldatasize strbuf.write(databytes) strbuf.seek(0) returnval_content = strbuf.read() - if (httpheaderout.get("Content-Encoding") == "gzip"): + if(httpheaderout.get("Content-Encoding") == "gzip"): try: returnval_content = zlib.decompress( - returnval_content, 16 + zlib.MAX_WBITS) + returnval_content, 16+zlib.MAX_WBITS) except zlib.error: pass - elif (httpheaderout.get("Content-Encoding") == "deflate"): + elif(httpheaderout.get("Content-Encoding") == "deflate"): try: returnval_content = zlib.decompress(returnval_content) except zlib.error: pass - elif 
(httpheaderout.get("Content-Encoding") == "br" and havebrotli): + elif(httpheaderout.get("Content-Encoding") == "br" and havebrotli): try: returnval_content = brotli.decompress(returnval_content) except brotli.error: pass - elif (httpheaderout.get("Content-Encoding") == "zstd" and havezstd): + elif(httpheaderout.get("Content-Encoding") == "zstd" and havezstd): try: returnval_content = zstandard.decompress(returnval_content) except zstandard.error: pass - elif ((httpheaderout.get("Content-Encoding") == "lzma" or httpheaderout.get("Content-Encoding") == "xz") and havelzma): + elif((httpheaderout.get("Content-Encoding") == "lzma" or httpheaderout.get("Content-Encoding") == "xz") and havelzma): try: returnval_content = lzma.decompress(returnval_content) except zstandard.error: pass - elif (httpheaderout.get("Content-Encoding") == "bzip2"): + elif(httpheaderout.get("Content-Encoding") == "bzip2"): try: returnval_content = bz2.decompress(returnval_content) except zstandard.error: pass - returnval = { - 'Type': "Content", - 'Content': returnval_content, - 'Contentsize': fulldatasize, - 'ContentsizeAlt': { - 'IEC': get_readable_size( - fulldatasize, - 2, - "IEC"), - 'SI': get_readable_size( - fulldatasize, - 2, - "SI")}, - 'Headers': httpheaderout, - 'Version': httpversionout, - 'Method': httpmethodout, - 'HeadersSent': httpheadersentout, - 'URL': httpurlout, - 'Code': httpcodeout, - 'Reason': httpcodereason, - 'HTTPLib': "aiohttp"} + returnval = {'Type': "Content", 'Content': returnval_content, 'Contentsize': fulldatasize, 'ContentsizeAlt': {'IEC': get_readable_size(fulldatasize, 2, "IEC"), 'SI': get_readable_size( + fulldatasize, 2, "SI")}, 'Headers': httpheaderout, 'Version': httpversionout, 'Method': httpmethodout, 'HeadersSent': httpheadersentout, 'URL': httpurlout, 'Code': httpcodeout, 'Reason': httpcodereason, 'HTTPLib': "aiohttp"} geturls_text.close() return returnval -if (not haveaiohttp): - def download_from_url_with_aiohttp( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - buffersize=524288, - sleep=-1, - timeout=10): +if(not haveaiohttp): + def download_from_url_with_aiohttp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10): returnval = download_from_url_with_urllib( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep, timeout) return returnval -if (haveaiohttp): - def download_from_url_file_with_aiohttp( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod="GET", - postdata=None, - ranges=[ - None, - None], - buffersize=524288, - sleep=-1, - timeout=10): +if(haveaiohttp): + def download_from_url_file_with_aiohttp(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10): global geturls_download_sleep, havezstd, havebrotli, tmpfileprefix, tmpfilesuffix exec_time_start = time.time() myhash = hashlib.new("sha1") - if (sys.version[0] == "2"): + if(sys.version[0] == "2"): myhash.update(httpurl) myhash.update(str(buffersize)) myhash.update(str(exec_time_start)) - if (sys.version[0] >= "3"): + if(sys.version[0] >= "3"): myhash.update(httpurl.encode('utf-8')) 
myhash.update(str(buffersize).encode('utf-8')) myhash.update(str(exec_time_start).encode('utf-8')) newtmpfilesuffix = tmpfilesuffix + str(myhash.hexdigest()) - if (sleep < 0): + if(sleep < 0): sleep = geturls_download_sleep - if (timeout <= 0): + if(timeout <= 0): timeout = 10 pretmpfilename = download_from_url_with_aiohttp( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - sleep, - timeout) - if (not pretmpfilename): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep, timeout) + if(not pretmpfilename): return False with tempfile.NamedTemporaryFile('wb+', prefix=tmpfileprefix, suffix=newtmpfilesuffix, delete=False) as f: tmpfilename = f.name try: - os.utime( - tmpfilename, - (time.mktime( - email.utils.parsedate_to_datetime( - pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), - time.mktime( - email.utils.parsedate_to_datetime( - pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) + os.utime(tmpfilename, (time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), + time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) except AttributeError: try: - os.utime( - tmpfilename, - (time.mktime( - datetime.datetime.strptime( - pretmpfilename.get('Headers').get( - 'Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()), - time.mktime( - datetime.datetime.strptime( - pretmpfilename.get('Headers').get( - 'Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) + os.utime(tmpfilename, (time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()), + time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) except ValueError: pass except ValueError: pass - returnval = { - 'Type': "File", - 'Filename': tmpfilename, - 'Filesize': pretmpfilename.get('Contentsize'), - 'FilesizeAlt': { - 'IEC': get_readable_size( - pretmpfilename.get('Contentsize'), - 2, - "IEC"), - 'SI': get_readable_size( - pretmpfilename.get('Contentsize'), - 2, - "SI")}, - 'Headers': pretmpfilename.get('Headers'), - 'Version': pretmpfilename.get('Version'), - 'Method': pretmpfilename.get('Method'), - 'HeadersSent': pretmpfilename.get('HeadersSent'), - 'URL': pretmpfilename.get('URL'), - 'Code': pretmpfilename.get('Code'), - 'Reason': pretmpfilename.get('Reason'), - 'HTTPLib': pretmpfilename.get('HTTPLib')} + returnval = {'Type': "File", 'Filename': tmpfilename, 'Filesize': pretmpfilename.get('Contentsize'), 'FilesizeAlt': {'IEC': get_readable_size(pretmpfilename.get('Contentsize'), 2, "IEC"), 'SI': get_readable_size(pretmpfilename.get('Contentsize'), 2, "SI")}, 'Headers': pretmpfilename.get( + 'Headers'), 'Version': pretmpfilename.get('Version'), 'Method': pretmpfilename.get('Method'), 'HeadersSent': pretmpfilename.get('HeadersSent'), 'URL': pretmpfilename.get('URL'), 'Code': pretmpfilename.get('Code'), 'Reason': pretmpfilename.get('Reason'), 'HTTPLib': pretmpfilename.get('HTTPLib')} f.write(pretmpfilename.get('Content')) f.close() exec_time_end = time.time() - log.info("It took " + hms_string(exec_time_start - - exec_time_end) + " to download file.") - returnval.update( - { - 'Filesize': os.path.getsize(tmpfilename), - 'FilesizeAlt': { - 'IEC': get_readable_size( - os.path.getsize(tmpfilename), - 2, - "IEC"), - 'SI': 
get_readable_size( - os.path.getsize(tmpfilename), - 2, - "SI")}, - 'DownloadTime': float( - exec_time_start - - exec_time_end), - 'DownloadTimeReadable': hms_string( - exec_time_start - - exec_time_end)}) + log.info("It took "+hms_string(exec_time_start - + exec_time_end)+" to download file.") + returnval.update({'Filesize': os.path.getsize(tmpfilename), 'FilesizeAlt': {'IEC': get_readable_size(os.path.getsize(tmpfilename), 2, "IEC"), 'SI': get_readable_size( + os.path.getsize(tmpfilename), 2, "SI")}, 'DownloadTime': float(exec_time_start - exec_time_end), 'DownloadTimeReadable': hms_string(exec_time_start - exec_time_end)}) return returnval -if (not haveaiohttp): - def download_from_url_file_with_aiohttp( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - ranges=[ - None, - None], - buffersize=524288, - sleep=-1, - timeout=10): +if(not haveaiohttp): + def download_from_url_file_with_aiohttp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10): returnval = download_from_url_file_with_urllib( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize, - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize, sleep, timeout) return returnval -if (haveaiohttp): - def download_from_url_to_file_with_aiohttp( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - outfile="-", - outpath=os.getcwd(), - ranges=[ - None, - None], - buffersize=[ - 524288, - 524288], - sleep=-1, - timeout=10): +if(haveaiohttp): + def download_from_url_to_file_with_aiohttp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10): global geturls_download_sleep, havezstd, havebrotli - if (sleep < 0): + if(sleep < 0): sleep = geturls_download_sleep - if (timeout <= 0): + if(timeout <= 0): timeout = 10 - if (not outfile == "-"): + if(not outfile == "-"): outpath = outpath.rstrip(os.path.sep) - filepath = os.path.realpath(outpath + os.path.sep + outfile) - if (not os.path.exists(outpath)): + filepath = os.path.realpath(outpath+os.path.sep+outfile) + if(not os.path.exists(outpath)): os.makedirs(outpath) - if (os.path.exists(outpath) and os.path.isfile(outpath)): + if(os.path.exists(outpath) and os.path.isfile(outpath)): return False - if (os.path.exists(filepath) and os.path.isdir(filepath)): + if(os.path.exists(filepath) and os.path.isdir(filepath)): return False pretmpfilename = download_from_url_file_with_aiohttp( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize[0], - sleep, - timeout) - if (not pretmpfilename): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize[0], sleep, timeout) + if(not pretmpfilename): return False tmpfilename = pretmpfilename.get('Filename') downloadsize = int(os.path.getsize(tmpfilename)) fulldatasize = 0 - log.info("Moving file " + tmpfilename + " to " + filepath) + log.info("Moving file "+tmpfilename+" to "+filepath) exec_time_start = 
time.time() shutil.move(tmpfilename, filepath) try: - os.utime( - filepath, - (time.mktime( - email.utils.parsedate_to_datetime( - pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), - time.mktime( - email.utils.parsedate_to_datetime( - pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) + os.utime(filepath, (time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), + time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) except AttributeError: try: - os.utime( - filepath, - (time.mktime( - datetime.datetime.strptime( - pretmpfilename.get('Headers').get( - 'Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()), - time.mktime( - datetime.datetime.strptime( - pretmpfilename.get('Headers').get( - 'Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) + os.utime(filepath, (time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()), + time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) except ValueError: pass except ValueError: pass exec_time_end = time.time() - log.info("It took " + hms_string(exec_time_start - - exec_time_end) + " to move file.") - if (os.path.exists(tmpfilename)): + log.info("It took "+hms_string(exec_time_start - + exec_time_end)+" to move file.") + if(os.path.exists(tmpfilename)): os.remove(tmpfilename) - returnval = { - 'Type': "File", - 'Filename': filepath, - 'Filesize': downloadsize, - 'FilesizeAlt': { - 'IEC': get_readable_size( - downloadsize, - 2, - "IEC"), - 'SI': get_readable_size( - downloadsize, - 2, - "SI")}, - 'DownloadTime': pretmpfilename.get('DownloadTime'), - 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), - 'MoveFileTime': float( - exec_time_start - exec_time_end), - 'MoveFileTimeReadable': hms_string( - exec_time_start - exec_time_end), - 'Headers': pretmpfilename.get('Headers'), - 'Version': pretmpfilename.get('Version'), - 'Method': pretmpfilename.get('Method'), - 'Method': httpmethod, - 'HeadersSent': pretmpfilename.get('HeadersSent'), - 'URL': pretmpfilename.get('URL'), - 'Code': pretmpfilename.get('Code'), - 'Reason': pretmpfilename.get('Reason'), - 'HTTPLib': pretmpfilename.get('HTTPLib')} - if (outfile == "-"): + returnval = {'Type': "File", 'Filename': filepath, 'Filesize': downloadsize, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename.get('DownloadTime'), 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), 'MoveFileTime': float(exec_time_start - exec_time_end), 'MoveFileTimeReadable': hms_string( + exec_time_start - exec_time_end), 'Headers': pretmpfilename.get('Headers'), 'Version': pretmpfilename.get('Version'), 'Method': pretmpfilename.get('Method'), 'Method': httpmethod, 'HeadersSent': pretmpfilename.get('HeadersSent'), 'URL': pretmpfilename.get('URL'), 'Code': pretmpfilename.get('Code'), 'Reason': pretmpfilename.get('Reason'), 'HTTPLib': pretmpfilename.get('HTTPLib')} + if(outfile == "-"): pretmpfilename = download_from_url_file_with_aiohttp( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize[0], - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize[0], sleep, 
timeout) tmpfilename = pretmpfilename.get('Filename') downloadsize = int(os.path.getsize(tmpfilename)) fulldatasize = 0 @@ -4867,31 +2540,12 @@ def download_from_url_to_file_with_aiohttp( datasize = len(databytes) fulldatasize = datasize + fulldatasize percentage = "" - if (downloadsize > 0): - percentage = str( - "{0:.2f}".format( - float( - float( - fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.') + "%" + if(downloadsize > 0): + percentage = str("{0:.2f}".format( + float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%" downloaddiff = fulldatasize - prevdownsize - log.info( - "Copying " + - get_readable_size( - fulldatasize, - 2, - "SI")['ReadableWithSuffix'] + - " / " + - get_readable_size( - downloadsize, - 2, - "SI")['ReadableWithSuffix'] + - " " + - str(percentage) + - " / Copied " + - get_readable_size( - downloaddiff, - 2, - "IEC")['ReadableWithSuffix']) + log.info("Copying "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")[ + 'ReadableWithSuffix']+" "+str(percentage)+" / Copied "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']) prevdownsize = fulldatasize f.write(databytes) f.seek(0) @@ -4900,182 +2554,114 @@ def download_from_url_to_file_with_aiohttp( ft.close() os.remove(tmpfilename) exec_time_end = time.time() - log.info("It took " + hms_string(exec_time_start - - exec_time_end) + " to copy file.") - returnval = { - 'Type': "Content", - 'Content': fdata, - 'Contentsize': downloadsize, - 'ContentsizeAlt': { - 'IEC': get_readable_size( - downloadsize, - 2, - "IEC"), - 'SI': get_readable_size( - downloadsize, - 2, - "SI")}, - 'DownloadTime': pretmpfilename.get('DownloadTime'), - 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), - 'MoveFileTime': float( - exec_time_start - exec_time_end), - 'MoveFileTimeReadable': hms_string( - exec_time_start - exec_time_end), - 'Headers': pretmpfilename.get('Headers'), - 'Version': pretmpfilename.get('Version'), - 'Method': pretmpfilename.get('Method'), - 'Method': httpmethod, - 'HeadersSent': pretmpfilename.get('HeadersSent'), - 'URL': pretmpfilename.get('URL'), - 'Code': pretmpfilename.get('Code'), - 'Reason': pretmpfilename.get('Reason'), - 'HTTPLib': pretmpfilename.get('HTTPLib')} + log.info("It took "+hms_string(exec_time_start - + exec_time_end)+" to copy file.") + returnval = {'Type': "Content", 'Content': fdata, 'Contentsize': downloadsize, 'ContentsizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename.get('DownloadTime'), 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), 'MoveFileTime': float(exec_time_start - exec_time_end), 'MoveFileTimeReadable': hms_string( + exec_time_start - exec_time_end), 'Headers': pretmpfilename.get('Headers'), 'Version': pretmpfilename.get('Version'), 'Method': pretmpfilename.get('Method'), 'Method': httpmethod, 'HeadersSent': pretmpfilename.get('HeadersSent'), 'URL': pretmpfilename.get('URL'), 'Code': pretmpfilename.get('Code'), 'Reason': pretmpfilename.get('Reason'), 'HTTPLib': pretmpfilename.get('HTTPLib')} return returnval -if (not haveaiohttp): - def download_from_url_to_file_with_aiohttp( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - outfile="-", - outpath=os.getcwd(), - ranges=[ - None, - None], - buffersize=[ - 524288, - 524288], - sleep=-1, - timeout=10): +if(not 
haveaiohttp): + def download_from_url_to_file_with_aiohttp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10): returnval = download_from_url_to_file_with_urllib( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - outfile, - outpath, - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, outfile, outpath, sleep, timeout) return returnval -if (havehttpx): - def download_from_url_with_httpx( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - buffersize=524288, - sleep=-1, - timeout=10): +if(havehttpx): + def download_from_url_with_httpx(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10): global geturls_download_sleep, havezstd, havebrotli - if (sleep < 0): + if(sleep < 0): sleep = geturls_download_sleep - if (timeout <= 0): + if(timeout <= 0): timeout = 10 urlparts = urlparse.urlparse(httpurl) - if (isinstance(httpheaders, list)): + if(isinstance(httpheaders, list)): httpheaders = make_http_headers_from_list_to_dict(httpheaders) httpheaders = fix_header_names(httpheaders) - if (httpuseragent is not None): - if ('User-Agent' in httpheaders): + if(httpuseragent is not None): + if('User-Agent' in httpheaders): httpheaders['User-Agent'] = httpuseragent else: httpuseragent.update({'User-Agent': httpuseragent}) - if (httpreferer is not None): - if ('Referer' in httpheaders): + if(httpreferer is not None): + if('Referer' in httpheaders): httpheaders['Referer'] = httpreferer else: httpuseragent.update({'Referer': httpreferer}) - if (urlparts.username is not None or urlparts.password is not None): - if (sys.version[0] == "2"): + if(urlparts.username is not None or urlparts.password is not None): + if(sys.version[0] == "2"): inurlencode = b64encode( - str(urlparts.username + ":" + urlparts.password)) - if (sys.version[0] >= "3"): + str(urlparts.username+":"+urlparts.password)) + if(sys.version[0] >= "3"): inurlencode = b64encode( - str(urlparts.username + ":" + urlparts.password).encode()).decode("UTF-8") - httpheaders.update({'Authorization': "Basic " + inurlencode}) + str(urlparts.username+":"+urlparts.password).encode()).decode("UTF-8") + httpheaders.update({'Authorization': "Basic "+inurlencode}) time.sleep(sleep) - if (postdata is not None and not isinstance(postdata, dict)): + if(postdata is not None and not isinstance(postdata, dict)): postdata = urlencode(postdata) try: - if (httpmethod == "GET"): + if(httpmethod == "GET"): httpx_pool = httpx.Client( http1=True, http2=False, trust_env=True) geturls_text = httpx_pool.get( httpurl, timeout=timeout, headers=httpheaders, cookies=httpcookie) - elif (httpmethod == "POST"): + elif(httpmethod == "POST"): httpx_pool = httpx.Client( http1=True, http2=False, trust_env=True) geturls_text = httpx_pool.post( - httpurl, - timeout=timeout, - data=postdata, - headers=httpheaders, - cookies=httpcookie) + httpurl, timeout=timeout, data=postdata, headers=httpheaders, cookies=httpcookie) else: httpx_pool = httpx.Client( http1=True, http2=False, trust_env=True) geturls_text = httpx_pool.get( httpurl, timeout=timeout, headers=httpheaders, 
cookies=httpcookie) except httpx.ConnectTimeout: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False except httpx.ConnectError: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False except socket.timeout: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False httpcodeout = geturls_text.status_code try: httpcodereason = geturls_text.reason_phrase - except BaseException: + except: httpcodereason = http_status_to_reason(geturls_text.status_code) httpversionout = geturls_text.http_version httpmethodout = httpmethod httpurlout = str(geturls_text.url) httpheaderout = geturls_text.headers httpheadersentout = geturls_text.request.headers - if (isinstance(httpheaderout, list)): + if(isinstance(httpheaderout, list)): httpheaderout = dict( make_http_headers_from_list_to_dict(httpheaderout)) - if (sys.version[0] == "2"): + if(sys.version[0] == "2"): try: prehttpheaderout = httpheaderout httpheaderkeys = httpheaderout.keys() imax = len(httpheaderkeys) ic = 0 httpheaderout = {} - while (ic < imax): + while(ic < imax): httpheaderout.update( {httpheaderkeys[ic]: prehttpheaderout[httpheaderkeys[ic]]}) ic += 1 except AttributeError: pass httpheaderout = fix_header_names(httpheaderout) - if (isinstance(httpheadersentout, list)): + if(isinstance(httpheadersentout, list)): httpheadersentout = dict( make_http_headers_from_list_to_dict(httpheadersentout)) httpheadersentout = fix_header_names(httpheadersentout) downloadsize = httpheaderout.get('Content-Length') - if (downloadsize is not None): + if(downloadsize is not None): downloadsize = int(downloadsize) if downloadsize is None: downloadsize = 0 fulldatasize = 0 prevdownsize = 0 - log.info("Downloading URL " + httpurl) + log.info("Downloading URL "+httpurl) with BytesIO() as strbuf: while True: databytes = geturls_text.read() @@ -5084,391 +2670,159 @@ def download_from_url_with_httpx( datasize = len(databytes) fulldatasize = datasize + fulldatasize percentage = "" - if (downloadsize > 0): - percentage = str( - "{0:.2f}".format( - float( - float( - fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.') + "%" + if(downloadsize > 0): + percentage = str("{0:.2f}".format( + float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%" downloaddiff = fulldatasize - prevdownsize - log.info( - "Downloading " + - get_readable_size( - fulldatasize, - 2, - "SI")['ReadableWithSuffix'] + - " / " + - get_readable_size( - downloadsize, - 2, - "SI")['ReadableWithSuffix'] + - " " + - str(percentage) + - " / Downloaded " + - get_readable_size( - downloaddiff, - 2, - "IEC")['ReadableWithSuffix']) + log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")[ + 'ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']) prevdownsize = fulldatasize strbuf.write(databytes) break strbuf.seek(0) returnval_content = strbuf.read() geturls_text.close() - if (httpheaderout.get("Content-Encoding") == "gzip"): + if(httpheaderout.get("Content-Encoding") == "gzip"): try: returnval_content = zlib.decompress( - returnval_content, 16 + zlib.MAX_WBITS) + returnval_content, 16+zlib.MAX_WBITS) except zlib.error: pass - elif (httpheaderout.get("Content-Encoding") == "deflate"): + elif(httpheaderout.get("Content-Encoding") == "deflate"): try: returnval_content = zlib.decompress(returnval_content) except zlib.error: pass - elif 
(httpheaderout.get("Content-Encoding") == "br" and havebrotli): + elif(httpheaderout.get("Content-Encoding") == "br" and havebrotli): try: returnval_content = brotli.decompress(returnval_content) except brotli.error: pass - elif (httpheaderout.get("Content-Encoding") == "zstd" and havezstd): + elif(httpheaderout.get("Content-Encoding") == "zstd" and havezstd): try: returnval_content = zstandard.decompress(returnval_content) except zstandard.error: pass - elif ((httpheaderout.get("Content-Encoding") == "lzma" or httpheaderout.get("Content-Encoding") == "xz") and havelzma): + elif((httpheaderout.get("Content-Encoding") == "lzma" or httpheaderout.get("Content-Encoding") == "xz") and havelzma): try: returnval_content = lzma.decompress(returnval_content) except zstandard.error: pass - elif (httpheaderout.get("Content-Encoding") == "bzip2"): + elif(httpheaderout.get("Content-Encoding") == "bzip2"): try: returnval_content = bz2.decompress(returnval_content) except zstandard.error: pass - returnval = { - 'Type': "Content", - 'Content': returnval_content, - 'Contentsize': fulldatasize, - 'ContentsizeAlt': { - 'IEC': get_readable_size( - fulldatasize, - 2, - "IEC"), - 'SI': get_readable_size( - fulldatasize, - 2, - "SI")}, - 'Headers': httpheaderout, - 'Version': httpversionout, - 'Method': httpmethodout, - 'HeadersSent': httpheadersentout, - 'URL': httpurlout, - 'Code': httpcodeout, - 'Reason': httpcodereason, - 'HTTPLib': "httpx"} + returnval = {'Type': "Content", 'Content': returnval_content, 'Contentsize': fulldatasize, 'ContentsizeAlt': {'IEC': get_readable_size(fulldatasize, 2, "IEC"), 'SI': get_readable_size( + fulldatasize, 2, "SI")}, 'Headers': httpheaderout, 'Version': httpversionout, 'Method': httpmethodout, 'HeadersSent': httpheadersentout, 'URL': httpurlout, 'Code': httpcodeout, 'Reason': httpcodereason, 'HTTPLib': "httpx"} geturls_text.close() return returnval -if (not havehttpx): - def download_from_url_with_httpx( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - buffersize=524288, - sleep=-1, - timeout=10): +if(not havehttpx): + def download_from_url_with_httpx(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10): returnval = download_from_url_with_urllib( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep, timeout) return returnval -if (havehttpx): - def download_from_url_file_with_httpx( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod="GET", - postdata=None, - ranges=[ - None, - None], - buffersize=524288, - sleep=-1, - timeout=10): +if(havehttpx): + def download_from_url_file_with_httpx(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10): global geturls_download_sleep, havezstd, havebrotli, tmpfileprefix, tmpfilesuffix exec_time_start = time.time() myhash = hashlib.new("sha1") - if (sys.version[0] == "2"): + if(sys.version[0] == "2"): myhash.update(httpurl) myhash.update(str(buffersize)) myhash.update(str(exec_time_start)) - if (sys.version[0] >= "3"): + if(sys.version[0] >= "3"): myhash.update(httpurl.encode('utf-8')) 
myhash.update(str(buffersize).encode('utf-8')) myhash.update(str(exec_time_start).encode('utf-8')) newtmpfilesuffix = tmpfilesuffix + str(myhash.hexdigest()) - if (sleep < 0): + if(sleep < 0): sleep = geturls_download_sleep - if (timeout <= 0): + if(timeout <= 0): timeout = 10 pretmpfilename = download_from_url_with_httpx( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - sleep, - timeout) - if (not pretmpfilename): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep, timeout) + if(not pretmpfilename): return False with tempfile.NamedTemporaryFile('wb+', prefix=tmpfileprefix, suffix=newtmpfilesuffix, delete=False) as f: tmpfilename = f.name try: - os.utime( - tmpfilename, - (time.mktime( - email.utils.parsedate_to_datetime( - pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), - time.mktime( - email.utils.parsedate_to_datetime( - pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) + os.utime(tmpfilename, (time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), + time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) except AttributeError: try: - os.utime( - tmpfilename, - (time.mktime( - datetime.datetime.strptime( - pretmpfilename.get('Headers').get( - 'Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()), - time.mktime( - datetime.datetime.strptime( - pretmpfilename.get('Headers').get( - 'Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) + os.utime(tmpfilename, (time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()), + time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) except ValueError: pass except ValueError: pass - returnval = { - 'Type': "File", - 'Filename': tmpfilename, - 'Filesize': pretmpfilename.get('Contentsize'), - 'FilesizeAlt': { - 'IEC': get_readable_size( - pretmpfilename.get('Contentsize'), - 2, - "IEC"), - 'SI': get_readable_size( - pretmpfilename.get('Contentsize'), - 2, - "SI")}, - 'Headers': pretmpfilename.get('Headers'), - 'Version': pretmpfilename.get('Version'), - 'Method': pretmpfilename.get('Method'), - 'HeadersSent': pretmpfilename.get('HeadersSent'), - 'URL': pretmpfilename.get('URL'), - 'Code': pretmpfilename.get('Code'), - 'Reason': pretmpfilename.get('Reason'), - 'HTTPLib': pretmpfilename.get('HTTPLib')} + returnval = {'Type': "File", 'Filename': tmpfilename, 'Filesize': pretmpfilename.get('Contentsize'), 'FilesizeAlt': {'IEC': get_readable_size(pretmpfilename.get('Contentsize'), 2, "IEC"), 'SI': get_readable_size(pretmpfilename.get('Contentsize'), 2, "SI")}, 'Headers': pretmpfilename.get( + 'Headers'), 'Version': pretmpfilename.get('Version'), 'Method': pretmpfilename.get('Method'), 'HeadersSent': pretmpfilename.get('HeadersSent'), 'URL': pretmpfilename.get('URL'), 'Code': pretmpfilename.get('Code'), 'Reason': pretmpfilename.get('Reason'), 'HTTPLib': pretmpfilename.get('HTTPLib')} f.write(pretmpfilename.get('Content')) f.close() exec_time_end = time.time() - log.info("It took " + hms_string(exec_time_start - - exec_time_end) + " to download file.") - returnval.update( - { - 'Filesize': os.path.getsize(tmpfilename), - 'FilesizeAlt': { - 'IEC': get_readable_size( - os.path.getsize(tmpfilename), - 2, - "IEC"), - 'SI': 
get_readable_size( - os.path.getsize(tmpfilename), - 2, - "SI")}, - 'DownloadTime': float( - exec_time_start - - exec_time_end), - 'DownloadTimeReadable': hms_string( - exec_time_start - - exec_time_end)}) + log.info("It took "+hms_string(exec_time_start - + exec_time_end)+" to download file.") + returnval.update({'Filesize': os.path.getsize(tmpfilename), 'FilesizeAlt': {'IEC': get_readable_size(os.path.getsize(tmpfilename), 2, "IEC"), 'SI': get_readable_size( + os.path.getsize(tmpfilename), 2, "SI")}, 'DownloadTime': float(exec_time_start - exec_time_end), 'DownloadTimeReadable': hms_string(exec_time_start - exec_time_end)}) return returnval -if (not havehttpx): - def download_from_url_file_with_httpx( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - ranges=[ - None, - None], - buffersize=524288, - sleep=-1, - timeout=10): +if(not havehttpx): + def download_from_url_file_with_httpx(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10): returnval = download_from_url_file_with_urllib( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize, - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize, sleep, timeout) return returnval -if (havehttpx): - def download_from_url_to_file_with_httpx( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - outfile="-", - outpath=os.getcwd(), - ranges=[ - None, - None], - buffersize=[ - 524288, - 524288], - sleep=-1, - timeout=10): +if(havehttpx): + def download_from_url_to_file_with_httpx(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10): global geturls_download_sleep, havezstd, havebrotli - if (sleep < 0): + if(sleep < 0): sleep = geturls_download_sleep - if (timeout <= 0): + if(timeout <= 0): timeout = 10 - if (not outfile == "-"): + if(not outfile == "-"): outpath = outpath.rstrip(os.path.sep) - filepath = os.path.realpath(outpath + os.path.sep + outfile) - if (not os.path.exists(outpath)): + filepath = os.path.realpath(outpath+os.path.sep+outfile) + if(not os.path.exists(outpath)): os.makedirs(outpath) - if (os.path.exists(outpath) and os.path.isfile(outpath)): + if(os.path.exists(outpath) and os.path.isfile(outpath)): return False - if (os.path.exists(filepath) and os.path.isdir(filepath)): + if(os.path.exists(filepath) and os.path.isdir(filepath)): return False pretmpfilename = download_from_url_file_with_httpx( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize[0], - sleep, - timeout) - if (not pretmpfilename): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize[0], sleep, timeout) + if(not pretmpfilename): return False tmpfilename = pretmpfilename.get('Filename') downloadsize = int(os.path.getsize(tmpfilename)) fulldatasize = 0 - log.info("Moving file " + tmpfilename + " to " + filepath) + log.info("Moving file "+tmpfilename+" to "+filepath) exec_time_start = time.time() 
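# The move that follows is timed the same way as the downloads above: the
# call sites log hms_string(exec_time_start - exec_time_end), i.e. start
# minus end, which is a negative duration, so the real helper presumably
# normalizes the sign. A hypothetical stand-in that tolerates either
# argument order (hms_string here is a sketch, not the body defined in
# pywwwget itself):

import time

def hms_string(sec_elapsed):
    # Normalize the sign, then format as H:MM:SS.ss
    sec_elapsed = abs(sec_elapsed)
    hours = int(sec_elapsed // 3600)
    minutes = int((sec_elapsed % 3600) // 60)
    seconds = sec_elapsed % 60
    return "{}:{:02d}:{:05.2f}".format(hours, minutes, seconds)

exec_time_start = time.time()
# ... shutil.move(tmpfilename, filepath) would run here ...
exec_time_end = time.time()
print("It took " + hms_string(exec_time_start - exec_time_end) + " to move file.")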
shutil.move(tmpfilename, filepath) try: - os.utime( - filepath, - (time.mktime( - email.utils.parsedate_to_datetime( - pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), - time.mktime( - email.utils.parsedate_to_datetime( - pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) + os.utime(filepath, (time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), + time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) except AttributeError: try: - os.utime( - filepath, - (time.mktime( - datetime.datetime.strptime( - pretmpfilename.get('Headers').get( - 'Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()), - time.mktime( - datetime.datetime.strptime( - pretmpfilename.get('Headers').get( - 'Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) + os.utime(filepath, (time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()), + time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) except ValueError: pass except ValueError: pass exec_time_end = time.time() - log.info("It took " + hms_string(exec_time_start - - exec_time_end) + " to move file.") - if (os.path.exists(tmpfilename)): + log.info("It took "+hms_string(exec_time_start - + exec_time_end)+" to move file.") + if(os.path.exists(tmpfilename)): os.remove(tmpfilename) - returnval = { - 'Type': "File", - 'Filename': filepath, - 'Filesize': downloadsize, - 'FilesizeAlt': { - 'IEC': get_readable_size( - downloadsize, - 2, - "IEC"), - 'SI': get_readable_size( - downloadsize, - 2, - "SI")}, - 'DownloadTime': pretmpfilename.get('DownloadTime'), - 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), - 'MoveFileTime': float( - exec_time_start - exec_time_end), - 'MoveFileTimeReadable': hms_string( - exec_time_start - exec_time_end), - 'Headers': pretmpfilename.get('Headers'), - 'Version': pretmpfilename.get('Version'), - 'Method': pretmpfilename.get('Method'), - 'Method': httpmethod, - 'HeadersSent': pretmpfilename.get('HeadersSent'), - 'URL': pretmpfilename.get('URL'), - 'Code': pretmpfilename.get('Code'), - 'Reason': pretmpfilename.get('Reason'), - 'HTTPLib': pretmpfilename.get('HTTPLib')} - if (outfile == "-"): + returnval = {'Type': "File", 'Filename': filepath, 'Filesize': downloadsize, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename.get('DownloadTime'), 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), 'MoveFileTime': float(exec_time_start - exec_time_end), 'MoveFileTimeReadable': hms_string( + exec_time_start - exec_time_end), 'Headers': pretmpfilename.get('Headers'), 'Version': pretmpfilename.get('Version'), 'Method': pretmpfilename.get('Method'), 'Method': httpmethod, 'HeadersSent': pretmpfilename.get('HeadersSent'), 'URL': pretmpfilename.get('URL'), 'Code': pretmpfilename.get('Code'), 'Reason': pretmpfilename.get('Reason'), 'HTTPLib': pretmpfilename.get('HTTPLib')} + if(outfile == "-"): pretmpfilename = download_from_url_file_with_httpx( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize[0], - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize[0], sleep, timeout) tmpfilename 
= pretmpfilename.get('Filename') downloadsize = int(os.path.getsize(tmpfilename)) fulldatasize = 0 @@ -5483,31 +2837,12 @@ def download_from_url_to_file_with_httpx( datasize = len(databytes) fulldatasize = datasize + fulldatasize percentage = "" - if (downloadsize > 0): - percentage = str( - "{0:.2f}".format( - float( - float( - fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.') + "%" + if(downloadsize > 0): + percentage = str("{0:.2f}".format( + float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%" downloaddiff = fulldatasize - prevdownsize - log.info( - "Copying " + - get_readable_size( - fulldatasize, - 2, - "SI")['ReadableWithSuffix'] + - " / " + - get_readable_size( - downloadsize, - 2, - "SI")['ReadableWithSuffix'] + - " " + - str(percentage) + - " / Copied " + - get_readable_size( - downloaddiff, - 2, - "IEC")['ReadableWithSuffix']) + log.info("Copying "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")[ + 'ReadableWithSuffix']+" "+str(percentage)+" / Copied "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']) prevdownsize = fulldatasize f.write(databytes) f.seek(0) @@ -5516,182 +2851,114 @@ def download_from_url_to_file_with_httpx( ft.close() os.remove(tmpfilename) exec_time_end = time.time() - log.info("It took " + hms_string(exec_time_start - - exec_time_end) + " to copy file.") - returnval = { - 'Type': "Content", - 'Content': fdata, - 'Contentsize': downloadsize, - 'ContentsizeAlt': { - 'IEC': get_readable_size( - downloadsize, - 2, - "IEC"), - 'SI': get_readable_size( - downloadsize, - 2, - "SI")}, - 'DownloadTime': pretmpfilename.get('DownloadTime'), - 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), - 'MoveFileTime': float( - exec_time_start - exec_time_end), - 'MoveFileTimeReadable': hms_string( - exec_time_start - exec_time_end), - 'Headers': pretmpfilename.get('Headers'), - 'Version': pretmpfilename.get('Version'), - 'Method': pretmpfilename.get('Method'), - 'Method': httpmethod, - 'HeadersSent': pretmpfilename.get('HeadersSent'), - 'URL': pretmpfilename.get('URL'), - 'Code': pretmpfilename.get('Code'), - 'Reason': pretmpfilename.get('Reason'), - 'HTTPLib': pretmpfilename.get('HTTPLib')} + log.info("It took "+hms_string(exec_time_start - + exec_time_end)+" to copy file.") + returnval = {'Type': "Content", 'Content': fdata, 'Contentsize': downloadsize, 'ContentsizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename.get('DownloadTime'), 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), 'MoveFileTime': float(exec_time_start - exec_time_end), 'MoveFileTimeReadable': hms_string( + exec_time_start - exec_time_end), 'Headers': pretmpfilename.get('Headers'), 'Version': pretmpfilename.get('Version'), 'Method': pretmpfilename.get('Method'), 'Method': httpmethod, 'HeadersSent': pretmpfilename.get('HeadersSent'), 'URL': pretmpfilename.get('URL'), 'Code': pretmpfilename.get('Code'), 'Reason': pretmpfilename.get('Reason'), 'HTTPLib': pretmpfilename.get('HTTPLib')} return returnval -if (not havehttpx): - def download_from_url_to_file_with_httpx( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - outfile="-", - outpath=os.getcwd(), - ranges=[ - None, - None], - buffersize=[ - 524288, - 524288], - sleep=-1, - timeout=10): +if(not havehttpx): + def 
download_from_url_to_file_with_httpx(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10): returnval = download_from_url_to_file_with_urllib( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - outfile, - outpath, - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, outfile, outpath, sleep, timeout) return returnval -if (havehttpx): - def download_from_url_with_httpx2( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - buffersize=524288, - sleep=-1, - timeout=10): +if(havehttpx): + def download_from_url_with_httpx2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10): global geturls_download_sleep, havezstd, havebrotli - if (sleep < 0): + if(sleep < 0): sleep = geturls_download_sleep - if (timeout <= 0): + if(timeout <= 0): timeout = 10 urlparts = urlparse.urlparse(httpurl) - if (isinstance(httpheaders, list)): + if(isinstance(httpheaders, list)): httpheaders = make_http_headers_from_list_to_dict(httpheaders) httpheaders = fix_header_names(httpheaders) - if (httpuseragent is not None): - if ('User-Agent' in httpheaders): + if(httpuseragent is not None): + if('User-Agent' in httpheaders): httpheaders['User-Agent'] = httpuseragent else: httpuseragent.update({'User-Agent': httpuseragent}) - if (httpreferer is not None): - if ('Referer' in httpheaders): + if(httpreferer is not None): + if('Referer' in httpheaders): httpheaders['Referer'] = httpreferer else: httpuseragent.update({'Referer': httpreferer}) - if (urlparts.username is not None or urlparts.password is not None): - if (sys.version[0] == "2"): + if(urlparts.username is not None or urlparts.password is not None): + if(sys.version[0] == "2"): inurlencode = b64encode( - str(urlparts.username + ":" + urlparts.password)) - if (sys.version[0] >= "3"): + str(urlparts.username+":"+urlparts.password)) + if(sys.version[0] >= "3"): inurlencode = b64encode( - str(urlparts.username + ":" + urlparts.password).encode()).decode("UTF-8") - httpheaders.update({'Authorization': "Basic " + inurlencode}) + str(urlparts.username+":"+urlparts.password).encode()).decode("UTF-8") + httpheaders.update({'Authorization': "Basic "+inurlencode}) time.sleep(sleep) - if (postdata is not None and not isinstance(postdata, dict)): + if(postdata is not None and not isinstance(postdata, dict)): postdata = urlencode(postdata) try: - if (httpmethod == "GET"): + if(httpmethod == "GET"): httpx_pool = httpx.Client( http1=True, http2=True, trust_env=True) geturls_text = httpx_pool.get( httpurl, timeout=timeout, headers=httpheaders, cookies=httpcookie) - elif (httpmethod == "POST"): + elif(httpmethod == "POST"): httpx_pool = httpx.Client( http1=True, http2=True, trust_env=True) geturls_text = httpx_pool.post( - httpurl, - timeout=timeout, - data=postdata, - headers=httpheaders, - cookies=httpcookie) + httpurl, timeout=timeout, data=postdata, headers=httpheaders, cookies=httpcookie) else: httpx_pool = httpx.Client( http1=True, http2=True, trust_env=True) geturls_text = httpx_pool.get( httpurl, timeout=timeout, headers=httpheaders, cookies=httpcookie) except 
httpx.ConnectTimeout: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False except httpx.ConnectError: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False except socket.timeout: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False httpcodeout = geturls_text.status_code try: httpcodereason = geturls_text.reason_phrase - except BaseException: + except: httpcodereason = http_status_to_reason(geturls_text.status_code) httpversionout = geturls_text.http_version httpmethodout = httpmethod httpurlout = str(geturls_text.url) httpheaderout = geturls_text.headers httpheadersentout = geturls_text.request.headers - if (isinstance(httpheaderout, list)): + if(isinstance(httpheaderout, list)): httpheaderout = dict( make_http_headers_from_list_to_dict(httpheaderout)) - if (sys.version[0] == "2"): + if(sys.version[0] == "2"): try: prehttpheaderout = httpheaderout httpheaderkeys = httpheaderout.keys() imax = len(httpheaderkeys) ic = 0 httpheaderout = {} - while (ic < imax): + while(ic < imax): httpheaderout.update( {httpheaderkeys[ic]: prehttpheaderout[httpheaderkeys[ic]]}) ic += 1 except AttributeError: pass httpheaderout = fix_header_names(httpheaderout) - if (isinstance(httpheadersentout, list)): + if(isinstance(httpheadersentout, list)): httpheadersentout = dict( make_http_headers_from_list_to_dict(httpheadersentout)) httpheadersentout = fix_header_names(httpheadersentout) downloadsize = httpheaderout.get('Content-Length') - if (downloadsize is not None): + if(downloadsize is not None): downloadsize = int(downloadsize) if downloadsize is None: downloadsize = 0 fulldatasize = 0 prevdownsize = 0 - log.info("Downloading URL " + httpurl) + log.info("Downloading URL "+httpurl) with BytesIO() as strbuf: while True: databytes = geturls_text.read() @@ -5700,391 +2967,159 @@ def download_from_url_with_httpx2( datasize = len(databytes) fulldatasize = datasize + fulldatasize percentage = "" - if (downloadsize > 0): - percentage = str( - "{0:.2f}".format( - float( - float( - fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.') + "%" + if(downloadsize > 0): + percentage = str("{0:.2f}".format( + float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%" downloaddiff = fulldatasize - prevdownsize - log.info( - "Downloading " + - get_readable_size( - fulldatasize, - 2, - "SI")['ReadableWithSuffix'] + - " / " + - get_readable_size( - downloadsize, - 2, - "SI")['ReadableWithSuffix'] + - " " + - str(percentage) + - " / Downloaded " + - get_readable_size( - downloaddiff, - 2, - "IEC")['ReadableWithSuffix']) + log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")[ + 'ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']) prevdownsize = fulldatasize strbuf.write(databytes) break strbuf.seek(0) returnval_content = strbuf.read() geturls_text.close() - if (httpheaderout.get("Content-Encoding") == "gzip"): + if(httpheaderout.get("Content-Encoding") == "gzip"): try: returnval_content = zlib.decompress( - returnval_content, 16 + zlib.MAX_WBITS) + returnval_content, 16+zlib.MAX_WBITS) except zlib.error: pass - elif (httpheaderout.get("Content-Encoding") == "deflate"): + elif(httpheaderout.get("Content-Encoding") == "deflate"): try: returnval_content = zlib.decompress(returnval_content) except zlib.error: pass - elif 
(httpheaderout.get("Content-Encoding") == "br" and havebrotli): + elif(httpheaderout.get("Content-Encoding") == "br" and havebrotli): try: returnval_content = brotli.decompress(returnval_content) except brotli.error: pass - elif (httpheaderout.get("Content-Encoding") == "zstd" and havezstd): + elif(httpheaderout.get("Content-Encoding") == "zstd" and havezstd): try: returnval_content = zstandard.decompress(returnval_content) except zstandard.error: pass - elif ((httpheaderout.get("Content-Encoding") == "lzma" or httpheaderout.get("Content-Encoding") == "xz") and havelzma): + elif((httpheaderout.get("Content-Encoding") == "lzma" or httpheaderout.get("Content-Encoding") == "xz") and havelzma): try: returnval_content = lzma.decompress(returnval_content) except zstandard.error: pass - elif (httpheaderout.get("Content-Encoding") == "bzip2"): + elif(httpheaderout.get("Content-Encoding") == "bzip2"): try: returnval_content = bz2.decompress(returnval_content) except zstandard.error: pass - returnval = { - 'Type': "Content", - 'Content': returnval_content, - 'Contentsize': fulldatasize, - 'ContentsizeAlt': { - 'IEC': get_readable_size( - fulldatasize, - 2, - "IEC"), - 'SI': get_readable_size( - fulldatasize, - 2, - "SI")}, - 'Headers': httpheaderout, - 'Version': httpversionout, - 'Method': httpmethodout, - 'HeadersSent': httpheadersentout, - 'URL': httpurlout, - 'Code': httpcodeout, - 'Reason': httpcodereason, - 'HTTPLib': "httpx2"} + returnval = {'Type': "Content", 'Content': returnval_content, 'Contentsize': fulldatasize, 'ContentsizeAlt': {'IEC': get_readable_size(fulldatasize, 2, "IEC"), 'SI': get_readable_size( + fulldatasize, 2, "SI")}, 'Headers': httpheaderout, 'Version': httpversionout, 'Method': httpmethodout, 'HeadersSent': httpheadersentout, 'URL': httpurlout, 'Code': httpcodeout, 'Reason': httpcodereason, 'HTTPLib': "httpx2"} geturls_text.close() return returnval -if (not havehttpx): - def download_from_url_with_httpx2( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - buffersize=524288, - sleep=-1, - timeout=10): +if(not havehttpx): + def download_from_url_with_httpx2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10): returnval = download_from_url_with_urllib( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep, timeout) return returnval -if (havehttpx): - def download_from_url_file_with_httpx2( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod="GET", - postdata=None, - ranges=[ - None, - None], - buffersize=524288, - sleep=-1, - timeout=10): +if(havehttpx): + def download_from_url_file_with_httpx2(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10): global geturls_download_sleep, havezstd, havebrotli, tmpfileprefix, tmpfilesuffix exec_time_start = time.time() myhash = hashlib.new("sha1") - if (sys.version[0] == "2"): + if(sys.version[0] == "2"): myhash.update(httpurl) myhash.update(str(buffersize)) myhash.update(str(exec_time_start)) - if (sys.version[0] >= "3"): + if(sys.version[0] >= "3"): myhash.update(httpurl.encode('utf-8')) 
myhash.update(str(buffersize).encode('utf-8')) myhash.update(str(exec_time_start).encode('utf-8')) newtmpfilesuffix = tmpfilesuffix + str(myhash.hexdigest()) - if (sleep < 0): + if(sleep < 0): sleep = geturls_download_sleep - if (timeout <= 0): + if(timeout <= 0): timeout = 10 pretmpfilename = download_from_url_with_httpx2( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - sleep, - timeout) - if (not pretmpfilename): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep, timeout) + if(not pretmpfilename): return False with tempfile.NamedTemporaryFile('wb+', prefix=tmpfileprefix, suffix=newtmpfilesuffix, delete=False) as f: tmpfilename = f.name try: - os.utime( - tmpfilename, - (time.mktime( - email.utils.parsedate_to_datetime( - pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), - time.mktime( - email.utils.parsedate_to_datetime( - pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) + os.utime(tmpfilename, (time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), + time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) except AttributeError: try: - os.utime( - tmpfilename, - (time.mktime( - datetime.datetime.strptime( - pretmpfilename.get('Headers').get( - 'Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()), - time.mktime( - datetime.datetime.strptime( - pretmpfilename.get('Headers').get( - 'Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) + os.utime(tmpfilename, (time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()), + time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) except ValueError: pass except ValueError: pass - returnval = { - 'Type': "File", - 'Filename': tmpfilename, - 'Filesize': pretmpfilename.get('Contentsize'), - 'FilesizeAlt': { - 'IEC': get_readable_size( - pretmpfilename.get('Contentsize'), - 2, - "IEC"), - 'SI': get_readable_size( - pretmpfilename.get('Contentsize'), - 2, - "SI")}, - 'Headers': pretmpfilename.get('Headers'), - 'Version': pretmpfilename.get('Version'), - 'Method': pretmpfilename.get('Method'), - 'HeadersSent': pretmpfilename.get('HeadersSent'), - 'URL': pretmpfilename.get('URL'), - 'Code': pretmpfilename.get('Code'), - 'Reason': pretmpfilename.get('Reason'), - 'HTTPLib': pretmpfilename.get('HTTPLib')} + returnval = {'Type': "File", 'Filename': tmpfilename, 'Filesize': pretmpfilename.get('Contentsize'), 'FilesizeAlt': {'IEC': get_readable_size(pretmpfilename.get('Contentsize'), 2, "IEC"), 'SI': get_readable_size(pretmpfilename.get('Contentsize'), 2, "SI")}, 'Headers': pretmpfilename.get( + 'Headers'), 'Version': pretmpfilename.get('Version'), 'Method': pretmpfilename.get('Method'), 'HeadersSent': pretmpfilename.get('HeadersSent'), 'URL': pretmpfilename.get('URL'), 'Code': pretmpfilename.get('Code'), 'Reason': pretmpfilename.get('Reason'), 'HTTPLib': pretmpfilename.get('HTTPLib')} f.write(pretmpfilename.get('Content')) f.close() exec_time_end = time.time() - log.info("It took " + hms_string(exec_time_start - - exec_time_end) + " to download file.") - returnval.update( - { - 'Filesize': os.path.getsize(tmpfilename), - 'FilesizeAlt': { - 'IEC': get_readable_size( - os.path.getsize(tmpfilename), - 2, - "IEC"), - 'SI': 
get_readable_size( - os.path.getsize(tmpfilename), - 2, - "SI")}, - 'DownloadTime': float( - exec_time_start - - exec_time_end), - 'DownloadTimeReadable': hms_string( - exec_time_start - - exec_time_end)}) + log.info("It took "+hms_string(exec_time_start - + exec_time_end)+" to download file.") + returnval.update({'Filesize': os.path.getsize(tmpfilename), 'FilesizeAlt': {'IEC': get_readable_size(os.path.getsize(tmpfilename), 2, "IEC"), 'SI': get_readable_size( + os.path.getsize(tmpfilename), 2, "SI")}, 'DownloadTime': float(exec_time_start - exec_time_end), 'DownloadTimeReadable': hms_string(exec_time_start - exec_time_end)}) return returnval -if (not havehttpx): - def download_from_url_file_with_httpx2( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - ranges=[ - None, - None], - buffersize=524288, - sleep=-1, - timeout=10): +if(not havehttpx): + def download_from_url_file_with_httpx2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10): returnval = download_from_url_file_with_urllib( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize, - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize, sleep, timeout) return returnval -if (havehttpx): - def download_from_url_to_file_with_httpx2( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - outfile="-", - outpath=os.getcwd(), - ranges=[ - None, - None], - buffersize=[ - 524288, - 524288], - sleep=-1, - timeout=10): +if(havehttpx): + def download_from_url_to_file_with_httpx2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10): global geturls_download_sleep, havezstd, havebrotli - if (sleep < 0): + if(sleep < 0): sleep = geturls_download_sleep - if (timeout <= 0): + if(timeout <= 0): timeout = 10 - if (not outfile == "-"): + if(not outfile == "-"): outpath = outpath.rstrip(os.path.sep) - filepath = os.path.realpath(outpath + os.path.sep + outfile) - if (not os.path.exists(outpath)): + filepath = os.path.realpath(outpath+os.path.sep+outfile) + if(not os.path.exists(outpath)): os.makedirs(outpath) - if (os.path.exists(outpath) and os.path.isfile(outpath)): + if(os.path.exists(outpath) and os.path.isfile(outpath)): return False - if (os.path.exists(filepath) and os.path.isdir(filepath)): + if(os.path.exists(filepath) and os.path.isdir(filepath)): return False pretmpfilename = download_from_url_file_with_httpx2( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize[0], - sleep, - timeout) - if (not pretmpfilename): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize[0], sleep, timeout) + if(not pretmpfilename): return False tmpfilename = pretmpfilename.get('Filename') downloadsize = int(os.path.getsize(tmpfilename)) fulldatasize = 0 - log.info("Moving file " + tmpfilename + " to " + filepath) + log.info("Moving file "+tmpfilename+" to "+filepath) exec_time_start = time.time() 
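+        # Move the finished temp download into its final location, then make a
+        # best-effort attempt below to mirror the server's Last-Modified header
+        # onto the file's timestamps via os.utime().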
shutil.move(tmpfilename, filepath) try: - os.utime( - filepath, - (time.mktime( - email.utils.parsedate_to_datetime( - pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), - time.mktime( - email.utils.parsedate_to_datetime( - pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) + os.utime(filepath, (time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), + time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) except AttributeError: try: - os.utime( - filepath, - (time.mktime( - datetime.datetime.strptime( - pretmpfilename.get('Headers').get( - 'Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()), - time.mktime( - datetime.datetime.strptime( - pretmpfilename.get('Headers').get( - 'Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) + os.utime(filepath, (time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()), + time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) except ValueError: pass except ValueError: pass exec_time_end = time.time() - log.info("It took " + hms_string(exec_time_start - - exec_time_end) + " to move file.") - if (os.path.exists(tmpfilename)): + log.info("It took "+hms_string(exec_time_start - + exec_time_end)+" to move file.") + if(os.path.exists(tmpfilename)): os.remove(tmpfilename) - returnval = { - 'Type': "File", - 'Filename': filepath, - 'Filesize': downloadsize, - 'FilesizeAlt': { - 'IEC': get_readable_size( - downloadsize, - 2, - "IEC"), - 'SI': get_readable_size( - downloadsize, - 2, - "SI")}, - 'DownloadTime': pretmpfilename.get('DownloadTime'), - 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), - 'MoveFileTime': float( - exec_time_start - exec_time_end), - 'MoveFileTimeReadable': hms_string( - exec_time_start - exec_time_end), - 'Headers': pretmpfilename.get('Headers'), - 'Version': pretmpfilename.get('Version'), - 'Method': pretmpfilename.get('Method'), - 'Method': httpmethod, - 'HeadersSent': pretmpfilename.get('HeadersSent'), - 'URL': pretmpfilename.get('URL'), - 'Code': pretmpfilename.get('Code'), - 'Reason': pretmpfilename.get('Reason'), - 'HTTPLib': pretmpfilename.get('HTTPLib')} - if (outfile == "-"): + returnval = {'Type': "File", 'Filename': filepath, 'Filesize': downloadsize, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename.get('DownloadTime'), 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), 'MoveFileTime': float(exec_time_start - exec_time_end), 'MoveFileTimeReadable': hms_string( + exec_time_start - exec_time_end), 'Headers': pretmpfilename.get('Headers'), 'Version': pretmpfilename.get('Version'), 'Method': pretmpfilename.get('Method'), 'Method': httpmethod, 'HeadersSent': pretmpfilename.get('HeadersSent'), 'URL': pretmpfilename.get('URL'), 'Code': pretmpfilename.get('Code'), 'Reason': pretmpfilename.get('Reason'), 'HTTPLib': pretmpfilename.get('HTTPLib')} + if(outfile == "-"): pretmpfilename = download_from_url_file_with_httpx2( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize[0], - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize[0], sleep, timeout) tmpfilename 
= pretmpfilename.get('Filename') downloadsize = int(os.path.getsize(tmpfilename)) fulldatasize = 0 @@ -6099,31 +3134,12 @@ def download_from_url_to_file_with_httpx2( datasize = len(databytes) fulldatasize = datasize + fulldatasize percentage = "" - if (downloadsize > 0): - percentage = str( - "{0:.2f}".format( - float( - float( - fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.') + "%" + if(downloadsize > 0): + percentage = str("{0:.2f}".format( + float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%" downloaddiff = fulldatasize - prevdownsize - log.info( - "Copying " + - get_readable_size( - fulldatasize, - 2, - "SI")['ReadableWithSuffix'] + - " / " + - get_readable_size( - downloadsize, - 2, - "SI")['ReadableWithSuffix'] + - " " + - str(percentage) + - " / Copied " + - get_readable_size( - downloaddiff, - 2, - "IEC")['ReadableWithSuffix']) + log.info("Copying "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")[ + 'ReadableWithSuffix']+" "+str(percentage)+" / Copied "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']) prevdownsize = fulldatasize f.write(databytes) f.seek(0) @@ -6132,120 +3148,56 @@ def download_from_url_to_file_with_httpx2( ft.close() os.remove(tmpfilename) exec_time_end = time.time() - log.info("It took " + hms_string(exec_time_start - - exec_time_end) + " to copy file.") - returnval = { - 'Type': "Content", - 'Content': fdata, - 'Contentsize': downloadsize, - 'ContentsizeAlt': { - 'IEC': get_readable_size( - downloadsize, - 2, - "IEC"), - 'SI': get_readable_size( - downloadsize, - 2, - "SI")}, - 'DownloadTime': pretmpfilename.get('DownloadTime'), - 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), - 'MoveFileTime': float( - exec_time_start - exec_time_end), - 'MoveFileTimeReadable': hms_string( - exec_time_start - exec_time_end), - 'Headers': pretmpfilename.get('Headers'), - 'Version': pretmpfilename.get('Version'), - 'Method': pretmpfilename.get('Method'), - 'Method': httpmethod, - 'HeadersSent': pretmpfilename.get('HeadersSent'), - 'URL': pretmpfilename.get('URL'), - 'Code': pretmpfilename.get('Code'), - 'Reason': pretmpfilename.get('Reason'), - 'HTTPLib': pretmpfilename.get('HTTPLib')} + log.info("It took "+hms_string(exec_time_start - + exec_time_end)+" to copy file.") + returnval = {'Type': "Content", 'Content': fdata, 'Contentsize': downloadsize, 'ContentsizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename.get('DownloadTime'), 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), 'MoveFileTime': float(exec_time_start - exec_time_end), 'MoveFileTimeReadable': hms_string( + exec_time_start - exec_time_end), 'Headers': pretmpfilename.get('Headers'), 'Version': pretmpfilename.get('Version'), 'Method': pretmpfilename.get('Method'), 'Method': httpmethod, 'HeadersSent': pretmpfilename.get('HeadersSent'), 'URL': pretmpfilename.get('URL'), 'Code': pretmpfilename.get('Code'), 'Reason': pretmpfilename.get('Reason'), 'HTTPLib': pretmpfilename.get('HTTPLib')} return returnval -if (not havehttpx): - def download_from_url_to_file_with_httpx2( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - outfile="-", - outpath=os.getcwd(), - ranges=[ - None, - None], - buffersize=[ - 524288, - 524288], - sleep=-1, - timeout=10): +if(not havehttpx): + def 
download_from_url_to_file_with_httpx2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10): returnval = download_from_url_to_file_with_urllib( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - outfile, - outpath, - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, outfile, outpath, sleep, timeout) return returnval -if (havehttpcore): - def download_from_url_with_httpcore( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - buffersize=524288, - sleep=-1, - timeout=10): +if(havehttpcore): + def download_from_url_with_httpcore(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10): global geturls_download_sleep, havezstd, havebrotli - if (sleep < 0): + if(sleep < 0): sleep = geturls_download_sleep - if (timeout <= 0): + if(timeout <= 0): timeout = 10 urlparts = urlparse.urlparse(httpurl) - if (isinstance(httpheaders, list)): + if(isinstance(httpheaders, list)): httpheaders = make_http_headers_from_list_to_dict(httpheaders) httpheaders = fix_header_names(httpheaders) - if (httpuseragent is not None): - if ('User-Agent' in httpheaders): + if(httpuseragent is not None): + if('User-Agent' in httpheaders): httpheaders['User-Agent'] = httpuseragent else: httpuseragent.update({'User-Agent': httpuseragent}) - if (httpreferer is not None): - if ('Referer' in httpheaders): + if(httpreferer is not None): + if('Referer' in httpheaders): httpheaders['Referer'] = httpreferer else: httpuseragent.update({'Referer': httpreferer}) - if (urlparts.username is not None or urlparts.password is not None): - if (sys.version[0] == "2"): + if(urlparts.username is not None or urlparts.password is not None): + if(sys.version[0] == "2"): inurlencode = b64encode( - str(urlparts.username + ":" + urlparts.password)) - if (sys.version[0] >= "3"): + str(urlparts.username+":"+urlparts.password)) + if(sys.version[0] >= "3"): inurlencode = b64encode( - str(urlparts.username + ":" + urlparts.password).encode()).decode("UTF-8") - httpheaders.update({'Authorization': "Basic " + inurlencode}) + str(urlparts.username+":"+urlparts.password).encode()).decode("UTF-8") + httpheaders.update({'Authorization': "Basic "+inurlencode}) time.sleep(sleep) - if (postdata is not None and not isinstance(postdata, dict)): + if(postdata is not None and not isinstance(postdata, dict)): postdata = urlencode(postdata) try: - if (httpmethod == "GET"): + if(httpmethod == "GET"): httpx_pool = httpcore.ConnectionPool(http1=True, http2=False) geturls_text = httpx_pool.request( "GET", httpurl, headers=httpheaders) - elif (httpmethod == "POST"): + elif(httpmethod == "POST"): httpx_pool = httpcore.ConnectionPool(http1=True, http2=False) geturls_text = httpx_pool.request( "GET", httpurl, data=postdata, headers=httpheaders) @@ -6254,13 +3206,13 @@ def download_from_url_with_httpcore( geturls_text = httpx_pool.request( "GET", httpurl, headers=httpheaders) except httpcore.ConnectTimeout: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False except httpcore.ConnectError: - log.info("Error With URL " + httpurl) 
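+            # As with the timeout cases in this try block, a connect error is
+            # logged and reported as a soft failure (return False) rather than raised.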
+ log.info("Error With URL "+httpurl) return False except socket.timeout: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False httpcodeout = geturls_text.status httpcodereason = http_status_to_reason(geturls_text.status) @@ -6269,35 +3221,35 @@ def download_from_url_with_httpcore( httpurlout = str(httpurl) httpheaderout = geturls_text.headers httpheadersentout = httpheaders - if (isinstance(httpheaderout, list)): + if(isinstance(httpheaderout, list)): httpheaderout = dict( make_http_headers_from_list_to_dict(httpheaderout)) - if (sys.version[0] == "2"): + if(sys.version[0] == "2"): try: prehttpheaderout = httpheaderout httpheaderkeys = httpheaderout.keys() imax = len(httpheaderkeys) ic = 0 httpheaderout = {} - while (ic < imax): + while(ic < imax): httpheaderout.update( {httpheaderkeys[ic]: prehttpheaderout[httpheaderkeys[ic]]}) ic += 1 except AttributeError: pass httpheaderout = fix_header_names(httpheaderout) - if (isinstance(httpheadersentout, list)): + if(isinstance(httpheadersentout, list)): httpheadersentout = dict( make_http_headers_from_list_to_dict(httpheadersentout)) httpheadersentout = fix_header_names(httpheadersentout) downloadsize = httpheaderout.get('Content-Length') - if (downloadsize is not None): + if(downloadsize is not None): downloadsize = int(downloadsize) if downloadsize is None: downloadsize = 0 fulldatasize = 0 prevdownsize = 0 - log.info("Downloading URL " + httpurl) + log.info("Downloading URL "+httpurl) with BytesIO() as strbuf: while True: databytes = geturls_text.read() @@ -6306,391 +3258,159 @@ def download_from_url_with_httpcore( datasize = len(databytes) fulldatasize = datasize + fulldatasize percentage = "" - if (downloadsize > 0): - percentage = str( - "{0:.2f}".format( - float( - float( - fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.') + "%" + if(downloadsize > 0): + percentage = str("{0:.2f}".format( + float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%" downloaddiff = fulldatasize - prevdownsize - log.info( - "Downloading " + - get_readable_size( - fulldatasize, - 2, - "SI")['ReadableWithSuffix'] + - " / " + - get_readable_size( - downloadsize, - 2, - "SI")['ReadableWithSuffix'] + - " " + - str(percentage) + - " / Downloaded " + - get_readable_size( - downloaddiff, - 2, - "IEC")['ReadableWithSuffix']) + log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")[ + 'ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']) prevdownsize = fulldatasize strbuf.write(databytes) break strbuf.seek(0) returnval_content = strbuf.read() geturls_text.close() - if (httpheaderout.get("Content-Encoding") == "gzip"): + if(httpheaderout.get("Content-Encoding") == "gzip"): try: returnval_content = zlib.decompress( - returnval_content, 16 + zlib.MAX_WBITS) + returnval_content, 16+zlib.MAX_WBITS) except zlib.error: pass - elif (httpheaderout.get("Content-Encoding") == "deflate"): + elif(httpheaderout.get("Content-Encoding") == "deflate"): try: returnval_content = zlib.decompress(returnval_content) except zlib.error: pass - elif (httpheaderout.get("Content-Encoding") == "br" and havebrotli): + elif(httpheaderout.get("Content-Encoding") == "br" and havebrotli): try: returnval_content = brotli.decompress(returnval_content) except brotli.error: pass - elif (httpheaderout.get("Content-Encoding") == "zstd" and havezstd): + 
elif(httpheaderout.get("Content-Encoding") == "zstd" and havezstd): try: returnval_content = zstandard.decompress(returnval_content) except zstandard.error: pass - elif ((httpheaderout.get("Content-Encoding") == "lzma" or httpheaderout.get("Content-Encoding") == "xz") and havelzma): + elif((httpheaderout.get("Content-Encoding") == "lzma" or httpheaderout.get("Content-Encoding") == "xz") and havelzma): try: returnval_content = lzma.decompress(returnval_content) except zstandard.error: pass - elif (httpheaderout.get("Content-Encoding") == "bzip2"): + elif(httpheaderout.get("Content-Encoding") == "bzip2"): try: returnval_content = bz2.decompress(returnval_content) except zstandard.error: pass - returnval = { - 'Type': "Content", - 'Content': returnval_content, - 'Contentsize': fulldatasize, - 'ContentsizeAlt': { - 'IEC': get_readable_size( - fulldatasize, - 2, - "IEC"), - 'SI': get_readable_size( - fulldatasize, - 2, - "SI")}, - 'Headers': httpheaderout, - 'Version': httpversionout, - 'Method': httpmethodout, - 'HeadersSent': httpheadersentout, - 'URL': httpurlout, - 'Code': httpcodeout, - 'Reason': httpcodereason, - 'HTTPLib': "httpcore"} + returnval = {'Type': "Content", 'Content': returnval_content, 'Contentsize': fulldatasize, 'ContentsizeAlt': {'IEC': get_readable_size(fulldatasize, 2, "IEC"), 'SI': get_readable_size( + fulldatasize, 2, "SI")}, 'Headers': httpheaderout, 'Version': httpversionout, 'Method': httpmethodout, 'HeadersSent': httpheadersentout, 'URL': httpurlout, 'Code': httpcodeout, 'Reason': httpcodereason, 'HTTPLib': "httpcore"} geturls_text.close() return returnval -if (not havehttpcore): - def download_from_url_with_httpcore( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - buffersize=524288, - sleep=-1, - timeout=10): +if(not havehttpcore): + def download_from_url_with_httpcore(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10): returnval = download_from_url_with_urllib( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep, timeout) return returnval -if (havehttpcore): - def download_from_url_file_with_httpcore( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod="GET", - postdata=None, - ranges=[ - None, - None], - buffersize=524288, - sleep=-1, - timeout=10): +if(havehttpcore): + def download_from_url_file_with_httpcore(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10): global geturls_download_sleep, havezstd, havebrotli, tmpfileprefix, tmpfilesuffix exec_time_start = time.time() myhash = hashlib.new("sha1") - if (sys.version[0] == "2"): + if(sys.version[0] == "2"): myhash.update(httpurl) myhash.update(str(buffersize)) myhash.update(str(exec_time_start)) - if (sys.version[0] >= "3"): + if(sys.version[0] >= "3"): myhash.update(httpurl.encode('utf-8')) myhash.update(str(buffersize).encode('utf-8')) myhash.update(str(exec_time_start).encode('utf-8')) newtmpfilesuffix = tmpfilesuffix + str(myhash.hexdigest()) - if (sleep < 0): + if(sleep < 0): sleep = geturls_download_sleep - if (timeout <= 0): + if(timeout <= 0): timeout = 10 
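+        # Fetch the whole response body first; it is written out below to a
+        # uniquely named temporary file (suffix derived from the SHA-1 hash above).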
pretmpfilename = download_from_url_with_httpcore( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - sleep, - timeout) - if (not pretmpfilename): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep, timeout) + if(not pretmpfilename): return False with tempfile.NamedTemporaryFile('wb+', prefix=tmpfileprefix, suffix=newtmpfilesuffix, delete=False) as f: tmpfilename = f.name try: - os.utime( - tmpfilename, - (time.mktime( - email.utils.parsedate_to_datetime( - pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), - time.mktime( - email.utils.parsedate_to_datetime( - pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) + os.utime(tmpfilename, (time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), + time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) except AttributeError: try: - os.utime( - tmpfilename, - (time.mktime( - datetime.datetime.strptime( - pretmpfilename.get('Headers').get( - 'Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()), - time.mktime( - datetime.datetime.strptime( - pretmpfilename.get('Headers').get( - 'Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) + os.utime(tmpfilename, (time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()), + time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) except ValueError: pass except ValueError: pass - returnval = { - 'Type': "File", - 'Filename': tmpfilename, - 'Filesize': pretmpfilename.get('Contentsize'), - 'FilesizeAlt': { - 'IEC': get_readable_size( - pretmpfilename.get('Contentsize'), - 2, - "IEC"), - 'SI': get_readable_size( - pretmpfilename.get('Contentsize'), - 2, - "SI")}, - 'Headers': pretmpfilename.get('Headers'), - 'Version': pretmpfilename.get('Version'), - 'Method': pretmpfilename.get('Method'), - 'HeadersSent': pretmpfilename.get('HeadersSent'), - 'URL': pretmpfilename.get('URL'), - 'Code': pretmpfilename.get('Code'), - 'Reason': pretmpfilename.get('Reason'), - 'HTTPLib': pretmpfilename.get('HTTPLib')} + returnval = {'Type': "File", 'Filename': tmpfilename, 'Filesize': pretmpfilename.get('Contentsize'), 'FilesizeAlt': {'IEC': get_readable_size(pretmpfilename.get('Contentsize'), 2, "IEC"), 'SI': get_readable_size(pretmpfilename.get('Contentsize'), 2, "SI")}, 'Headers': pretmpfilename.get( + 'Headers'), 'Version': pretmpfilename.get('Version'), 'Method': pretmpfilename.get('Method'), 'HeadersSent': pretmpfilename.get('HeadersSent'), 'URL': pretmpfilename.get('URL'), 'Code': pretmpfilename.get('Code'), 'Reason': pretmpfilename.get('Reason'), 'HTTPLib': pretmpfilename.get('HTTPLib')} f.write(pretmpfilename.get('Content')) f.close() exec_time_end = time.time() - log.info("It took " + hms_string(exec_time_start - - exec_time_end) + " to download file.") - returnval.update( - { - 'Filesize': os.path.getsize(tmpfilename), - 'FilesizeAlt': { - 'IEC': get_readable_size( - os.path.getsize(tmpfilename), - 2, - "IEC"), - 'SI': get_readable_size( - os.path.getsize(tmpfilename), - 2, - "SI")}, - 'DownloadTime': float( - exec_time_start - - exec_time_end), - 'DownloadTimeReadable': hms_string( - exec_time_start - - exec_time_end)}) + log.info("It took "+hms_string(exec_time_start - + exec_time_end)+" to download 
file.") + returnval.update({'Filesize': os.path.getsize(tmpfilename), 'FilesizeAlt': {'IEC': get_readable_size(os.path.getsize(tmpfilename), 2, "IEC"), 'SI': get_readable_size( + os.path.getsize(tmpfilename), 2, "SI")}, 'DownloadTime': float(exec_time_start - exec_time_end), 'DownloadTimeReadable': hms_string(exec_time_start - exec_time_end)}) return returnval -if (not havehttpcore): - def download_from_url_file_with_httpcore( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - ranges=[ - None, - None], - buffersize=524288, - sleep=-1, - timeout=10): +if(not havehttpcore): + def download_from_url_file_with_httpcore(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10): returnval = download_from_url_file_with_urllib( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize, - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize, sleep, timeout) return returnval -if (havehttpcore): - def download_from_url_to_file_with_httpcore( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - outfile="-", - outpath=os.getcwd(), - ranges=[ - None, - None], - buffersize=[ - 524288, - 524288], - sleep=-1, - timeout=10): +if(havehttpcore): + def download_from_url_to_file_with_httpcore(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10): global geturls_download_sleep, havezstd, havebrotli - if (sleep < 0): + if(sleep < 0): sleep = geturls_download_sleep - if (timeout <= 0): + if(timeout <= 0): timeout = 10 - if (not outfile == "-"): + if(not outfile == "-"): outpath = outpath.rstrip(os.path.sep) - filepath = os.path.realpath(outpath + os.path.sep + outfile) - if (not os.path.exists(outpath)): + filepath = os.path.realpath(outpath+os.path.sep+outfile) + if(not os.path.exists(outpath)): os.makedirs(outpath) - if (os.path.exists(outpath) and os.path.isfile(outpath)): + if(os.path.exists(outpath) and os.path.isfile(outpath)): return False - if (os.path.exists(filepath) and os.path.isdir(filepath)): + if(os.path.exists(filepath) and os.path.isdir(filepath)): return False pretmpfilename = download_from_url_file_with_httpcore( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize[0], - sleep, - timeout) - if (not pretmpfilename): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize[0], sleep, timeout) + if(not pretmpfilename): return False tmpfilename = pretmpfilename.get('Filename') downloadsize = int(os.path.getsize(tmpfilename)) fulldatasize = 0 - log.info("Moving file " + tmpfilename + " to " + filepath) + log.info("Moving file "+tmpfilename+" to "+filepath) exec_time_start = time.time() shutil.move(tmpfilename, filepath) try: - os.utime( - filepath, - (time.mktime( - email.utils.parsedate_to_datetime( - pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), - time.mktime( - email.utils.parsedate_to_datetime( - 
pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) + os.utime(filepath, (time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), + time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) except AttributeError: try: - os.utime( - filepath, - (time.mktime( - datetime.datetime.strptime( - pretmpfilename.get('Headers').get( - 'Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()), - time.mktime( - datetime.datetime.strptime( - pretmpfilename.get('Headers').get( - 'Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) + os.utime(filepath, (time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()), + time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) except ValueError: pass except ValueError: pass exec_time_end = time.time() - log.info("It took " + hms_string(exec_time_start - - exec_time_end) + " to move file.") - if (os.path.exists(tmpfilename)): + log.info("It took "+hms_string(exec_time_start - + exec_time_end)+" to move file.") + if(os.path.exists(tmpfilename)): os.remove(tmpfilename) - returnval = { - 'Type': "File", - 'Filename': filepath, - 'Filesize': downloadsize, - 'FilesizeAlt': { - 'IEC': get_readable_size( - downloadsize, - 2, - "IEC"), - 'SI': get_readable_size( - downloadsize, - 2, - "SI")}, - 'DownloadTime': pretmpfilename.get('DownloadTime'), - 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), - 'MoveFileTime': float( - exec_time_start - exec_time_end), - 'MoveFileTimeReadable': hms_string( - exec_time_start - exec_time_end), - 'Headers': pretmpfilename.get('Headers'), - 'Version': pretmpfilename.get('Version'), - 'Method': pretmpfilename.get('Method'), - 'Method': httpmethod, - 'HeadersSent': pretmpfilename.get('HeadersSent'), - 'URL': pretmpfilename.get('URL'), - 'Code': pretmpfilename.get('Code'), - 'Reason': pretmpfilename.get('Reason'), - 'HTTPLib': pretmpfilename.get('HTTPLib')} - if (outfile == "-"): + returnval = {'Type': "File", 'Filename': filepath, 'Filesize': downloadsize, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename.get('DownloadTime'), 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), 'MoveFileTime': float(exec_time_start - exec_time_end), 'MoveFileTimeReadable': hms_string( + exec_time_start - exec_time_end), 'Headers': pretmpfilename.get('Headers'), 'Version': pretmpfilename.get('Version'), 'Method': pretmpfilename.get('Method'), 'Method': httpmethod, 'HeadersSent': pretmpfilename.get('HeadersSent'), 'URL': pretmpfilename.get('URL'), 'Code': pretmpfilename.get('Code'), 'Reason': pretmpfilename.get('Reason'), 'HTTPLib': pretmpfilename.get('HTTPLib')} + if(outfile == "-"): pretmpfilename = download_from_url_file_with_httpcore( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize[0], - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize[0], sleep, timeout) tmpfilename = pretmpfilename.get('Filename') downloadsize = int(os.path.getsize(tmpfilename)) fulldatasize = 0 @@ -6705,31 +3425,12 @@ def download_from_url_to_file_with_httpcore( datasize = len(databytes) fulldatasize = datasize + fulldatasize 
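+                    # Report copy progress: percentage when the total size is
+                    # known, plus human-readable SI/IEC byte counts in the log.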
percentage = "" - if (downloadsize > 0): - percentage = str( - "{0:.2f}".format( - float( - float( - fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.') + "%" + if(downloadsize > 0): + percentage = str("{0:.2f}".format( + float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%" downloaddiff = fulldatasize - prevdownsize - log.info( - "Copying " + - get_readable_size( - fulldatasize, - 2, - "SI")['ReadableWithSuffix'] + - " / " + - get_readable_size( - downloadsize, - 2, - "SI")['ReadableWithSuffix'] + - " " + - str(percentage) + - " / Copied " + - get_readable_size( - downloaddiff, - 2, - "IEC")['ReadableWithSuffix']) + log.info("Copying "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")[ + 'ReadableWithSuffix']+" "+str(percentage)+" / Copied "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']) prevdownsize = fulldatasize f.write(databytes) f.seek(0) @@ -6738,120 +3439,56 @@ def download_from_url_to_file_with_httpcore( ft.close() os.remove(tmpfilename) exec_time_end = time.time() - log.info("It took " + hms_string(exec_time_start - - exec_time_end) + " to copy file.") - returnval = { - 'Type': "Content", - 'Content': fdata, - 'Contentsize': downloadsize, - 'ContentsizeAlt': { - 'IEC': get_readable_size( - downloadsize, - 2, - "IEC"), - 'SI': get_readable_size( - downloadsize, - 2, - "SI")}, - 'DownloadTime': pretmpfilename.get('DownloadTime'), - 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), - 'MoveFileTime': float( - exec_time_start - exec_time_end), - 'MoveFileTimeReadable': hms_string( - exec_time_start - exec_time_end), - 'Headers': pretmpfilename.get('Headers'), - 'Version': pretmpfilename.get('Version'), - 'Method': pretmpfilename.get('Method'), - 'Method': httpmethod, - 'HeadersSent': pretmpfilename.get('HeadersSent'), - 'URL': pretmpfilename.get('URL'), - 'Code': pretmpfilename.get('Code'), - 'Reason': pretmpfilename.get('Reason'), - 'HTTPLib': pretmpfilename.get('HTTPLib')} + log.info("It took "+hms_string(exec_time_start - + exec_time_end)+" to copy file.") + returnval = {'Type': "Content", 'Content': fdata, 'Contentsize': downloadsize, 'ContentsizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename.get('DownloadTime'), 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), 'MoveFileTime': float(exec_time_start - exec_time_end), 'MoveFileTimeReadable': hms_string( + exec_time_start - exec_time_end), 'Headers': pretmpfilename.get('Headers'), 'Version': pretmpfilename.get('Version'), 'Method': pretmpfilename.get('Method'), 'Method': httpmethod, 'HeadersSent': pretmpfilename.get('HeadersSent'), 'URL': pretmpfilename.get('URL'), 'Code': pretmpfilename.get('Code'), 'Reason': pretmpfilename.get('Reason'), 'HTTPLib': pretmpfilename.get('HTTPLib')} return returnval -if (not havehttpcore): - def download_from_url_to_file_with_httpcore( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - outfile="-", - outpath=os.getcwd(), - ranges=[ - None, - None], - buffersize=[ - 524288, - 524288], - sleep=-1, - timeout=10): +if(not havehttpcore): + def download_from_url_to_file_with_httpcore(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], 
buffersize=[524288, 524288], sleep=-1, timeout=10): returnval = download_from_url_to_file_with_urllib( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - outfile, - outpath, - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, outfile, outpath, sleep, timeout) return returnval -if (havehttpcore): - def download_from_url_with_httpcore2( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - buffersize=524288, - sleep=-1, - timeout=10): +if(havehttpcore): + def download_from_url_with_httpcore2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10): global geturls_download_sleep, havezstd, havebrotli - if (sleep < 0): + if(sleep < 0): sleep = geturls_download_sleep - if (timeout <= 0): + if(timeout <= 0): timeout = 10 urlparts = urlparse.urlparse(httpurl) - if (isinstance(httpheaders, list)): + if(isinstance(httpheaders, list)): httpheaders = make_http_headers_from_list_to_dict(httpheaders) httpheaders = fix_header_names(httpheaders) - if (httpuseragent is not None): - if ('User-Agent' in httpheaders): + if(httpuseragent is not None): + if('User-Agent' in httpheaders): httpheaders['User-Agent'] = httpuseragent else: httpuseragent.update({'User-Agent': httpuseragent}) - if (httpreferer is not None): - if ('Referer' in httpheaders): + if(httpreferer is not None): + if('Referer' in httpheaders): httpheaders['Referer'] = httpreferer else: httpuseragent.update({'Referer': httpreferer}) - if (urlparts.username is not None or urlparts.password is not None): - if (sys.version[0] == "2"): + if(urlparts.username is not None or urlparts.password is not None): + if(sys.version[0] == "2"): inurlencode = b64encode( - str(urlparts.username + ":" + urlparts.password)) - if (sys.version[0] >= "3"): + str(urlparts.username+":"+urlparts.password)) + if(sys.version[0] >= "3"): inurlencode = b64encode( - str(urlparts.username + ":" + urlparts.password).encode()).decode("UTF-8") - httpheaders.update({'Authorization': "Basic " + inurlencode}) + str(urlparts.username+":"+urlparts.password).encode()).decode("UTF-8") + httpheaders.update({'Authorization': "Basic "+inurlencode}) time.sleep(sleep) - if (postdata is not None and not isinstance(postdata, dict)): + if(postdata is not None and not isinstance(postdata, dict)): postdata = urlencode(postdata) try: - if (httpmethod == "GET"): + if(httpmethod == "GET"): httpx_pool = httpcore.ConnectionPool(http1=True, http2=True) geturls_text = httpx_pool.request( "GET", httpurl, headers=httpheaders) - elif (httpmethod == "POST"): + elif(httpmethod == "POST"): httpx_pool = httpcore.ConnectionPool(http1=True, http2=True) geturls_text = httpx_pool.request( "GET", httpurl, data=postdata, headers=httpheaders) @@ -6860,13 +3497,13 @@ def download_from_url_with_httpcore2( geturls_text = httpx_pool.request( "GET", httpurl, headers=httpheaders) except httpcore.ConnectTimeout: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False except httpcore.ConnectError: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False except socket.timeout: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False httpcodeout = geturls_text.status httpcodereason = 
http_status_to_reason(geturls_text.status) @@ -6875,35 +3512,35 @@ def download_from_url_with_httpcore2( httpurlout = str(httpurl) httpheaderout = geturls_text.headers httpheadersentout = httpheaders - if (isinstance(httpheaderout, list)): + if(isinstance(httpheaderout, list)): httpheaderout = dict( make_http_headers_from_list_to_dict(httpheaderout)) - if (sys.version[0] == "2"): + if(sys.version[0] == "2"): try: prehttpheaderout = httpheaderout httpheaderkeys = httpheaderout.keys() imax = len(httpheaderkeys) ic = 0 httpheaderout = {} - while (ic < imax): + while(ic < imax): httpheaderout.update( {httpheaderkeys[ic]: prehttpheaderout[httpheaderkeys[ic]]}) ic += 1 except AttributeError: pass httpheaderout = fix_header_names(httpheaderout) - if (isinstance(httpheadersentout, list)): + if(isinstance(httpheadersentout, list)): httpheadersentout = dict( make_http_headers_from_list_to_dict(httpheadersentout)) httpheadersentout = fix_header_names(httpheadersentout) downloadsize = httpheaderout.get('Content-Length') - if (downloadsize is not None): + if(downloadsize is not None): downloadsize = int(downloadsize) if downloadsize is None: downloadsize = 0 fulldatasize = 0 prevdownsize = 0 - log.info("Downloading URL " + httpurl) + log.info("Downloading URL "+httpurl) with BytesIO() as strbuf: while True: databytes = geturls_text.read() @@ -6912,391 +3549,159 @@ def download_from_url_with_httpcore2( datasize = len(databytes) fulldatasize = datasize + fulldatasize percentage = "" - if (downloadsize > 0): - percentage = str( - "{0:.2f}".format( - float( - float( - fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.') + "%" + if(downloadsize > 0): + percentage = str("{0:.2f}".format( + float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%" downloaddiff = fulldatasize - prevdownsize - log.info( - "Downloading " + - get_readable_size( - fulldatasize, - 2, - "SI")['ReadableWithSuffix'] + - " / " + - get_readable_size( - downloadsize, - 2, - "SI")['ReadableWithSuffix'] + - " " + - str(percentage) + - " / Downloaded " + - get_readable_size( - downloaddiff, - 2, - "IEC")['ReadableWithSuffix']) + log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")[ + 'ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']) prevdownsize = fulldatasize strbuf.write(databytes) break strbuf.seek(0) returnval_content = strbuf.read() geturls_text.close() - if (httpheaderout.get("Content-Encoding") == "gzip"): + if(httpheaderout.get("Content-Encoding") == "gzip"): try: returnval_content = zlib.decompress( - returnval_content, 16 + zlib.MAX_WBITS) + returnval_content, 16+zlib.MAX_WBITS) except zlib.error: pass - elif (httpheaderout.get("Content-Encoding") == "deflate"): + elif(httpheaderout.get("Content-Encoding") == "deflate"): try: returnval_content = zlib.decompress(returnval_content) except zlib.error: pass - elif (httpheaderout.get("Content-Encoding") == "br" and havebrotli): + elif(httpheaderout.get("Content-Encoding") == "br" and havebrotli): try: returnval_content = brotli.decompress(returnval_content) except brotli.error: pass - elif (httpheaderout.get("Content-Encoding") == "zstd" and havezstd): + elif(httpheaderout.get("Content-Encoding") == "zstd" and havezstd): try: returnval_content = zstandard.decompress(returnval_content) except zstandard.error: pass - elif ((httpheaderout.get("Content-Encoding") == "lzma" or 
httpheaderout.get("Content-Encoding") == "xz") and havelzma): + elif((httpheaderout.get("Content-Encoding") == "lzma" or httpheaderout.get("Content-Encoding") == "xz") and havelzma): try: returnval_content = lzma.decompress(returnval_content) except zstandard.error: pass - elif (httpheaderout.get("Content-Encoding") == "bzip2"): + elif(httpheaderout.get("Content-Encoding") == "bzip2"): try: returnval_content = bz2.decompress(returnval_content) except zstandard.error: pass - returnval = { - 'Type': "Content", - 'Content': returnval_content, - 'Contentsize': fulldatasize, - 'ContentsizeAlt': { - 'IEC': get_readable_size( - fulldatasize, - 2, - "IEC"), - 'SI': get_readable_size( - fulldatasize, - 2, - "SI")}, - 'Headers': httpheaderout, - 'Version': httpversionout, - 'Method': httpmethodout, - 'HeadersSent': httpheadersentout, - 'URL': httpurlout, - 'Code': httpcodeout, - 'Reason': httpcodereason, - 'HTTPLib': "httpcore2"} + returnval = {'Type': "Content", 'Content': returnval_content, 'Contentsize': fulldatasize, 'ContentsizeAlt': {'IEC': get_readable_size(fulldatasize, 2, "IEC"), 'SI': get_readable_size( + fulldatasize, 2, "SI")}, 'Headers': httpheaderout, 'Version': httpversionout, 'Method': httpmethodout, 'HeadersSent': httpheadersentout, 'URL': httpurlout, 'Code': httpcodeout, 'Reason': httpcodereason, 'HTTPLib': "httpcore2"} geturls_text.close() return returnval -if (not havehttpcore): - def download_from_url_with_httpcore2( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - buffersize=524288, - sleep=-1, - timeout=10): +if(not havehttpcore): + def download_from_url_with_httpcore2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10): returnval = download_from_url_with_urllib( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep, timeout) return returnval -if (havehttpcore): - def download_from_url_file_with_httpcore2( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod="GET", - postdata=None, - ranges=[ - None, - None], - buffersize=524288, - sleep=-1, - timeout=10): +if(havehttpcore): + def download_from_url_file_with_httpcore2(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10): global geturls_download_sleep, havezstd, havebrotli, tmpfileprefix, tmpfilesuffix exec_time_start = time.time() myhash = hashlib.new("sha1") - if (sys.version[0] == "2"): + if(sys.version[0] == "2"): myhash.update(httpurl) myhash.update(str(buffersize)) myhash.update(str(exec_time_start)) - if (sys.version[0] >= "3"): + if(sys.version[0] >= "3"): myhash.update(httpurl.encode('utf-8')) myhash.update(str(buffersize).encode('utf-8')) myhash.update(str(exec_time_start).encode('utf-8')) newtmpfilesuffix = tmpfilesuffix + str(myhash.hexdigest()) - if (sleep < 0): + if(sleep < 0): sleep = geturls_download_sleep - if (timeout <= 0): + if(timeout <= 0): timeout = 10 pretmpfilename = download_from_url_with_httpcore2( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - sleep, - timeout) - if (not pretmpfilename): + httpurl, 
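# Illustrative sketch, not from the original module: the Content-Encoding
# chain above reduced to the stdlib codecs (brotli and zstandard are
# optional imports in this module and are omitted here). Note the
# original's lzma and bzip2 branches catch zstandard.error, which looks
# copied over from the zstd branch; lzma raises lzma.LZMAError and bz2
# raises OSError/ValueError.
import bz2
import lzma
import zlib

def decode_content(returnval_content, encoding):
    try:
        if encoding == "gzip":
            # 16 + MAX_WBITS tells zlib to expect a gzip wrapper
            return zlib.decompress(returnval_content, 16 + zlib.MAX_WBITS)
        if encoding == "deflate":
            return zlib.decompress(returnval_content)
        if encoding in ("lzma", "xz"):
            return lzma.decompress(returnval_content)
        if encoding == "bzip2":
            return bz2.decompress(returnval_content)
    except (zlib.error, lzma.LZMAError, OSError, ValueError):
        pass  # on any decode failure, fall through to the raw bytes
    return returnval_content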
httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep, timeout) + if(not pretmpfilename): return False with tempfile.NamedTemporaryFile('wb+', prefix=tmpfileprefix, suffix=newtmpfilesuffix, delete=False) as f: tmpfilename = f.name try: - os.utime( - tmpfilename, - (time.mktime( - email.utils.parsedate_to_datetime( - pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), - time.mktime( - email.utils.parsedate_to_datetime( - pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) + os.utime(tmpfilename, (time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), + time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) except AttributeError: try: - os.utime( - tmpfilename, - (time.mktime( - datetime.datetime.strptime( - pretmpfilename.get('Headers').get( - 'Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()), - time.mktime( - datetime.datetime.strptime( - pretmpfilename.get('Headers').get( - 'Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) + os.utime(tmpfilename, (time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()), + time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) except ValueError: pass except ValueError: pass - returnval = { - 'Type': "File", - 'Filename': tmpfilename, - 'Filesize': pretmpfilename.get('Contentsize'), - 'FilesizeAlt': { - 'IEC': get_readable_size( - pretmpfilename.get('Contentsize'), - 2, - "IEC"), - 'SI': get_readable_size( - pretmpfilename.get('Contentsize'), - 2, - "SI")}, - 'Headers': pretmpfilename.get('Headers'), - 'Version': pretmpfilename.get('Version'), - 'Method': pretmpfilename.get('Method'), - 'HeadersSent': pretmpfilename.get('HeadersSent'), - 'URL': pretmpfilename.get('URL'), - 'Code': pretmpfilename.get('Code'), - 'Reason': pretmpfilename.get('Reason'), - 'HTTPLib': pretmpfilename.get('HTTPLib')} + returnval = {'Type': "File", 'Filename': tmpfilename, 'Filesize': pretmpfilename.get('Contentsize'), 'FilesizeAlt': {'IEC': get_readable_size(pretmpfilename.get('Contentsize'), 2, "IEC"), 'SI': get_readable_size(pretmpfilename.get('Contentsize'), 2, "SI")}, 'Headers': pretmpfilename.get( + 'Headers'), 'Version': pretmpfilename.get('Version'), 'Method': pretmpfilename.get('Method'), 'HeadersSent': pretmpfilename.get('HeadersSent'), 'URL': pretmpfilename.get('URL'), 'Code': pretmpfilename.get('Code'), 'Reason': pretmpfilename.get('Reason'), 'HTTPLib': pretmpfilename.get('HTTPLib')} f.write(pretmpfilename.get('Content')) f.close() exec_time_end = time.time() - log.info("It took " + hms_string(exec_time_start - - exec_time_end) + " to download file.") - returnval.update( - { - 'Filesize': os.path.getsize(tmpfilename), - 'FilesizeAlt': { - 'IEC': get_readable_size( - os.path.getsize(tmpfilename), - 2, - "IEC"), - 'SI': get_readable_size( - os.path.getsize(tmpfilename), - 2, - "SI")}, - 'DownloadTime': float( - exec_time_start - - exec_time_end), - 'DownloadTimeReadable': hms_string( - exec_time_start - - exec_time_end)}) + log.info("It took "+hms_string(exec_time_start - + exec_time_end)+" to download file.") + returnval.update({'Filesize': os.path.getsize(tmpfilename), 'FilesizeAlt': {'IEC': get_readable_size(os.path.getsize(tmpfilename), 2, "IEC"), 'SI': get_readable_size( + os.path.getsize(tmpfilename), 2, "SI")}, 
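# Illustrative sketch, not from the original module: how the temp-file
# suffix above is derived, so concurrent downloads of the same URL get
# distinct names. Only the Python 3 branch of the version check is kept,
# and the ".tmp" default stands in for the module's tmpfilesuffix value.
import hashlib
import time

def new_tmp_suffix(httpurl, buffersize=524288, tmpfilesuffix=".tmp"):
    myhash = hashlib.new("sha1")
    myhash.update(httpurl.encode('utf-8'))
    myhash.update(str(buffersize).encode('utf-8'))
    myhash.update(str(time.time()).encode('utf-8'))
    return tmpfilesuffix + str(myhash.hexdigest())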
'DownloadTime': float(exec_time_start - exec_time_end), 'DownloadTimeReadable': hms_string(exec_time_start - exec_time_end)}) return returnval -if (not havehttpcore): - def download_from_url_file_with_httpcore2( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - ranges=[ - None, - None], - buffersize=524288, - sleep=-1, - timeout=10): +if(not havehttpcore): + def download_from_url_file_with_httpcore2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10): returnval = download_from_url_file_with_urllib( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize, - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize, sleep, timeout) return returnval -if (havehttpcore): - def download_from_url_to_file_with_httpcore2( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - outfile="-", - outpath=os.getcwd(), - ranges=[ - None, - None], - buffersize=[ - 524288, - 524288], - sleep=-1, - timeout=10): +if(havehttpcore): + def download_from_url_to_file_with_httpcore2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10): global geturls_download_sleep, havezstd, havebrotli - if (sleep < 0): + if(sleep < 0): sleep = geturls_download_sleep - if (timeout <= 0): + if(timeout <= 0): timeout = 10 - if (not outfile == "-"): + if(not outfile == "-"): outpath = outpath.rstrip(os.path.sep) - filepath = os.path.realpath(outpath + os.path.sep + outfile) - if (not os.path.exists(outpath)): + filepath = os.path.realpath(outpath+os.path.sep+outfile) + if(not os.path.exists(outpath)): os.makedirs(outpath) - if (os.path.exists(outpath) and os.path.isfile(outpath)): + if(os.path.exists(outpath) and os.path.isfile(outpath)): return False - if (os.path.exists(filepath) and os.path.isdir(filepath)): + if(os.path.exists(filepath) and os.path.isdir(filepath)): return False pretmpfilename = download_from_url_file_with_httpcore2( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize[0], - sleep, - timeout) - if (not pretmpfilename): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize[0], sleep, timeout) + if(not pretmpfilename): return False tmpfilename = pretmpfilename.get('Filename') downloadsize = int(os.path.getsize(tmpfilename)) fulldatasize = 0 - log.info("Moving file " + tmpfilename + " to " + filepath) + log.info("Moving file "+tmpfilename+" to "+filepath) exec_time_start = time.time() shutil.move(tmpfilename, filepath) try: - os.utime( - filepath, - (time.mktime( - email.utils.parsedate_to_datetime( - pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), - time.mktime( - email.utils.parsedate_to_datetime( - pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) + os.utime(filepath, (time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), + 
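# Illustrative sketch, not from the original module: the Last-Modified
# handling above as one helper. email.utils.parsedate_to_datetime is tried
# first (the AttributeError fallback in the original covers Python 2, where
# that function does not exist), then a strptime of the RFC 1123 form, and
# the parsed stamp is mirrored onto both atime and mtime.
import datetime
import email.utils
import os
import time

def apply_last_modified(path, last_modified):
    if last_modified is None:
        return
    try:
        dt = email.utils.parsedate_to_datetime(last_modified)
    except (TypeError, ValueError):
        try:
            dt = datetime.datetime.strptime(
                last_modified, "%a, %d %b %Y %H:%M:%S %Z")
        except ValueError:
            return
    stamp = time.mktime(dt.timetuple())
    os.utime(path, (stamp, stamp))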
time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) except AttributeError: try: - os.utime( - filepath, - (time.mktime( - datetime.datetime.strptime( - pretmpfilename.get('Headers').get( - 'Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()), - time.mktime( - datetime.datetime.strptime( - pretmpfilename.get('Headers').get( - 'Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) + os.utime(filepath, (time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()), + time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) except ValueError: pass except ValueError: pass exec_time_end = time.time() - log.info("It took " + hms_string(exec_time_start - - exec_time_end) + " to move file.") - if (os.path.exists(tmpfilename)): + log.info("It took "+hms_string(exec_time_start - + exec_time_end)+" to move file.") + if(os.path.exists(tmpfilename)): os.remove(tmpfilename) - returnval = { - 'Type': "File", - 'Filename': filepath, - 'Filesize': downloadsize, - 'FilesizeAlt': { - 'IEC': get_readable_size( - downloadsize, - 2, - "IEC"), - 'SI': get_readable_size( - downloadsize, - 2, - "SI")}, - 'DownloadTime': pretmpfilename.get('DownloadTime'), - 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), - 'MoveFileTime': float( - exec_time_start - exec_time_end), - 'MoveFileTimeReadable': hms_string( - exec_time_start - exec_time_end), - 'Headers': pretmpfilename.get('Headers'), - 'Version': pretmpfilename.get('Version'), - 'Method': pretmpfilename.get('Method'), - 'Method': httpmethod, - 'HeadersSent': pretmpfilename.get('HeadersSent'), - 'URL': pretmpfilename.get('URL'), - 'Code': pretmpfilename.get('Code'), - 'Reason': pretmpfilename.get('Reason'), - 'HTTPLib': pretmpfilename.get('HTTPLib')} - if (outfile == "-"): + returnval = {'Type': "File", 'Filename': filepath, 'Filesize': downloadsize, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename.get('DownloadTime'), 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), 'MoveFileTime': float(exec_time_start - exec_time_end), 'MoveFileTimeReadable': hms_string( + exec_time_start - exec_time_end), 'Headers': pretmpfilename.get('Headers'), 'Version': pretmpfilename.get('Version'), 'Method': pretmpfilename.get('Method'), 'Method': httpmethod, 'HeadersSent': pretmpfilename.get('HeadersSent'), 'URL': pretmpfilename.get('URL'), 'Code': pretmpfilename.get('Code'), 'Reason': pretmpfilename.get('Reason'), 'HTTPLib': pretmpfilename.get('HTTPLib')} + if(outfile == "-"): pretmpfilename = download_from_url_file_with_httpcore2( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize[0], - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize[0], sleep, timeout) tmpfilename = pretmpfilename.get('Filename') downloadsize = int(os.path.getsize(tmpfilename)) fulldatasize = 0 @@ -7311,31 +3716,12 @@ def download_from_url_to_file_with_httpcore2( datasize = len(databytes) fulldatasize = datasize + fulldatasize percentage = "" - if (downloadsize > 0): - percentage = str( - "{0:.2f}".format( - float( - float( - fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.') + "%" + if(downloadsize > 0): + 
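# Note, as a standalone demonstration: the returnval dict above lists
# 'Method' twice. In a Python dict literal the last duplicate wins, so the
# pretmpfilename.get('Method') value is silently replaced by httpmethod.
returnval_demo = {'Method': "from-response", 'Method': "from-argument"}
assert returnval_demo['Method'] == "from-argument"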
percentage = str("{0:.2f}".format( + float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%" downloaddiff = fulldatasize - prevdownsize - log.info( - "Copying " + - get_readable_size( - fulldatasize, - 2, - "SI")['ReadableWithSuffix'] + - " / " + - get_readable_size( - downloadsize, - 2, - "SI")['ReadableWithSuffix'] + - " " + - str(percentage) + - " / Copied " + - get_readable_size( - downloaddiff, - 2, - "IEC")['ReadableWithSuffix']) + log.info("Copying "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")[ + 'ReadableWithSuffix']+" "+str(percentage)+" / Copied "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']) prevdownsize = fulldatasize f.write(databytes) f.seek(0) @@ -7344,320 +3730,116 @@ def download_from_url_to_file_with_httpcore2( ft.close() os.remove(tmpfilename) exec_time_end = time.time() - log.info("It took " + hms_string(exec_time_start - - exec_time_end) + " to copy file.") - returnval = { - 'Type': "Content", - 'Content': fdata, - 'Contentsize': downloadsize, - 'ContentsizeAlt': { - 'IEC': get_readable_size( - downloadsize, - 2, - "IEC"), - 'SI': get_readable_size( - downloadsize, - 2, - "SI")}, - 'DownloadTime': pretmpfilename.get('DownloadTime'), - 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), - 'MoveFileTime': float( - exec_time_start - exec_time_end), - 'MoveFileTimeReadable': hms_string( - exec_time_start - exec_time_end), - 'Headers': pretmpfilename.get('Headers'), - 'Version': pretmpfilename.get('Version'), - 'Method': pretmpfilename.get('Method'), - 'Method': httpmethod, - 'HeadersSent': pretmpfilename.get('HeadersSent'), - 'URL': pretmpfilename.get('URL'), - 'Code': pretmpfilename.get('Code'), - 'Reason': pretmpfilename.get('Reason'), - 'HTTPLib': pretmpfilename.get('HTTPLib')} + log.info("It took "+hms_string(exec_time_start - + exec_time_end)+" to copy file.") + returnval = {'Type': "Content", 'Content': fdata, 'Contentsize': downloadsize, 'ContentsizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename.get('DownloadTime'), 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), 'MoveFileTime': float(exec_time_start - exec_time_end), 'MoveFileTimeReadable': hms_string( + exec_time_start - exec_time_end), 'Headers': pretmpfilename.get('Headers'), 'Version': pretmpfilename.get('Version'), 'Method': pretmpfilename.get('Method'), 'Method': httpmethod, 'HeadersSent': pretmpfilename.get('HeadersSent'), 'URL': pretmpfilename.get('URL'), 'Code': pretmpfilename.get('Code'), 'Reason': pretmpfilename.get('Reason'), 'HTTPLib': pretmpfilename.get('HTTPLib')} return returnval -if (not havehttpx): - def download_from_url_to_file_with_httpcore2( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - outfile="-", - outpath=os.getcwd(), - ranges=[ - None, - None], - buffersize=[ - 524288, - 524288], - sleep=-1, - timeout=10): +if(not havehttpx): + def download_from_url_to_file_with_httpcore2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10): returnval = download_from_url_to_file_with_urllib( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - 
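# Note, as a standalone demonstration: the timing logs above pass
# exec_time_start - exec_time_end, which is negative once any time has
# elapsed; the elapsed duration is end minus start.
import time

exec_time_start = time.time()
time.sleep(0.01)
exec_time_end = time.time()
assert exec_time_start - exec_time_end < 0   # the order used above
assert exec_time_end - exec_time_start > 0   # the duration presumably intended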
buffersize, - outfile, - outpath, - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, outfile, outpath, sleep, timeout) return returnval -if (haveurllib3): - def download_from_url_with_request3( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - buffersize=524288, - sleep=-1, - timeout=10): +if(haveurllib3): + def download_from_url_with_request3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10): returnval = download_from_url_with_urllib3( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep, timeout) return returnval -if (not haveurllib3): - def download_from_url_with_request3( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - buffersize=524288, - sleep=-1, - timeout=10): +if(not haveurllib3): + def download_from_url_with_request3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10): returnval = download_from_url_with_urllib( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep, timeout) return returnval -if (haveurllib3): - def download_from_url_file_with_request3( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - ranges=[ - None, - None], - buffersize=524288, - sleep=-1, - timeout=10): +if(haveurllib3): + def download_from_url_file_with_request3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10): returnval = download_from_url_file_with_urllib3( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize, - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize, sleep, timeout) return returnval -if (not haveurllib3): - def download_from_url_file_with_request3( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - ranges=[ - None, - None], - buffersize=524288, - sleep=-1, - timeout=10): +if(not haveurllib3): + def download_from_url_file_with_request3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10): returnval = download_from_url_file_with_urllib( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize, - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize, sleep, timeout) return returnval -if 
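# Illustrative sketch, not from the original module: the feature-flag
# pattern this whole section follows. Each backend gets a real definition
# when its optional import succeeded, and an identically named shim that
# delegates to the urllib implementation when it did not. The names below
# are illustrative, not the module's.
try:
    import urllib3
    haveurllib3 = True
except ImportError:
    haveurllib3 = False

def download_with_urllib(httpurl):
    from urllib.request import urlopen  # stdlib fallback path
    return urlopen(httpurl).read()

if haveurllib3:
    def download(httpurl):
        return urllib3.PoolManager().request("GET", httpurl).data
if not haveurllib3:
    def download(httpurl):
        return download_with_urllib(httpurl)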
(haveurllib3): - def download_from_url_to_file_with_request3( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - outfile="-", - outpath=os.getcwd(), - ranges=[ - None, - None], - buffersize=[ - 524288, - 524288], - sleep=-1, - timeout=10): +if(haveurllib3): + def download_from_url_to_file_with_request3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10): returnval = download_from_url_to_file_with_urllib3( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - outfile, - outpath, - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, outfile, outpath, sleep, timeout) return returnval -if (not haveurllib3): - def download_from_url_to_file_with_request3( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - outfile="-", - outpath=os.getcwd(), - ranges=[ - None, - None], - buffersize=[ - 524288, - 524288], - sleep=-1, - timeout=10): +if(not haveurllib3): + def download_from_url_to_file_with_request3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10): returnval = download_from_url_to_file_with_urllib( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - outfile, - outpath, - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, outfile, outpath, sleep, timeout) return returnval -if (haveurllib3): - def download_from_url_with_urllib3( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - buffersize=524288, - sleep=-1, - timeout=10): +if(haveurllib3): + def download_from_url_with_urllib3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10): global geturls_download_sleep, havezstd, havebrotli - if (sleep < 0): + if(sleep < 0): sleep = geturls_download_sleep - if (timeout <= 0): + if(timeout <= 0): timeout = 10 urlparts = urlparse.urlparse(httpurl) - if (isinstance(httpheaders, list)): + if(isinstance(httpheaders, list)): httpheaders = make_http_headers_from_list_to_dict(httpheaders) httpheaders = fix_header_names(httpheaders) - if (httpuseragent is not None): - if ('User-Agent' in httpheaders): + if(httpuseragent is not None): + if('User-Agent' in httpheaders): httpheaders['User-Agent'] = httpuseragent else: httpuseragent.update({'User-Agent': httpuseragent}) - if (httpreferer is not None): - if ('Referer' in httpheaders): + if(httpreferer is not None): + if('Referer' in httpheaders): httpheaders['Referer'] = httpreferer else: httpuseragent.update({'Referer': httpreferer}) - if (urlparts.username is not None or urlparts.password is not None): - if (sys.version[0] == "2"): + if(urlparts.username is not None or urlparts.password is not None): + if(sys.version[0] == "2"): inurlencode = b64encode( 
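# Illustrative sketch, not from the original module: the Authorization
# header this function (and the httpcore and mechanize branches) derives
# from credentials embedded in the URL; only the Python 3 branch of the
# version check is kept.
from base64 import b64encode
from urllib.parse import urlparse

def basic_auth_header(httpurl):
    urlparts = urlparse(httpurl)
    if urlparts.username is None and urlparts.password is None:
        return None
    inurlencode = b64encode(
        str((urlparts.username or "") + ":" + (urlparts.password or "")).encode()).decode("UTF-8")
    return {'Authorization': "Basic " + inurlencode}

# basic_auth_header("http://user:pass@example.com/") == {'Authorization': 'Basic dXNlcjpwYXNz'}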
- str(urlparts.username + ":" + urlparts.password)) - if (sys.version[0] >= "3"): + str(urlparts.username+":"+urlparts.password)) + if(sys.version[0] >= "3"): inurlencode = b64encode( - str(urlparts.username + ":" + urlparts.password).encode()).decode("UTF-8") - httpheaders.update({'Authorization': "Basic " + inurlencode}) + str(urlparts.username+":"+urlparts.password).encode()).decode("UTF-8") + httpheaders.update({'Authorization': "Basic "+inurlencode}) time.sleep(sleep) timeout = urllib3.util.Timeout(connect=timeout, read=timeout) urllib_pool = urllib3.PoolManager(headers=httpheaders, timeout=timeout) - if (postdata is not None and not isinstance(postdata, dict)): + if(postdata is not None and not isinstance(postdata, dict)): postdata = urlencode(postdata) try: - if (httpmethod == "GET"): + if(httpmethod == "GET"): geturls_text = urllib_pool.request( "GET", httpurl, headers=httpheaders, preload_content=False) - elif (httpmethod == "POST"): + elif(httpmethod == "POST"): geturls_text = urllib_pool.request( "POST", httpurl, body=postdata, headers=httpheaders, preload_content=False) else: geturls_text = urllib_pool.request( "GET", httpurl, headers=httpheaders, preload_content=False) except urllib3.exceptions.ConnectTimeoutError: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False except urllib3.exceptions.ConnectError: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False except urllib3.exceptions.MaxRetryError: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False except socket.timeout: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False except ValueError: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False httpcodeout = geturls_text.status httpcodereason = geturls_text.reason - if (geturls_text.version == "10"): + if(geturls_text.version == "10"): httpversionout = "1.0" else: httpversionout = "1.1" @@ -7665,35 +3847,35 @@ def download_from_url_with_urllib3( httpurlout = geturls_text.geturl() httpheaderout = geturls_text.info() httpheadersentout = httpheaders - if (isinstance(httpheaderout, list)): + if(isinstance(httpheaderout, list)): httpheaderout = dict( make_http_headers_from_list_to_dict(httpheaderout)) - if (sys.version[0] == "2"): + if(sys.version[0] == "2"): try: prehttpheaderout = httpheaderout httpheaderkeys = httpheaderout.keys() imax = len(httpheaderkeys) ic = 0 httpheaderout = {} - while (ic < imax): + while(ic < imax): httpheaderout.update( {httpheaderkeys[ic]: prehttpheaderout[httpheaderkeys[ic]]}) ic += 1 except AttributeError: pass httpheaderout = fix_header_names(httpheaderout) - if (isinstance(httpheadersentout, list)): + if(isinstance(httpheadersentout, list)): httpheadersentout = dict( make_http_headers_from_list_to_dict(httpheadersentout)) httpheadersentout = fix_header_names(httpheadersentout) downloadsize = httpheaderout.get('Content-Length') - if (downloadsize is not None): + if(downloadsize is not None): downloadsize = int(downloadsize) if downloadsize is None: downloadsize = 0 fulldatasize = 0 prevdownsize = 0 - log.info("Downloading URL " + httpurl) + log.info("Downloading URL "+httpurl) with BytesIO() as strbuf: while True: databytes = geturls_text.read(buffersize) @@ -7702,389 +3884,157 @@ def download_from_url_with_urllib3( datasize = len(databytes) fulldatasize = datasize + fulldatasize percentage = "" - if (downloadsize > 0): - percentage = str( - "{0:.2f}".format( - 
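# Illustrative sketch, not from the original module: the urllib3 path above
# in miniature. A shared connect/read timeout goes on the PoolManager,
# preload_content=False defers the body, and the response is drained in
# buffersize chunks.
import urllib3

def stream_with_urllib3(httpurl, httpheaders=None, buffersize=524288, timeout=10):
    tmo = urllib3.util.Timeout(connect=timeout, read=timeout)
    urllib_pool = urllib3.PoolManager(headers=httpheaders, timeout=tmo)
    geturls_text = urllib_pool.request(
        "GET", httpurl, headers=httpheaders, preload_content=False)
    chunks = []
    while True:
        databytes = geturls_text.read(buffersize)
        if not databytes:
            break
        chunks.append(databytes)
    geturls_text.release_conn()
    return geturls_text.status, b"".join(chunks)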
float( - float( - fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.') + "%" + if(downloadsize > 0): + percentage = str("{0:.2f}".format( + float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%" downloaddiff = fulldatasize - prevdownsize - log.info( - "Downloading " + - get_readable_size( - fulldatasize, - 2, - "SI")['ReadableWithSuffix'] + - " / " + - get_readable_size( - downloadsize, - 2, - "SI")['ReadableWithSuffix'] + - " " + - str(percentage) + - " / Downloaded " + - get_readable_size( - downloaddiff, - 2, - "IEC")['ReadableWithSuffix']) + log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")[ + 'ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']) prevdownsize = fulldatasize strbuf.write(databytes) strbuf.seek(0) returnval_content = strbuf.read() - if (httpheaderout.get("Content-Encoding") == "gzip"): + if(httpheaderout.get("Content-Encoding") == "gzip"): try: returnval_content = zlib.decompress( - returnval_content, 16 + zlib.MAX_WBITS) + returnval_content, 16+zlib.MAX_WBITS) except zlib.error: pass - elif (httpheaderout.get("Content-Encoding") == "deflate"): + elif(httpheaderout.get("Content-Encoding") == "deflate"): try: returnval_content = zlib.decompress(returnval_content) except zlib.error: pass - elif (httpheaderout.get("Content-Encoding") == "br" and havebrotli): + elif(httpheaderout.get("Content-Encoding") == "br" and havebrotli): try: returnval_content = brotli.decompress(returnval_content) except brotli.error: pass - elif (httpheaderout.get("Content-Encoding") == "zstd" and havezstd): + elif(httpheaderout.get("Content-Encoding") == "zstd" and havezstd): try: returnval_content = zstandard.decompress(returnval_content) except zstandard.error: pass - elif ((httpheaderout.get("Content-Encoding") == "lzma" or httpheaderout.get("Content-Encoding") == "xz") and havelzma): + elif((httpheaderout.get("Content-Encoding") == "lzma" or httpheaderout.get("Content-Encoding") == "xz") and havelzma): try: returnval_content = lzma.decompress(returnval_content) except zstandard.error: pass - elif (httpheaderout.get("Content-Encoding") == "bzip2"): + elif(httpheaderout.get("Content-Encoding") == "bzip2"): try: returnval_content = bz2.decompress(returnval_content) except zstandard.error: pass - returnval = { - 'Type': "Content", - 'Content': returnval_content, - 'Contentsize': fulldatasize, - 'ContentsizeAlt': { - 'IEC': get_readable_size( - fulldatasize, - 2, - "IEC"), - 'SI': get_readable_size( - fulldatasize, - 2, - "SI")}, - 'Headers': httpheaderout, - 'Version': httpversionout, - 'Method': httpmethodout, - 'HeadersSent': httpheadersentout, - 'URL': httpurlout, - 'Code': httpcodeout, - 'Reason': httpcodereason, - 'HTTPLib': "urllib3"} + returnval = {'Type': "Content", 'Content': returnval_content, 'Contentsize': fulldatasize, 'ContentsizeAlt': {'IEC': get_readable_size(fulldatasize, 2, "IEC"), 'SI': get_readable_size( + fulldatasize, 2, "SI")}, 'Headers': httpheaderout, 'Version': httpversionout, 'Method': httpmethodout, 'HeadersSent': httpheadersentout, 'URL': httpurlout, 'Code': httpcodeout, 'Reason': httpcodereason, 'HTTPLib': "urllib3"} geturls_text.close() return returnval -if (not haveurllib3): - def download_from_url_with_urllib3( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - buffersize=524288, - 
sleep=-1, - timeout=10): +if(not haveurllib3): + def download_from_url_with_urllib3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10): returnval = download_from_url_with_urllib( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep, timeout) return returnval -if (haveurllib3): - def download_from_url_file_with_urllib3( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - ranges=[ - None, - None], - buffersize=524288, - sleep=-1, - timeout=10): +if(haveurllib3): + def download_from_url_file_with_urllib3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10): global geturls_download_sleep, havezstd, havebrotli, tmpfileprefix, tmpfilesuffix exec_time_start = time.time() myhash = hashlib.new("sha1") - if (sys.version[0] == "2"): + if(sys.version[0] == "2"): myhash.update(httpurl) myhash.update(str(buffersize)) myhash.update(str(exec_time_start)) - if (sys.version[0] >= "3"): + if(sys.version[0] >= "3"): myhash.update(httpurl.encode('utf-8')) myhash.update(str(buffersize).encode('utf-8')) myhash.update(str(exec_time_start).encode('utf-8')) newtmpfilesuffix = tmpfilesuffix + str(myhash.hexdigest()) - if (sleep < 0): + if(sleep < 0): sleep = geturls_download_sleep - if (timeout <= 0): + if(timeout <= 0): timeout = 10 pretmpfilename = download_from_url_with_urllib3( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - sleep, - timeout) - if (not pretmpfilename): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep, timeout) + if(not pretmpfilename): return False with tempfile.NamedTemporaryFile('wb+', prefix=tmpfileprefix, suffix=newtmpfilesuffix, delete=False) as f: tmpfilename = f.name try: - os.utime( - tmpfilename, - (time.mktime( - email.utils.parsedate_to_datetime( - pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), - time.mktime( - email.utils.parsedate_to_datetime( - pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) + os.utime(tmpfilename, (time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), + time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) except AttributeError: try: - os.utime( - tmpfilename, - (time.mktime( - datetime.datetime.strptime( - pretmpfilename.get('Headers').get( - 'Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()), - time.mktime( - datetime.datetime.strptime( - pretmpfilename.get('Headers').get( - 'Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) + os.utime(tmpfilename, (time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()), + time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) except ValueError: pass except ValueError: pass - returnval = { - 'Type': "File", - 'Filename': tmpfilename, - 'Filesize': 
pretmpfilename.get('Contentsize'), - 'FilesizeAlt': { - 'IEC': get_readable_size( - pretmpfilename.get('Contentsize'), - 2, - "IEC"), - 'SI': get_readable_size( - pretmpfilename.get('Contentsize'), - 2, - "SI")}, - 'Headers': pretmpfilename.get('Headers'), - 'Version': pretmpfilename.get('Version'), - 'Method': pretmpfilename.get('Method'), - 'HeadersSent': pretmpfilename.get('HeadersSent'), - 'URL': pretmpfilename.get('URL'), - 'Code': pretmpfilename.get('Code'), - 'Reason': pretmpfilename.get('Reason'), - 'HTTPLib': pretmpfilename.get('HTTPLib')} + returnval = {'Type': "File", 'Filename': tmpfilename, 'Filesize': pretmpfilename.get('Contentsize'), 'FilesizeAlt': {'IEC': get_readable_size(pretmpfilename.get('Contentsize'), 2, "IEC"), 'SI': get_readable_size(pretmpfilename.get('Contentsize'), 2, "SI")}, 'Headers': pretmpfilename.get( + 'Headers'), 'Version': pretmpfilename.get('Version'), 'Method': pretmpfilename.get('Method'), 'HeadersSent': pretmpfilename.get('HeadersSent'), 'URL': pretmpfilename.get('URL'), 'Code': pretmpfilename.get('Code'), 'Reason': pretmpfilename.get('Reason'), 'HTTPLib': pretmpfilename.get('HTTPLib')} f.write(pretmpfilename.get('Content')) f.close() exec_time_end = time.time() - log.info("It took " + hms_string(exec_time_start - - exec_time_end) + " to download file.") - returnval.update( - { - 'Filesize': os.path.getsize(tmpfilename), - 'FilesizeAlt': { - 'IEC': get_readable_size( - os.path.getsize(tmpfilename), - 2, - "IEC"), - 'SI': get_readable_size( - os.path.getsize(tmpfilename), - 2, - "SI")}, - 'DownloadTime': float( - exec_time_start - - exec_time_end), - 'DownloadTimeReadable': hms_string( - exec_time_start - - exec_time_end)}) + log.info("It took "+hms_string(exec_time_start - + exec_time_end)+" to download file.") + returnval.update({'Filesize': os.path.getsize(tmpfilename), 'FilesizeAlt': {'IEC': get_readable_size(os.path.getsize(tmpfilename), 2, "IEC"), 'SI': get_readable_size( + os.path.getsize(tmpfilename), 2, "SI")}, 'DownloadTime': float(exec_time_start - exec_time_end), 'DownloadTimeReadable': hms_string(exec_time_start - exec_time_end)}) return returnval -if (not haveurllib3): - def download_from_url_file_with_urllib3( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - ranges=[ - None, - None], - buffersize=524288, - sleep=-1, - timeout=10): +if(not haveurllib3): + def download_from_url_file_with_urllib3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10): returnval = download_from_url_file_with_urllib( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize, - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize, sleep, timeout) return returnval -if (haveurllib3): - def download_from_url_to_file_with_urllib3( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - outfile="-", - outpath=os.getcwd(), - ranges=[ - None, - None], - buffersize=[ - 524288, - 524288], - sleep=-1, - timeout=10): +if(haveurllib3): + def download_from_url_to_file_with_urllib3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, 
outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10): global geturls_download_sleep, havezstd, havebrotli - if (sleep < 0): + if(sleep < 0): sleep = geturls_download_sleep - if (timeout <= 0): + if(timeout <= 0): timeout = 10 - if (not outfile == "-"): + if(not outfile == "-"): outpath = outpath.rstrip(os.path.sep) - filepath = os.path.realpath(outpath + os.path.sep + outfile) - if (not os.path.exists(outpath)): + filepath = os.path.realpath(outpath+os.path.sep+outfile) + if(not os.path.exists(outpath)): os.makedirs(outpath) - if (os.path.exists(outpath) and os.path.isfile(outpath)): + if(os.path.exists(outpath) and os.path.isfile(outpath)): return False - if (os.path.exists(filepath) and os.path.isdir(filepath)): + if(os.path.exists(filepath) and os.path.isdir(filepath)): return False pretmpfilename = download_from_url_file_with_urllib3( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize[0], - sleep, - timeout) - if (not pretmpfilename): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize[0], sleep, timeout) + if(not pretmpfilename): return False tmpfilename = pretmpfilename.get('Filename') downloadsize = int(os.path.getsize(tmpfilename)) fulldatasize = 0 - log.info("Moving file " + tmpfilename + " to " + filepath) + log.info("Moving file "+tmpfilename+" to "+filepath) exec_time_start = time.time() shutil.move(tmpfilename, filepath) try: - os.utime( - filepath, - (time.mktime( - email.utils.parsedate_to_datetime( - pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), - time.mktime( - email.utils.parsedate_to_datetime( - pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) + os.utime(filepath, (time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), + time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) except AttributeError: try: - os.utime( - filepath, - (time.mktime( - datetime.datetime.strptime( - pretmpfilename.get('Headers').get( - 'Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()), - time.mktime( - datetime.datetime.strptime( - pretmpfilename.get('Headers').get( - 'Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) + os.utime(filepath, (time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()), + time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) except ValueError: pass except ValueError: pass exec_time_end = time.time() - log.info("It took " + hms_string(exec_time_start - - exec_time_end) + " to move file.") - if (os.path.exists(tmpfilename)): + log.info("It took "+hms_string(exec_time_start - + exec_time_end)+" to move file.") + if(os.path.exists(tmpfilename)): os.remove(tmpfilename) - returnval = { - 'Type': "File", - 'Filename': filepath, - 'Filesize': downloadsize, - 'FilesizeAlt': { - 'IEC': get_readable_size( - downloadsize, - 2, - "IEC"), - 'SI': get_readable_size( - downloadsize, - 2, - "SI")}, - 'DownloadTime': pretmpfilename.get('DownloadTime'), - 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), - 'MoveFileTime': float( - exec_time_start - exec_time_end), - 'MoveFileTimeReadable': hms_string( - exec_time_start - exec_time_end), - 'Headers': 
pretmpfilename.get('Headers'), - 'Version': pretmpfilename.get('Version'), - 'Method': pretmpfilename.get('Method'), - 'Method': httpmethod, - 'HeadersSent': pretmpfilename.get('HeadersSent'), - 'URL': pretmpfilename.get('URL'), - 'Code': pretmpfilename.get('Code'), - 'Reason': pretmpfilename.get('Reason'), - 'HTTPLib': pretmpfilename.get('HTTPLib')} - if (outfile == "-"): + returnval = {'Type': "File", 'Filename': filepath, 'Filesize': downloadsize, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename.get('DownloadTime'), 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), 'MoveFileTime': float(exec_time_start - exec_time_end), 'MoveFileTimeReadable': hms_string( + exec_time_start - exec_time_end), 'Headers': pretmpfilename.get('Headers'), 'Version': pretmpfilename.get('Version'), 'Method': pretmpfilename.get('Method'), 'Method': httpmethod, 'HeadersSent': pretmpfilename.get('HeadersSent'), 'URL': pretmpfilename.get('URL'), 'Code': pretmpfilename.get('Code'), 'Reason': pretmpfilename.get('Reason'), 'HTTPLib': pretmpfilename.get('HTTPLib')} + if(outfile == "-"): pretmpfilename = download_from_url_file_with_urllib3( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize[0], - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize[0], sleep, timeout) tmpfilename = pretmpfilename.get('Filename') downloadsize = int(os.path.getsize(tmpfilename)) fulldatasize = 0 @@ -8099,31 +4049,12 @@ def download_from_url_to_file_with_urllib3( datasize = len(databytes) fulldatasize = datasize + fulldatasize percentage = "" - if (downloadsize > 0): - percentage = str( - "{0:.2f}".format( - float( - float( - fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.') + "%" + if(downloadsize > 0): + percentage = str("{0:.2f}".format( + float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%" downloaddiff = fulldatasize - prevdownsize - log.info( - "Copying " + - get_readable_size( - fulldatasize, - 2, - "SI")['ReadableWithSuffix'] + - " / " + - get_readable_size( - downloadsize, - 2, - "SI")['ReadableWithSuffix'] + - " " + - str(percentage) + - " / Copied " + - get_readable_size( - downloaddiff, - 2, - "IEC")['ReadableWithSuffix']) + log.info("Copying "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")[ + 'ReadableWithSuffix']+" "+str(percentage)+" / Copied "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']) prevdownsize = fulldatasize f.write(databytes) f.seek(0) @@ -8132,135 +4063,71 @@ def download_from_url_to_file_with_urllib3( ft.close() os.remove(tmpfilename) exec_time_end = time.time() - log.info("It took " + hms_string(exec_time_start - - exec_time_end) + " to copy file.") - returnval = { - 'Type': "Content", - 'Content': fdata, - 'Contentsize': downloadsize, - 'ContentsizeAlt': { - 'IEC': get_readable_size( - downloadsize, - 2, - "IEC"), - 'SI': get_readable_size( - downloadsize, - 2, - "SI")}, - 'DownloadTime': pretmpfilename.get('DownloadTime'), - 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), - 'MoveFileTime': float( - exec_time_start - exec_time_end), - 'MoveFileTimeReadable': hms_string( - exec_time_start - exec_time_end), - 'Headers': pretmpfilename.get('Headers'), - 'Version': pretmpfilename.get('Version'), - 'Method': 
pretmpfilename.get('Method'), - 'Method': httpmethod, - 'HeadersSent': pretmpfilename.get('HeadersSent'), - 'URL': pretmpfilename.get('URL'), - 'Code': pretmpfilename.get('Code'), - 'Reason': pretmpfilename.get('Reason'), - 'HTTPLib': pretmpfilename.get('HTTPLib')} + log.info("It took "+hms_string(exec_time_start - + exec_time_end)+" to copy file.") + returnval = {'Type': "Content", 'Content': fdata, 'Contentsize': downloadsize, 'ContentsizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename.get('DownloadTime'), 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), 'MoveFileTime': float(exec_time_start - exec_time_end), 'MoveFileTimeReadable': hms_string( + exec_time_start - exec_time_end), 'Headers': pretmpfilename.get('Headers'), 'Version': pretmpfilename.get('Version'), 'Method': pretmpfilename.get('Method'), 'Method': httpmethod, 'HeadersSent': pretmpfilename.get('HeadersSent'), 'URL': pretmpfilename.get('URL'), 'Code': pretmpfilename.get('Code'), 'Reason': pretmpfilename.get('Reason'), 'HTTPLib': pretmpfilename.get('HTTPLib')} return returnval -if (not haveurllib3): - def download_from_url_to_file_with_urllib3( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - outfile="-", - outpath=os.getcwd(), - ranges=[ - None, - None], - buffersize=[ - 524288, - 524288], - sleep=-1, - timeout=10): +if(not haveurllib3): + def download_from_url_to_file_with_urllib3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10): returnval = download_from_url_to_file_with_urllib( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - outfile, - outpath, - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, outfile, outpath, sleep, timeout) return returnval -if (havemechanize): - def download_from_url_with_mechanize( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - buffersize=524288, - sleep=-1, - timeout=10): +if(havemechanize): + def download_from_url_with_mechanize(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10): global geturls_download_sleep, havezstd, havebrotli - if (sleep < 0): + if(sleep < 0): sleep = geturls_download_sleep - if (timeout <= 0): + if(timeout <= 0): timeout = 10 urlparts = urlparse.urlparse(httpurl) - if (isinstance(httpheaders, list)): + if(isinstance(httpheaders, list)): httpheaders = make_http_headers_from_list_to_dict(httpheaders) httpheaders = fix_header_names(httpheaders) - if (httpuseragent is not None): - if ('User-Agent' in httpheaders): + if(httpuseragent is not None): + if('User-Agent' in httpheaders): httpheaders['User-Agent'] = httpuseragent else: httpuseragent.update({'User-Agent': httpuseragent}) - if (httpreferer is not None): - if ('Referer' in httpheaders): + if(httpreferer is not None): + if('Referer' in httpheaders): httpheaders['Referer'] = httpreferer else: httpuseragent.update({'Referer': httpreferer}) - if (urlparts.username is not None or 
urlparts.password is not None): - if (sys.version[0] == "2"): + if(urlparts.username is not None or urlparts.password is not None): + if(sys.version[0] == "2"): inurlencode = b64encode( - str(urlparts.username + ":" + urlparts.password)) - if (sys.version[0] >= "3"): + str(urlparts.username+":"+urlparts.password)) + if(sys.version[0] >= "3"): inurlencode = b64encode( - str(urlparts.username + ":" + urlparts.password).encode()).decode("UTF-8") - httpheaders.update({'Authorization': "Basic " + inurlencode}) + str(urlparts.username+":"+urlparts.password).encode()).decode("UTF-8") + httpheaders.update({'Authorization': "Basic "+inurlencode}) geturls_opener = mechanize.Browser() - if (isinstance(httpheaders, dict)): + if(isinstance(httpheaders, dict)): httpheaders = make_http_headers_from_dict_to_list(httpheaders) time.sleep(sleep) geturls_opener.addheaders = httpheaders geturls_opener.set_cookiejar(httpcookie) geturls_opener.set_handle_robots(False) - if (postdata is not None and not isinstance(postdata, dict)): + if(postdata is not None and not isinstance(postdata, dict)): postdata = urlencode(postdata) try: - if (httpmethod == "GET"): + if(httpmethod == "GET"): geturls_text = geturls_opener.open(httpurl) - elif (httpmethod == "POST"): + elif(httpmethod == "POST"): geturls_text = geturls_opener.open(httpurl, data=postdata) else: geturls_text = geturls_opener.open(httpurl) except mechanize.HTTPError as geturls_text_error: geturls_text = geturls_text_error - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) except URLError: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False except socket.timeout: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False httpcodeout = geturls_text.code httpcodereason = geturls_text.msg @@ -8270,35 +4137,35 @@ def download_from_url_with_mechanize( httpheaderout = geturls_text.info() reqhead = geturls_opener.request httpheadersentout = reqhead.header_items() - if (isinstance(httpheaderout, list)): + if(isinstance(httpheaderout, list)): httpheaderout = dict( make_http_headers_from_list_to_dict(httpheaderout)) - if (sys.version[0] == "2"): + if(sys.version[0] == "2"): try: prehttpheaderout = httpheaderout httpheaderkeys = httpheaderout.keys() imax = len(httpheaderkeys) ic = 0 httpheaderout = {} - while (ic < imax): + while(ic < imax): httpheaderout.update( {httpheaderkeys[ic]: prehttpheaderout[httpheaderkeys[ic]]}) ic += 1 except AttributeError: pass httpheaderout = fix_header_names(httpheaderout) - if (isinstance(httpheadersentout, list)): + if(isinstance(httpheadersentout, list)): httpheadersentout = dict( make_http_headers_from_list_to_dict(httpheadersentout)) httpheadersentout = fix_header_names(httpheadersentout) downloadsize = httpheaderout.get('Content-Length') - if (downloadsize is not None): + if(downloadsize is not None): downloadsize = int(downloadsize) if downloadsize is None: downloadsize = 0 fulldatasize = 0 prevdownsize = 0 - log.info("Downloading URL " + httpurl) + log.info("Downloading URL "+httpurl) with BytesIO() as strbuf: while True: databytes = geturls_text.read(buffersize) @@ -8307,389 +4174,157 @@ def download_from_url_with_mechanize( datasize = len(databytes) fulldatasize = datasize + fulldatasize percentage = "" - if (downloadsize > 0): - percentage = str( - "{0:.2f}".format( - float( - float( - fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.') + "%" + if(downloadsize > 0): + percentage = str("{0:.2f}".format( + 
float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%" downloaddiff = fulldatasize - prevdownsize - log.info( - "Downloading " + - get_readable_size( - fulldatasize, - 2, - "SI")['ReadableWithSuffix'] + - " / " + - get_readable_size( - downloadsize, - 2, - "SI")['ReadableWithSuffix'] + - " " + - str(percentage) + - " / Downloaded " + - get_readable_size( - downloaddiff, - 2, - "IEC")['ReadableWithSuffix']) + log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")[ + 'ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']) prevdownsize = fulldatasize strbuf.write(databytes) strbuf.seek(0) returnval_content = strbuf.read() - if (httpheaderout.get("Content-Encoding") == "gzip"): + if(httpheaderout.get("Content-Encoding") == "gzip"): try: returnval_content = zlib.decompress( - returnval_content, 16 + zlib.MAX_WBITS) + returnval_content, 16+zlib.MAX_WBITS) except zlib.error: pass - elif (httpheaderout.get("Content-Encoding") == "deflate"): + elif(httpheaderout.get("Content-Encoding") == "deflate"): try: returnval_content = zlib.decompress(returnval_content) except zlib.error: pass - elif (httpheaderout.get("Content-Encoding") == "br" and havebrotli): + elif(httpheaderout.get("Content-Encoding") == "br" and havebrotli): try: returnval_content = brotli.decompress(returnval_content) except brotli.error: pass - elif (httpheaderout.get("Content-Encoding") == "zstd" and havezstd): + elif(httpheaderout.get("Content-Encoding") == "zstd" and havezstd): try: returnval_content = zstandard.decompress(returnval_content) except zstandard.error: pass - elif ((httpheaderout.get("Content-Encoding") == "lzma" or httpheaderout.get("Content-Encoding") == "xz") and havelzma): + elif((httpheaderout.get("Content-Encoding") == "lzma" or httpheaderout.get("Content-Encoding") == "xz") and havelzma): try: returnval_content = lzma.decompress(returnval_content) except zstandard.error: pass - elif (httpheaderout.get("Content-Encoding") == "bzip2"): + elif(httpheaderout.get("Content-Encoding") == "bzip2"): try: returnval_content = bz2.decompress(returnval_content) except zstandard.error: pass - returnval = { - 'Type': "Content", - 'Content': returnval_content, - 'Contentsize': fulldatasize, - 'ContentsizeAlt': { - 'IEC': get_readable_size( - fulldatasize, - 2, - "IEC"), - 'SI': get_readable_size( - fulldatasize, - 2, - "SI")}, - 'Headers': httpheaderout, - 'Version': httpversionout, - 'Method': httpmethodout, - 'HeadersSent': httpheadersentout, - 'URL': httpurlout, - 'Code': httpcodeout, - 'Reason': httpcodereason, - 'HTTPLib': "mechanize"} + returnval = {'Type': "Content", 'Content': returnval_content, 'Contentsize': fulldatasize, 'ContentsizeAlt': {'IEC': get_readable_size(fulldatasize, 2, "IEC"), 'SI': get_readable_size( + fulldatasize, 2, "SI")}, 'Headers': httpheaderout, 'Version': httpversionout, 'Method': httpmethodout, 'HeadersSent': httpheadersentout, 'URL': httpurlout, 'Code': httpcodeout, 'Reason': httpcodereason, 'HTTPLib': "mechanize"} geturls_text.close() return returnval -if (not havemechanize): - def download_from_url_with_mechanize( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - buffersize=524288, - sleep=-1, - timeout=10): +if(not havemechanize): + def download_from_url_with_mechanize(httpurl, httpheaders=geturls_headers, httpuseragent=None, 
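# Illustrative sketch, not from the original module: the mechanize branch
# above in miniature. Headers go on as (name, value) pairs via addheaders,
# the shared cookiejar is installed, and robots.txt checking is disabled,
# as the module does. The User-Agent value here is a placeholder.
import mechanize
from http.cookiejar import CookieJar

def fetch_with_mechanize(httpurl, timeout=10):
    geturls_opener = mechanize.Browser()
    geturls_opener.set_cookiejar(CookieJar())
    geturls_opener.set_handle_robots(False)
    geturls_opener.addheaders = [("User-Agent", "Mozilla/5.0")]
    geturls_text = geturls_opener.open(httpurl, timeout=timeout)
    return geturls_text.code, geturls_text.read()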
httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10): returnval = download_from_url_with_urllib( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep, timeout) return returnval -if (havemechanize): - def download_from_url_file_with_mechanize( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - ranges=[ - None, - None], - buffersize=524288, - sleep=-1, - timeout=10): +if(havemechanize): + def download_from_url_file_with_mechanize(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10): global geturls_download_sleep, havezstd, havebrotli, tmpfileprefix, tmpfilesuffix exec_time_start = time.time() myhash = hashlib.new("sha1") - if (sys.version[0] == "2"): + if(sys.version[0] == "2"): myhash.update(httpurl) myhash.update(str(buffersize)) myhash.update(str(exec_time_start)) - if (sys.version[0] >= "3"): + if(sys.version[0] >= "3"): myhash.update(httpurl.encode('utf-8')) myhash.update(str(buffersize).encode('utf-8')) myhash.update(str(exec_time_start).encode('utf-8')) newtmpfilesuffix = tmpfilesuffix + str(myhash.hexdigest()) - if (sleep < 0): + if(sleep < 0): sleep = geturls_download_sleep - if (timeout <= 0): + if(timeout <= 0): timeout = 10 pretmpfilename = download_from_url_with_mechanize( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - sleep, - timeout) - if (not pretmpfilename): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep, timeout) + if(not pretmpfilename): return False with tempfile.NamedTemporaryFile('wb+', prefix=tmpfileprefix, suffix=newtmpfilesuffix, delete=False) as f: tmpfilename = f.name try: - os.utime( - tmpfilename, - (time.mktime( - email.utils.parsedate_to_datetime( - pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), - time.mktime( - email.utils.parsedate_to_datetime( - pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) + os.utime(tmpfilename, (time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), + time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) except AttributeError: try: - os.utime( - tmpfilename, - (time.mktime( - datetime.datetime.strptime( - pretmpfilename.get('Headers').get( - 'Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()), - time.mktime( - datetime.datetime.strptime( - pretmpfilename.get('Headers').get( - 'Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) + os.utime(tmpfilename, (time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()), + time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) except ValueError: pass except ValueError: pass - returnval = { - 'Type': "File", - 'Filename': tmpfilename, - 'Filesize': pretmpfilename.get('Contentsize'), - 'FilesizeAlt': { - 'IEC': get_readable_size( - pretmpfilename.get('Contentsize'), - 2, - "IEC"), - 
'SI': get_readable_size( - pretmpfilename.get('Contentsize'), - 2, - "SI")}, - 'Headers': pretmpfilename.get('Headers'), - 'Version': pretmpfilename.get('Version'), - 'Method': pretmpfilename.get('Method'), - 'HeadersSent': pretmpfilename.get('HeadersSent'), - 'URL': pretmpfilename.get('URL'), - 'Code': pretmpfilename.get('Code'), - 'Reason': pretmpfilename.get('Reason'), - 'HTTPLib': pretmpfilename.get('HTTPLib')} + returnval = {'Type': "File", 'Filename': tmpfilename, 'Filesize': pretmpfilename.get('Contentsize'), 'FilesizeAlt': {'IEC': get_readable_size(pretmpfilename.get('Contentsize'), 2, "IEC"), 'SI': get_readable_size(pretmpfilename.get('Contentsize'), 2, "SI")}, 'Headers': pretmpfilename.get( + 'Headers'), 'Version': pretmpfilename.get('Version'), 'Method': pretmpfilename.get('Method'), 'HeadersSent': pretmpfilename.get('HeadersSent'), 'URL': pretmpfilename.get('URL'), 'Code': pretmpfilename.get('Code'), 'Reason': pretmpfilename.get('Reason'), 'HTTPLib': pretmpfilename.get('HTTPLib')} f.write(pretmpfilename.get('Content')) f.close() exec_time_end = time.time() - log.info("It took " + hms_string(exec_time_start - - exec_time_end) + " to download file.") - returnval.update( - { - 'Filesize': os.path.getsize(tmpfilename), - 'FilesizeAlt': { - 'IEC': get_readable_size( - os.path.getsize(tmpfilename), - 2, - "IEC"), - 'SI': get_readable_size( - os.path.getsize(tmpfilename), - 2, - "SI")}, - 'DownloadTime': float( - exec_time_start - - exec_time_end), - 'DownloadTimeReadable': hms_string( - exec_time_start - - exec_time_end)}) + log.info("It took "+hms_string(exec_time_start - + exec_time_end)+" to download file.") + returnval.update({'Filesize': os.path.getsize(tmpfilename), 'FilesizeAlt': {'IEC': get_readable_size(os.path.getsize(tmpfilename), 2, "IEC"), 'SI': get_readable_size( + os.path.getsize(tmpfilename), 2, "SI")}, 'DownloadTime': float(exec_time_start - exec_time_end), 'DownloadTimeReadable': hms_string(exec_time_start - exec_time_end)}) return returnval -if (not havemechanize): - def download_from_url_file_with_mechanize( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - ranges=[ - None, - None], - buffersize=524288, - sleep=-1, - timeout=10): +if(not havemechanize): + def download_from_url_file_with_mechanize(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10): returnval = download_from_url_file_with_urllib( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize, - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize, sleep, timeout) return returnval -if (havemechanize): - def download_from_url_to_file_with_mechanize( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - outfile="-", - outpath=os.getcwd(), - ranges=[ - None, - None], - buffersize=[ - 524288, - 524288], - sleep=-1, - timeout=10): +if(havemechanize): + def download_from_url_to_file_with_mechanize(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10): global 
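One quirk recurs in every backend here: the duration logged through hms_string and stored under DownloadTime is exec_time_start - exec_time_end, which is a negative number of seconds for any real transfer. Presumably hms_string tolerates the sign, but the conventional operand order is end minus start, as in this runnable sketch:

import time

exec_time_start = time.time()
time.sleep(0.1)  # stand-in for the actual download work
exec_time_end = time.time()
elapsed = exec_time_end - exec_time_start  # end minus start: positive seconds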
geturls_download_sleep, havezstd, havebrotli - if (sleep < 0): + if(sleep < 0): sleep = geturls_download_sleep - if (timeout <= 0): + if(timeout <= 0): timeout = 10 - if (not outfile == "-"): + if(not outfile == "-"): outpath = outpath.rstrip(os.path.sep) - filepath = os.path.realpath(outpath + os.path.sep + outfile) - if (not os.path.exists(outpath)): + filepath = os.path.realpath(outpath+os.path.sep+outfile) + if(not os.path.exists(outpath)): os.makedirs(outpath) - if (os.path.exists(outpath) and os.path.isfile(outpath)): + if(os.path.exists(outpath) and os.path.isfile(outpath)): return False - if (os.path.exists(filepath) and os.path.isdir(filepath)): + if(os.path.exists(filepath) and os.path.isdir(filepath)): return False pretmpfilename = download_from_url_file_with_mechanize( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize[0], - sleep, - timeout) - if (not pretmpfilename): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize[0], sleep, timeout) + if(not pretmpfilename): return False tmpfilename = pretmpfilename.get('Filename') downloadsize = int(os.path.getsize(tmpfilename)) fulldatasize = 0 - log.info("Moving file " + tmpfilename + " to " + filepath) + log.info("Moving file "+tmpfilename+" to "+filepath) exec_time_start = time.time() shutil.move(tmpfilename, filepath) try: - os.utime( - filepath, - (time.mktime( - email.utils.parsedate_to_datetime( - pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), - time.mktime( - email.utils.parsedate_to_datetime( - pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) + os.utime(filepath, (time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), + time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) except AttributeError: try: - os.utime( - filepath, - (time.mktime( - datetime.datetime.strptime( - pretmpfilename.get('Headers').get( - 'Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()), - time.mktime( - datetime.datetime.strptime( - pretmpfilename.get('Headers').get( - 'Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) + os.utime(filepath, (time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()), + time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) except ValueError: pass except ValueError: pass exec_time_end = time.time() - log.info("It took " + hms_string(exec_time_start - - exec_time_end) + " to move file.") - if (os.path.exists(tmpfilename)): + log.info("It took "+hms_string(exec_time_start - + exec_time_end)+" to move file.") + if(os.path.exists(tmpfilename)): os.remove(tmpfilename) - returnval = { - 'Type': "File", - 'Filename': filepath, - 'Filesize': downloadsize, - 'FilesizeAlt': { - 'IEC': get_readable_size( - downloadsize, - 2, - "IEC"), - 'SI': get_readable_size( - downloadsize, - 2, - "SI")}, - 'DownloadTime': pretmpfilename.get('DownloadTime'), - 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), - 'MoveFileTime': float( - exec_time_start - exec_time_end), - 'MoveFileTimeReadable': hms_string( - exec_time_start - exec_time_end), - 'Headers': pretmpfilename.get('Headers'), - 'Version': pretmpfilename.get('Version'), - 'Method': pretmpfilename.get('Method'), - 'Method': 
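The to-file variants all run the same output-path checks before moving the temp file into place: strip a trailing separator, resolve the real target path, create the directory if missing, and bail out when outpath is actually a file or the target is already a directory. A condensed sketch of those guards; prepare_output_path is an illustrative name and os.path.join replaces the manual separator concatenation:

import os

def prepare_output_path(outpath, outfile):
    outpath = outpath.rstrip(os.path.sep)
    filepath = os.path.realpath(os.path.join(outpath, outfile))
    if not os.path.exists(outpath):
        os.makedirs(outpath)
    if os.path.isfile(outpath):
        return None  # outpath names a file, not a directory
    if os.path.isdir(filepath):
        return None  # target already exists as a directory
    return filepath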
httpmethod, - 'HeadersSent': pretmpfilename.get('HeadersSent'), - 'URL': pretmpfilename.get('URL'), - 'Code': pretmpfilename.get('Code'), - 'Reason': pretmpfilename.get('Reason'), - 'HTTPLib': pretmpfilename.get('HTTPLib')} - if (outfile == "-"): + returnval = {'Type': "File", 'Filename': filepath, 'Filesize': downloadsize, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename.get('DownloadTime'), 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), 'MoveFileTime': float(exec_time_start - exec_time_end), 'MoveFileTimeReadable': hms_string( + exec_time_start - exec_time_end), 'Headers': pretmpfilename.get('Headers'), 'Version': pretmpfilename.get('Version'), 'Method': pretmpfilename.get('Method'), 'Method': httpmethod, 'HeadersSent': pretmpfilename.get('HeadersSent'), 'URL': pretmpfilename.get('URL'), 'Code': pretmpfilename.get('Code'), 'Reason': pretmpfilename.get('Reason'), 'HTTPLib': pretmpfilename.get('HTTPLib')} + if(outfile == "-"): pretmpfilename = download_from_url_file_with_mechanize( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize[0], - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize[0], sleep, timeout) tmpfilename = pretmpfilename.get('Filename') downloadsize = int(os.path.getsize(tmpfilename)) fulldatasize = 0 @@ -8704,31 +4339,12 @@ def download_from_url_to_file_with_mechanize( datasize = len(databytes) fulldatasize = datasize + fulldatasize percentage = "" - if (downloadsize > 0): - percentage = str( - "{0:.2f}".format( - float( - float( - fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.') + "%" + if(downloadsize > 0): + percentage = str("{0:.2f}".format( + float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%" downloaddiff = fulldatasize - prevdownsize - log.info( - "Copying " + - get_readable_size( - fulldatasize, - 2, - "SI")['ReadableWithSuffix'] + - " / " + - get_readable_size( - downloadsize, - 2, - "SI")['ReadableWithSuffix'] + - " " + - str(percentage) + - " / Copied " + - get_readable_size( - downloaddiff, - 2, - "IEC")['ReadableWithSuffix']) + log.info("Copying "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")[ + 'ReadableWithSuffix']+" "+str(percentage)+" / Copied "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']) prevdownsize = fulldatasize f.write(databytes) f.seek(0) @@ -8737,122 +4353,58 @@ def download_from_url_to_file_with_mechanize( ft.close() os.remove(tmpfilename) exec_time_end = time.time() - log.info("It took " + hms_string(exec_time_start - - exec_time_end) + " to copy file.") - returnval = { - 'Type': "Content", - 'Content': fdata, - 'Contentsize': downloadsize, - 'ContentsizeAlt': { - 'IEC': get_readable_size( - downloadsize, - 2, - "IEC"), - 'SI': get_readable_size( - downloadsize, - 2, - "SI")}, - 'DownloadTime': pretmpfilename.get('DownloadTime'), - 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), - 'MoveFileTime': float( - exec_time_start - exec_time_end), - 'MoveFileTimeReadable': hms_string( - exec_time_start - exec_time_end), - 'Headers': pretmpfilename.get('Headers'), - 'Version': pretmpfilename.get('Version'), - 'Method': pretmpfilename.get('Method'), - 'Method': httpmethod, - 'HeadersSent': ['HeadersSent'], - 'URL': pretmpfilename.get('URL'), - 'Code': 
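The copy loop's progress percentage is built the same way in every backend: format to two decimals, then strip trailing zeros and a dangling decimal point. Extracted as a hypothetical helper:

def progress_percentage(done, total):
    # Same formatting as the hunks above: 50.00 renders as "50%",
    # 12.50 as "12.5%"; an unknown total yields an empty string.
    if total <= 0:
        return ""
    return "{0:.2f}".format(float(done) / total * 100).rstrip("0").rstrip(".") + "%"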
pretmpfilename.get('Code'), - 'Reason': pretmpfilename.get('Reason'), - 'HTTPLib': pretmpfilename.get('HTTPLib')} + log.info("It took "+hms_string(exec_time_start - + exec_time_end)+" to copy file.") + returnval = {'Type': "Content", 'Content': fdata, 'Contentsize': downloadsize, 'ContentsizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename.get('DownloadTime'), 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), 'MoveFileTime': float(exec_time_start - exec_time_end), 'MoveFileTimeReadable': hms_string( + exec_time_start - exec_time_end), 'Headers': pretmpfilename.get('Headers'), 'Version': pretmpfilename.get('Version'), 'Method': pretmpfilename.get('Method'), 'Method': httpmethod, 'HeadersSent': ['HeadersSent'], 'URL': pretmpfilename.get('URL'), 'Code': pretmpfilename.get('Code'), 'Reason': pretmpfilename.get('Reason'), 'HTTPLib': pretmpfilename.get('HTTPLib')} return returnval -if (not havemechanize): - def download_from_url_to_file_with_mechanize( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - outfile="-", - outpath=os.getcwd(), - ranges=[ - None, - None], - buffersize=[ - 524288, - 524288], - sleep=-1, - timeout=10): +if(not havemechanize): + def download_from_url_to_file_with_mechanize(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10): returnval = download_from_url_to_file_with_urllib( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - outfile, - outpath, - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, outfile, outpath, sleep, timeout) return returnval -if (havepycurl): - def download_from_url_with_pycurl( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - buffersize=524288, - sleep=-1, - timeout=10): +if(havepycurl): + def download_from_url_with_pycurl(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10): global geturls_download_sleep, havezstd, havebrotli - if (sleep < 0): + if(sleep < 0): sleep = geturls_download_sleep - if (timeout <= 0): + if(timeout <= 0): timeout = 10 urlparts = urlparse.urlparse(httpurl) - if (isinstance(httpheaders, list)): + if(isinstance(httpheaders, list)): httpheaders = make_http_headers_from_list_to_dict(httpheaders) httpheaders = fix_header_names(httpheaders) - if (httpuseragent is not None): - if ('User-Agent' in httpheaders): + if(httpuseragent is not None): + if('User-Agent' in httpheaders): httpheaders['User-Agent'] = httpuseragent else: httpuseragent.update({'User-Agent': httpuseragent}) - if (httpreferer is not None): - if ('Referer' in httpheaders): + if(httpreferer is not None): + if('Referer' in httpheaders): httpheaders['Referer'] = httpreferer else: httpuseragent.update({'Referer': httpreferer}) - if (urlparts.username is not None or urlparts.password is not None): - if (sys.version[0] == "2"): + if(urlparts.username is not None or urlparts.password is not None): + if(sys.version[0] == "2"): inurlencode = b64encode( 
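The pycurl branch above drives libcurl directly, collecting response headers and body into separate BytesIO buffers and reading the status code afterwards; a stripped-down sketch of that pattern follows (pycurl must be installed; pycurl_get is an illustrative name). Note also that in the User-Agent and Referer handling above, the fallback branches call httpuseragent.update(...) where httpheaders.update(...) is evidently intended; a string has no update method, so those branches would raise AttributeError if reached.

from io import BytesIO

import pycurl

def pycurl_get(url, timeout=10):
    body = BytesIO()
    headers = BytesIO()
    c = pycurl.Curl()
    c.setopt(c.URL, url)
    c.setopt(c.HEADERFUNCTION, headers.write)  # raw header lines
    c.setopt(c.WRITEFUNCTION, body.write)      # response body bytes
    c.setopt(c.FOLLOWLOCATION, True)
    c.setopt(c.TIMEOUT, timeout)
    c.perform()
    code = c.getinfo(c.HTTP_CODE)
    c.close()
    return code, headers.getvalue(), body.getvalue()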
- str(urlparts.username + ":" + urlparts.password)) - if (sys.version[0] >= "3"): + str(urlparts.username+":"+urlparts.password)) + if(sys.version[0] >= "3"): inurlencode = b64encode( - str(urlparts.username + ":" + urlparts.password).encode()).decode("UTF-8") - httpheaders.update({'Authorization': "Basic " + inurlencode}) + str(urlparts.username+":"+urlparts.password).encode()).decode("UTF-8") + httpheaders.update({'Authorization': "Basic "+inurlencode}) geturls_opener = build_opener(HTTPCookieProcessor(httpcookie)) - if (isinstance(httpheaders, dict)): + if(isinstance(httpheaders, dict)): httpheaders = make_http_headers_from_dict_to_pycurl(httpheaders) geturls_opener.addheaders = httpheaders time.sleep(sleep) - if (postdata is not None and not isinstance(postdata, dict)): + if(postdata is not None and not isinstance(postdata, dict)): postdata = urlencode(postdata) retrieved_body = BytesIO() retrieved_headers = BytesIO() try: - if (httpmethod == "GET"): + if(httpmethod == "GET"): geturls_text = pycurl.Curl() geturls_text.setopt(geturls_text.URL, httpurl) geturls_text.setopt( @@ -8863,7 +4415,7 @@ def download_from_url_with_pycurl( geturls_text.setopt(geturls_text.FOLLOWLOCATION, True) geturls_text.setopt(geturls_text.TIMEOUT, timeout) geturls_text.perform() - elif (httpmethod == "POST"): + elif(httpmethod == "POST"): geturls_text = pycurl.Curl() geturls_text.setopt(geturls_text.URL, httpurl) geturls_text.setopt( @@ -8888,24 +4440,23 @@ def download_from_url_with_pycurl( geturls_text.setopt(geturls_text.TIMEOUT, timeout) geturls_text.perform() retrieved_headers.seek(0) - if (sys.version[0] == "2"): + if(sys.version[0] == "2"): pycurlhead = retrieved_headers.read() - if (sys.version[0] >= "3"): + if(sys.version[0] >= "3"): pycurlhead = retrieved_headers.read().decode('UTF-8') pyhttpverinfo = re.findall( - r'^HTTP/([0-9.]+) (\d+)(?: ([A-Za-z\s]+))?$', - pycurlhead.splitlines()[0].strip().rstrip('\r\n'))[0] + r'^HTTP/([0-9.]+) (\d+)(?: ([A-Za-z\s]+))?$', pycurlhead.splitlines()[0].strip().rstrip('\r\n'))[0] pycurlheadersout = make_http_headers_from_pycurl_to_dict( pycurlhead) retrieved_body.seek(0) except socket.timeout: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False except socket.gaierror: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False except ValueError: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False httpcodeout = geturls_text.getinfo(geturls_text.HTTP_CODE) httpcodereason = http_status_to_reason( @@ -8915,35 +4466,35 @@ def download_from_url_with_pycurl( httpurlout = geturls_text.getinfo(geturls_text.EFFECTIVE_URL) httpheaderout = pycurlheadersout httpheadersentout = httpheaders - if (isinstance(httpheaderout, list)): + if(isinstance(httpheaderout, list)): httpheaderout = dict(make_http_headers_from_pycurl_to_dict( "\r\n".join(httpheaderout))) - if (sys.version[0] == "2"): + if(sys.version[0] == "2"): try: prehttpheaderout = httpheaderout httpheaderkeys = httpheaderout.keys() imax = len(httpheaderkeys) ic = 0 httpheaderout = {} - while (ic < imax): + while(ic < imax): httpheaderout.update( {httpheaderkeys[ic]: prehttpheaderout[httpheaderkeys[ic]]}) ic += 1 except AttributeError: pass httpheaderout = fix_header_names(httpheaderout) - if (isinstance(httpheadersentout, list)): + if(isinstance(httpheadersentout, list)): httpheadersentout = dict(make_http_headers_from_pycurl_to_dict( "\r\n".join(httpheadersentout))) httpheadersentout = 
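When the URL carries credentials, the code above base64-encodes user:pass and sends it as a Basic Authorization header, with the usual Python 2/3 bytes split. The Python 3 arm in isolation; basic_auth_header is an illustrative name:

from base64 import b64encode

def basic_auth_header(username, password):
    # Matches the construction above: "user:pass", base64-encoded,
    # prefixed with "Basic ".
    creds = b64encode((username + ":" + password).encode("utf-8")).decode("utf-8")
    return {'Authorization': "Basic " + creds}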
fix_header_names(httpheadersentout) downloadsize = httpheaderout.get('Content-Length') - if (downloadsize is not None): + if(downloadsize is not None): downloadsize = int(downloadsize) if downloadsize is None: downloadsize = 0 fulldatasize = 0 prevdownsize = 0 - log.info("Downloading URL " + httpurl) + log.info("Downloading URL "+httpurl) with BytesIO() as strbuf: while True: databytes = retrieved_body.read(buffersize) @@ -8952,389 +4503,157 @@ def download_from_url_with_pycurl( datasize = len(databytes) fulldatasize = datasize + fulldatasize percentage = "" - if (downloadsize > 0): - percentage = str( - "{0:.2f}".format( - float( - float( - fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.') + "%" + if(downloadsize > 0): + percentage = str("{0:.2f}".format( + float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%" downloaddiff = fulldatasize - prevdownsize - log.info( - "Downloading " + - get_readable_size( - fulldatasize, - 2, - "SI")['ReadableWithSuffix'] + - " / " + - get_readable_size( - downloadsize, - 2, - "SI")['ReadableWithSuffix'] + - " " + - str(percentage) + - " / Downloaded " + - get_readable_size( - downloaddiff, - 2, - "IEC")['ReadableWithSuffix']) + log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")[ + 'ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']) prevdownsize = fulldatasize strbuf.write(databytes) strbuf.seek(0) returnval_content = strbuf.read() - if (httpheaderout.get("Content-Encoding") == "gzip"): + if(httpheaderout.get("Content-Encoding") == "gzip"): try: returnval_content = zlib.decompress( - returnval_content, 16 + zlib.MAX_WBITS) + returnval_content, 16+zlib.MAX_WBITS) except zlib.error: pass - elif (httpheaderout.get("Content-Encoding") == "deflate"): + elif(httpheaderout.get("Content-Encoding") == "deflate"): try: returnval_content = zlib.decompress(returnval_content) except zlib.error: pass - elif (httpheaderout.get("Content-Encoding") == "br" and havebrotli): + elif(httpheaderout.get("Content-Encoding") == "br" and havebrotli): try: returnval_content = brotli.decompress(returnval_content) except brotli.error: pass - elif (httpheaderout.get("Content-Encoding") == "zstd" and havezstd): + elif(httpheaderout.get("Content-Encoding") == "zstd" and havezstd): try: returnval_content = zstandard.decompress(returnval_content) except zstandard.error: pass - elif ((httpheaderout.get("Content-Encoding") == "lzma" or httpheaderout.get("Content-Encoding") == "xz") and havelzma): + elif((httpheaderout.get("Content-Encoding") == "lzma" or httpheaderout.get("Content-Encoding") == "xz") and havelzma): try: returnval_content = lzma.decompress(returnval_content) except zstandard.error: pass - elif (httpheaderout.get("Content-Encoding") == "bzip2"): + elif(httpheaderout.get("Content-Encoding") == "bzip2"): try: returnval_content = bz2.decompress(returnval_content) except zstandard.error: pass - returnval = { - 'Type': "Content", - 'Content': returnval_content, - 'Contentsize': fulldatasize, - 'ContentsizeAlt': { - 'IEC': get_readable_size( - fulldatasize, - 2, - "IEC"), - 'SI': get_readable_size( - fulldatasize, - 2, - "SI")}, - 'Headers': httpheaderout, - 'Version': httpversionout, - 'Method': httpmethodout, - 'HeadersSent': httpheadersentout, - 'URL': httpurlout, - 'Code': httpcodeout, - 'Reason': httpcodereason, - 'HTTPLib': "pycurl"} + returnval = {'Type': "Content", 'Content': 
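After the read loop, the body is transparently decoded according to Content-Encoding. The standard-library part of that chain is sketched below; the brotli and zstd branches follow the same shape behind their have* flags. One caveat carried over from the original: the lzma and bzip2 branches there catch zstandard.error, which cannot match errors those codecs raise, so this sketch uses each codec's own exception classes instead.

import bz2
import lzma
import zlib

def decode_content(body, encoding):
    try:
        if encoding == "gzip":
            # 16 + MAX_WBITS tells zlib to expect a gzip wrapper
            return zlib.decompress(body, 16 + zlib.MAX_WBITS)
        if encoding == "deflate":
            return zlib.decompress(body)
        if encoding in ("lzma", "xz"):
            return lzma.decompress(body)
        if encoding == "bzip2":
            return bz2.decompress(body)
    except (zlib.error, lzma.LZMAError, OSError, ValueError):
        pass  # fall through and return the body undecoded, as the original does
    return body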
returnval_content, 'Contentsize': fulldatasize, 'ContentsizeAlt': {'IEC': get_readable_size(fulldatasize, 2, "IEC"), 'SI': get_readable_size( + fulldatasize, 2, "SI")}, 'Headers': httpheaderout, 'Version': httpversionout, 'Method': httpmethodout, 'HeadersSent': httpheadersentout, 'URL': httpurlout, 'Code': httpcodeout, 'Reason': httpcodereason, 'HTTPLib': "pycurl"} geturls_text.close() return returnval -if (not havepycurl): - def download_from_url_with_pycurl( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - buffersize=524288, - sleep=-1, - timeout=10): +if(not havepycurl): + def download_from_url_with_pycurl(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10): returnval = download_from_url_with_urllib( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep, timeout) return returnval -if (havepycurl): - def download_from_url_file_with_pycurl( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - ranges=[ - None, - None], - buffersize=524288, - sleep=-1, - timeout=10): +if(havepycurl): + def download_from_url_file_with_pycurl(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10): global geturls_download_sleep, havezstd, havebrotli, tmpfileprefix, tmpfilesuffix exec_time_start = time.time() myhash = hashlib.new("sha1") - if (sys.version[0] == "2"): + if(sys.version[0] == "2"): myhash.update(httpurl) myhash.update(str(buffersize)) myhash.update(str(exec_time_start)) - if (sys.version[0] >= "3"): + if(sys.version[0] >= "3"): myhash.update(httpurl.encode('utf-8')) myhash.update(str(buffersize).encode('utf-8')) myhash.update(str(exec_time_start).encode('utf-8')) newtmpfilesuffix = tmpfilesuffix + str(myhash.hexdigest()) - if (sleep < 0): + if(sleep < 0): sleep = geturls_download_sleep - if (timeout <= 0): + if(timeout <= 0): timeout = 10 pretmpfilename = download_from_url_with_pycurl( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - sleep, - timeout) - if (not pretmpfilename): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep, timeout) + if(not pretmpfilename): return False with tempfile.NamedTemporaryFile('wb+', prefix=tmpfileprefix, suffix=newtmpfilesuffix, delete=False) as f: tmpfilename = f.name try: - os.utime( - tmpfilename, - (time.mktime( - email.utils.parsedate_to_datetime( - pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), - time.mktime( - email.utils.parsedate_to_datetime( - pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) + os.utime(tmpfilename, (time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), + time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) except AttributeError: try: - os.utime( - tmpfilename, - (time.mktime( - datetime.datetime.strptime( - pretmpfilename.get('Headers').get( - 
'Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()), - time.mktime( - datetime.datetime.strptime( - pretmpfilename.get('Headers').get( - 'Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) + os.utime(tmpfilename, (time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()), + time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) except ValueError: pass except ValueError: pass - returnval = { - 'Type': "File", - 'Filename': tmpfilename, - 'Filesize': pretmpfilename.get('Contentsize'), - 'FilesizeAlt': { - 'IEC': get_readable_size( - pretmpfilename.get('Contentsize'), - 2, - "IEC"), - 'SI': get_readable_size( - pretmpfilename.get('Contentsize'), - 2, - "SI")}, - 'Headers': pretmpfilename.get('Headers'), - 'Version': pretmpfilename.get('Version'), - 'Method': pretmpfilename.get('Method'), - 'HeadersSent': pretmpfilename.get('HeadersSent'), - 'URL': pretmpfilename.get('URL'), - 'Code': pretmpfilename.get('Code'), - 'Reason': pretmpfilename.get('Reason'), - 'HTTPLib': pretmpfilename.get('HTTPLib')} + returnval = {'Type': "File", 'Filename': tmpfilename, 'Filesize': pretmpfilename.get('Contentsize'), 'FilesizeAlt': {'IEC': get_readable_size(pretmpfilename.get('Contentsize'), 2, "IEC"), 'SI': get_readable_size(pretmpfilename.get('Contentsize'), 2, "SI")}, 'Headers': pretmpfilename.get( + 'Headers'), 'Version': pretmpfilename.get('Version'), 'Method': pretmpfilename.get('Method'), 'HeadersSent': pretmpfilename.get('HeadersSent'), 'URL': pretmpfilename.get('URL'), 'Code': pretmpfilename.get('Code'), 'Reason': pretmpfilename.get('Reason'), 'HTTPLib': pretmpfilename.get('HTTPLib')} f.write(pretmpfilename.get('Content')) f.close() exec_time_end = time.time() - log.info("It took " + hms_string(exec_time_start - - exec_time_end) + " to download file.") - returnval.update( - { - 'Filesize': os.path.getsize(tmpfilename), - 'FilesizeAlt': { - 'IEC': get_readable_size( - os.path.getsize(tmpfilename), - 2, - "IEC"), - 'SI': get_readable_size( - os.path.getsize(tmpfilename), - 2, - "SI")}, - 'DownloadTime': float( - exec_time_start - - exec_time_end), - 'DownloadTimeReadable': hms_string( - exec_time_start - - exec_time_end)}) + log.info("It took "+hms_string(exec_time_start - + exec_time_end)+" to download file.") + returnval.update({'Filesize': os.path.getsize(tmpfilename), 'FilesizeAlt': {'IEC': get_readable_size(os.path.getsize(tmpfilename), 2, "IEC"), 'SI': get_readable_size( + os.path.getsize(tmpfilename), 2, "SI")}, 'DownloadTime': float(exec_time_start - exec_time_end), 'DownloadTimeReadable': hms_string(exec_time_start - exec_time_end)}) return returnval -if (not havepycurl): - def download_from_url_file_with_pycurl( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - ranges=[ - None, - None], - buffersize=524288, - sleep=-1, - timeout=10): +if(not havepycurl): + def download_from_url_file_with_pycurl(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10): returnval = download_from_url_file_with_urllib( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize, - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, 
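Every backend stamps the downloaded file with the server's Last-Modified time. The nested try blocks above exist because email.utils.parsedate_to_datetime is Python 3 only, hence the AttributeError fallback to a fixed strptime format. The same logic as one hypothetical helper:

import datetime
import email.utils
import os
import time

def set_mtime_from_last_modified(path, last_modified):
    # RFC 2822 parsing first, then the fixed-format fallback; give up
    # quietly if the header is missing or malformed.
    try:
        dt = email.utils.parsedate_to_datetime(last_modified)
    except (TypeError, ValueError):
        try:
            dt = datetime.datetime.strptime(last_modified,
                                            "%a, %d %b %Y %H:%M:%S %Z")
        except (TypeError, ValueError):
            return
    stamp = time.mktime(dt.timetuple())
    os.utime(path, (stamp, stamp))  # set atime and mtime together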
httpcookie, httpmethod, postdata, ranges, buffersize, sleep, timeout) return returnval -if (havepycurl): - def download_from_url_to_file_with_pycurl( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - outfile="-", - outpath=os.getcwd(), - ranges=[ - None, - None], - buffersize=[ - 524288, - 524288], - sleep=-1, - timeout=10): +if(havepycurl): + def download_from_url_to_file_with_pycurl(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10): global geturls_download_sleep, havezstd, havebrotli - if (sleep < 0): + if(sleep < 0): sleep = geturls_download_sleep - if (timeout <= 0): + if(timeout <= 0): timeout = 10 - if (not outfile == "-"): + if(not outfile == "-"): outpath = outpath.rstrip(os.path.sep) - filepath = os.path.realpath(outpath + os.path.sep + outfile) - if (not os.path.exists(outpath)): + filepath = os.path.realpath(outpath+os.path.sep+outfile) + if(not os.path.exists(outpath)): os.makedirs(outpath) - if (os.path.exists(outpath) and os.path.isfile(outpath)): + if(os.path.exists(outpath) and os.path.isfile(outpath)): return False - if (os.path.exists(filepath) and os.path.isdir(filepath)): + if(os.path.exists(filepath) and os.path.isdir(filepath)): return False pretmpfilename = download_from_url_file_with_pycurl( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize[0], - sleep, - timeout) - if (not pretmpfilename): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize[0], sleep, timeout) + if(not pretmpfilename): return False tmpfilename = pretmpfilename.get('Filename') downloadsize = int(os.path.getsize(tmpfilename)) fulldatasize = 0 - log.info("Moving file " + tmpfilename + " to " + filepath) + log.info("Moving file "+tmpfilename+" to "+filepath) exec_time_start = time.time() shutil.move(tmpfilename, filepath) try: - os.utime( - filepath, - (time.mktime( - email.utils.parsedate_to_datetime( - pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), - time.mktime( - email.utils.parsedate_to_datetime( - pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) + os.utime(filepath, (time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), + time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) except AttributeError: try: - os.utime( - filepath, - (time.mktime( - datetime.datetime.strptime( - pretmpfilename.get('Headers').get( - 'Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()), - time.mktime( - datetime.datetime.strptime( - pretmpfilename.get('Headers').get( - 'Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) + os.utime(filepath, (time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()), + time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) except ValueError: pass except ValueError: pass exec_time_end = time.time() - log.info("It took " + hms_string(exec_time_start - - exec_time_end) + " to move file.") - if (os.path.exists(tmpfilename)): + log.info("It took 
"+hms_string(exec_time_start - + exec_time_end)+" to move file.") + if(os.path.exists(tmpfilename)): os.remove(tmpfilename) - returnval = { - 'Type': "File", - 'Filename': filepath, - 'Filesize': downloadsize, - 'FilesizeAlt': { - 'IEC': get_readable_size( - downloadsize, - 2, - "IEC"), - 'SI': get_readable_size( - downloadsize, - 2, - "SI")}, - 'DownloadTime': pretmpfilename.get('DownloadTime'), - 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), - 'MoveFileTime': float( - exec_time_start - exec_time_end), - 'MoveFileTimeReadable': hms_string( - exec_time_start - exec_time_end), - 'Headers': pretmpfilename.get('Headers'), - 'Version': pretmpfilename.get('Version'), - 'Method': pretmpfilename.get('Method'), - 'Method': httpmethod, - 'HeadersSent': pretmpfilename.get('HeadersSent'), - 'URL': pretmpfilename.get('URL'), - 'Code': pretmpfilename.get('Code'), - 'Reason': pretmpfilename.get('Reason'), - 'HTTPLib': pretmpfilename.get('HTTPLib')} - if (outfile == "-"): + returnval = {'Type': "File", 'Filename': filepath, 'Filesize': downloadsize, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename.get('DownloadTime'), 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), 'MoveFileTime': float(exec_time_start - exec_time_end), 'MoveFileTimeReadable': hms_string( + exec_time_start - exec_time_end), 'Headers': pretmpfilename.get('Headers'), 'Version': pretmpfilename.get('Version'), 'Method': pretmpfilename.get('Method'), 'Method': httpmethod, 'HeadersSent': pretmpfilename.get('HeadersSent'), 'URL': pretmpfilename.get('URL'), 'Code': pretmpfilename.get('Code'), 'Reason': pretmpfilename.get('Reason'), 'HTTPLib': pretmpfilename.get('HTTPLib')} + if(outfile == "-"): pretmpfilename = download_from_url_file_with_pycurl( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize[0], - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize[0], sleep, timeout) tmpfilename = pretmpfilename.get('Filename') downloadsize = int(os.path.getsize(tmpfilename)) fulldatasize = 0 @@ -9349,31 +4668,12 @@ def download_from_url_to_file_with_pycurl( datasize = len(databytes) fulldatasize = datasize + fulldatasize percentage = "" - if (downloadsize > 0): - percentage = str( - "{0:.2f}".format( - float( - float( - fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.') + "%" + if(downloadsize > 0): + percentage = str("{0:.2f}".format( + float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%" downloaddiff = fulldatasize - prevdownsize - log.info( - "Copying " + - get_readable_size( - fulldatasize, - 2, - "SI")['ReadableWithSuffix'] + - " / " + - get_readable_size( - downloadsize, - 2, - "SI")['ReadableWithSuffix'] + - " " + - str(percentage) + - " / Copied " + - get_readable_size( - downloaddiff, - 2, - "IEC")['ReadableWithSuffix']) + log.info("Copying "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")[ + 'ReadableWithSuffix']+" "+str(percentage)+" / Copied "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']) prevdownsize = fulldatasize f.write(databytes) f.seek(0) @@ -9382,122 +4682,58 @@ def download_from_url_to_file_with_pycurl( ft.close() os.remove(tmpfilename) exec_time_end = time.time() - log.info("It took " + hms_string(exec_time_start - - exec_time_end) + " to copy 
file.") - returnval = { - 'Type': "Content", - 'Content': fdata, - 'Contentsize': downloadsize, - 'ContentsizeAlt': { - 'IEC': get_readable_size( - downloadsize, - 2, - "IEC"), - 'SI': get_readable_size( - downloadsize, - 2, - "SI")}, - 'DownloadTime': pretmpfilename.get('DownloadTime'), - 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), - 'MoveFileTime': float( - exec_time_start - exec_time_end), - 'MoveFileTimeReadable': hms_string( - exec_time_start - exec_time_end), - 'Headers': pretmpfilename.get('Headers'), - 'Version': pretmpfilename.get('Version'), - 'Method': pretmpfilename.get('Method'), - 'Method': httpmethod, - 'HeadersSent': pretmpfilename.get('HeadersSent'), - 'URL': pretmpfilename.get('URL'), - 'Code': pretmpfilename.get('Code'), - 'Reason': pretmpfilename.get('Reason'), - 'HTTPLib': pretmpfilename.get('HTTPLib')} + log.info("It took "+hms_string(exec_time_start - + exec_time_end)+" to copy file.") + returnval = {'Type': "Content", 'Content': fdata, 'Contentsize': downloadsize, 'ContentsizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename.get('DownloadTime'), 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), 'MoveFileTime': float(exec_time_start - exec_time_end), 'MoveFileTimeReadable': hms_string( + exec_time_start - exec_time_end), 'Headers': pretmpfilename.get('Headers'), 'Version': pretmpfilename.get('Version'), 'Method': pretmpfilename.get('Method'), 'Method': httpmethod, 'HeadersSent': pretmpfilename.get('HeadersSent'), 'URL': pretmpfilename.get('URL'), 'Code': pretmpfilename.get('Code'), 'Reason': pretmpfilename.get('Reason'), 'HTTPLib': pretmpfilename.get('HTTPLib')} return returnval -if (not havepycurl): - def download_from_url_to_file_with_pycurl( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - outfile="-", - outpath=os.getcwd(), - ranges=[ - None, - None], - buffersize=[ - 524288, - 524288], - sleep=-1, - timeout=10): +if(not havepycurl): + def download_from_url_to_file_with_pycurl(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10): returnval = download_from_url_to_file_with_urllib( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - outfile, - outpath, - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, outfile, outpath, sleep, timeout) return returnval -if (havepycurl and hasattr(pycurl, "CURL_HTTP_VERSION_2_0")): - def download_from_url_with_pycurl2( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - buffersize=524288, - sleep=-1, - timeout=10): +if(havepycurl and hasattr(pycurl, "CURL_HTTP_VERSION_2_0")): + def download_from_url_with_pycurl2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10): global geturls_download_sleep, havezstd, havebrotli - if (sleep < 0): + if(sleep < 0): sleep = geturls_download_sleep - if (timeout <= 0): + if(timeout <= 0): timeout = 10 urlparts = urlparse.urlparse(httpurl) - if 
(isinstance(httpheaders, list)): + if(isinstance(httpheaders, list)): httpheaders = make_http_headers_from_list_to_dict(httpheaders) httpheaders = fix_header_names(httpheaders) - if (httpuseragent is not None): - if ('User-Agent' in httpheaders): + if(httpuseragent is not None): + if('User-Agent' in httpheaders): httpheaders['User-Agent'] = httpuseragent else: httpuseragent.update({'User-Agent': httpuseragent}) - if (httpreferer is not None): - if ('Referer' in httpheaders): + if(httpreferer is not None): + if('Referer' in httpheaders): httpheaders['Referer'] = httpreferer else: httpuseragent.update({'Referer': httpreferer}) - if (urlparts.username is not None or urlparts.password is not None): - if (sys.version[0] == "2"): + if(urlparts.username is not None or urlparts.password is not None): + if(sys.version[0] == "2"): inurlencode = b64encode( - str(urlparts.username + ":" + urlparts.password)) - if (sys.version[0] >= "3"): + str(urlparts.username+":"+urlparts.password)) + if(sys.version[0] >= "3"): inurlencode = b64encode( - str(urlparts.username + ":" + urlparts.password).encode()).decode("UTF-8") - httpheaders.update({'Authorization': "Basic " + inurlencode}) + str(urlparts.username+":"+urlparts.password).encode()).decode("UTF-8") + httpheaders.update({'Authorization': "Basic "+inurlencode}) geturls_opener = build_opener(HTTPCookieProcessor(httpcookie)) - if (isinstance(httpheaders, dict)): + if(isinstance(httpheaders, dict)): httpheaders = make_http_headers_from_dict_to_pycurl(httpheaders) geturls_opener.addheaders = httpheaders time.sleep(sleep) - if (postdata is not None and not isinstance(postdata, dict)): + if(postdata is not None and not isinstance(postdata, dict)): postdata = urlencode(postdata) retrieved_body = BytesIO() retrieved_headers = BytesIO() try: - if (httpmethod == "GET"): + if(httpmethod == "GET"): geturls_text = pycurl.Curl() geturls_text.setopt(geturls_text.URL, httpurl) geturls_text.setopt(geturls_text.HTTP_VERSION, @@ -9510,7 +4746,7 @@ def download_from_url_with_pycurl2( geturls_text.setopt(geturls_text.FOLLOWLOCATION, True) geturls_text.setopt(geturls_text.TIMEOUT, timeout) geturls_text.perform() - elif (httpmethod == "POST"): + elif(httpmethod == "POST"): geturls_text = pycurl.Curl() geturls_text.setopt(geturls_text.URL, httpurl) geturls_text.setopt(geturls_text.HTTP_VERSION, @@ -9539,24 +4775,23 @@ def download_from_url_with_pycurl2( geturls_text.setopt(geturls_text.TIMEOUT, timeout) geturls_text.perform() retrieved_headers.seek(0) - if (sys.version[0] == "2"): + if(sys.version[0] == "2"): pycurlhead = retrieved_headers.read() - if (sys.version[0] >= "3"): + if(sys.version[0] >= "3"): pycurlhead = retrieved_headers.read().decode('UTF-8') pyhttpverinfo = re.findall( - r'^HTTP/([0-9.]+) (\d+)(?: ([A-Za-z\s]+))?$', - pycurlhead.splitlines()[0].strip())[0] + r'^HTTP/([0-9.]+) (\d+)(?: ([A-Za-z\s]+))?$', pycurlhead.splitlines()[0].strip())[0] pycurlheadersout = make_http_headers_from_pycurl_to_dict( pycurlhead) retrieved_body.seek(0) except socket.timeout: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False except socket.gaierror: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False except ValueError: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False httpcodeout = geturls_text.getinfo(geturls_text.HTTP_CODE) httpcodereason = http_status_to_reason( @@ -9566,35 +4801,35 @@ def download_from_url_with_pycurl2( httpurlout = 
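The pycurl2 variants differ from the plain pycurl ones only in requesting HTTP/2, guarded by hasattr(pycurl, "CURL_HTTP_VERSION_2_0") so that older libcurl/pycurl builds keep working. Reduced to its essentials; curl_http2_get is an illustrative name:

from io import BytesIO

import pycurl

def curl_http2_get(url, timeout=10):
    body = BytesIO()
    c = pycurl.Curl()
    c.setopt(c.URL, url)
    if hasattr(pycurl, "CURL_HTTP_VERSION_2_0"):
        # Ask libcurl for HTTP/2; it negotiates down if the server declines.
        c.setopt(c.HTTP_VERSION, pycurl.CURL_HTTP_VERSION_2_0)
    c.setopt(c.WRITEFUNCTION, body.write)
    c.setopt(c.FOLLOWLOCATION, True)
    c.setopt(c.TIMEOUT, timeout)
    c.perform()
    c.close()
    return body.getvalue()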
geturls_text.getinfo(geturls_text.EFFECTIVE_URL) httpheaderout = pycurlheadersout httpheadersentout = httpheaders - if (isinstance(httpheaderout, list)): + if(isinstance(httpheaderout, list)): httpheaderout = dict(make_http_headers_from_pycurl_to_dict( "\r\n".join(httpheaderout))) - if (sys.version[0] == "2"): + if(sys.version[0] == "2"): try: prehttpheaderout = httpheaderout httpheaderkeys = httpheaderout.keys() imax = len(httpheaderkeys) ic = 0 httpheaderout = {} - while (ic < imax): + while(ic < imax): httpheaderout.update( {httpheaderkeys[ic]: prehttpheaderout[httpheaderkeys[ic]]}) ic += 1 except AttributeError: pass httpheaderout = fix_header_names(httpheaderout) - if (isinstance(httpheadersentout, list)): + if(isinstance(httpheadersentout, list)): httpheadersentout = dict(make_http_headers_from_pycurl_to_dict( "\r\n".join(httpheadersentout))) httpheadersentout = fix_header_names(httpheadersentout) downloadsize = httpheaderout.get('Content-Length') - if (downloadsize is not None): + if(downloadsize is not None): downloadsize = int(downloadsize) if downloadsize is None: downloadsize = 0 fulldatasize = 0 prevdownsize = 0 - log.info("Downloading URL " + httpurl) + log.info("Downloading URL "+httpurl) with BytesIO() as strbuf: while True: databytes = retrieved_body.read(buffersize) @@ -9603,443 +4838,169 @@ def download_from_url_with_pycurl2( datasize = len(databytes) fulldatasize = datasize + fulldatasize percentage = "" - if (downloadsize > 0): - percentage = str( - "{0:.2f}".format( - float( - float( - fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.') + "%" + if(downloadsize > 0): + percentage = str("{0:.2f}".format( + float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%" downloaddiff = fulldatasize - prevdownsize - log.info( - "Downloading " + - get_readable_size( - fulldatasize, - 2, - "SI")['ReadableWithSuffix'] + - " / " + - get_readable_size( - downloadsize, - 2, - "SI")['ReadableWithSuffix'] + - " " + - str(percentage) + - " / Downloaded " + - get_readable_size( - downloaddiff, - 2, - "IEC")['ReadableWithSuffix']) + log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")[ + 'ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']) prevdownsize = fulldatasize strbuf.write(databytes) strbuf.seek(0) returnval_content = strbuf.read() - if (httpheaderout.get("Content-Encoding") == "gzip"): + if(httpheaderout.get("Content-Encoding") == "gzip"): try: returnval_content = zlib.decompress( - returnval_content, 16 + zlib.MAX_WBITS) + returnval_content, 16+zlib.MAX_WBITS) except zlib.error: pass - elif (httpheaderout.get("Content-Encoding") == "deflate"): + elif(httpheaderout.get("Content-Encoding") == "deflate"): try: returnval_content = zlib.decompress(returnval_content) except zlib.error: pass - elif (httpheaderout.get("Content-Encoding") == "br" and havebrotli): + elif(httpheaderout.get("Content-Encoding") == "br" and havebrotli): try: returnval_content = brotli.decompress(returnval_content) except brotli.error: pass - elif (httpheaderout.get("Content-Encoding") == "zstd" and havezstd): + elif(httpheaderout.get("Content-Encoding") == "zstd" and havezstd): try: returnval_content = zstandard.decompress(returnval_content) except zstandard.error: pass - elif ((httpheaderout.get("Content-Encoding") == "lzma" or httpheaderout.get("Content-Encoding") == "xz") and havelzma): + 
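The Python 2 branch above rebuilds the header mapping one key at a time with an index-driven while loop (the surrounding try/except AttributeError guards objects that do not expose keys()). For anything that does behave like a mapping, the loop is equivalent to a plain dict() copy:

prehttpheaderout = {'Content-Type': "text/html", 'Content-Length': "42"}
# dict() accepts any object providing keys() and __getitem__, so this
# single call performs the same copy the while loop spells out.
httpheaderout = dict(prehttpheaderout)
assert httpheaderout == prehttpheaderout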
elif((httpheaderout.get("Content-Encoding") == "lzma" or httpheaderout.get("Content-Encoding") == "xz") and havelzma): try: returnval_content = lzma.decompress(returnval_content) except zstandard.error: pass - elif (httpheaderout.get("Content-Encoding") == "bzip2"): + elif(httpheaderout.get("Content-Encoding") == "bzip2"): try: returnval_content = bz2.decompress(returnval_content) except zstandard.error: pass - returnval = { - 'Type': "Content", - 'Content': returnval_content, - 'Contentsize': fulldatasize, - 'ContentsizeAlt': { - 'IEC': get_readable_size( - fulldatasize, - 2, - "IEC"), - 'SI': get_readable_size( - fulldatasize, - 2, - "SI")}, - 'Headers': httpheaderout, - 'Version': httpversionout, - 'Method': httpmethodout, - 'HeadersSent': httpheadersentout, - 'URL': httpurlout, - 'Code': httpcodeout, - 'Reason': httpcodereason, - 'HTTPLib': "pycurl2"} + returnval = {'Type': "Content", 'Content': returnval_content, 'Contentsize': fulldatasize, 'ContentsizeAlt': {'IEC': get_readable_size(fulldatasize, 2, "IEC"), 'SI': get_readable_size( + fulldatasize, 2, "SI")}, 'Headers': httpheaderout, 'Version': httpversionout, 'Method': httpmethodout, 'HeadersSent': httpheadersentout, 'URL': httpurlout, 'Code': httpcodeout, 'Reason': httpcodereason, 'HTTPLib': "pycurl2"} geturls_text.close() return returnval -if (not havepycurl): - def download_from_url_with_pycurl2( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - buffersize=524288, - sleep=-1, - timeout=10): +if(not havepycurl): + def download_from_url_with_pycurl2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10): returnval = download_from_url_with_urllib( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep, timeout) return returnval -if (havepycurl and not hasattr(pycurl, "CURL_HTTP_VERSION_2_0")): - def download_from_url_with_pycurl2( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - buffersize=524288, - sleep=-1, - timeout=10): +if(havepycurl and not hasattr(pycurl, "CURL_HTTP_VERSION_2_0")): + def download_from_url_with_pycurl2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10): returnval = download_from_url_with_urllib( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep, timeout) return returnval -if (havepycurl and hasattr(pycurl, "CURL_HTTP_VERSION_2_0")): - def download_from_url_file_with_pycurl2( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - ranges=[ - None, - None], - buffersize=524288, - sleep=-1, - timeout=10): +if(havepycurl and hasattr(pycurl, "CURL_HTTP_VERSION_2_0")): + def download_from_url_file_with_pycurl2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", 
postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10): global geturls_download_sleep, havezstd, havebrotli, tmpfileprefix, tmpfilesuffix exec_time_start = time.time() myhash = hashlib.new("sha1") - if (sys.version[0] == "2"): + if(sys.version[0] == "2"): myhash.update(httpurl) myhash.update(str(buffersize)) myhash.update(str(exec_time_start)) - if (sys.version[0] >= "3"): + if(sys.version[0] >= "3"): myhash.update(httpurl.encode('utf-8')) myhash.update(str(buffersize).encode('utf-8')) myhash.update(str(exec_time_start).encode('utf-8')) newtmpfilesuffix = tmpfilesuffix + str(myhash.hexdigest()) - if (sleep < 0): + if(sleep < 0): sleep = geturls_download_sleep - if (timeout <= 0): + if(timeout <= 0): timeout = 10 pretmpfilename = download_from_url_with_pycurl2( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - sleep, - timeout) - if (not pretmpfilename): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep, timeout) + if(not pretmpfilename): return False with tempfile.NamedTemporaryFile('wb+', prefix=tmpfileprefix, suffix=newtmpfilesuffix, delete=False) as f: tmpfilename = f.name try: - os.utime( - tmpfilename, - (time.mktime( - email.utils.parsedate_to_datetime( - pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), - time.mktime( - email.utils.parsedate_to_datetime( - pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) + os.utime(tmpfilename, (time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), + time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) except AttributeError: try: - os.utime( - tmpfilename, - (time.mktime( - datetime.datetime.strptime( - pretmpfilename.get('Headers').get( - 'Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()), - time.mktime( - datetime.datetime.strptime( - pretmpfilename.get('Headers').get( - 'Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) + os.utime(tmpfilename, (time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()), + time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) except ValueError: pass except ValueError: pass - returnval = { - 'Type': "File", - 'Filename': tmpfilename, - 'Filesize': pretmpfilename.get('Contentsize'), - 'FilesizeAlt': { - 'IEC': get_readable_size( - pretmpfilename.get('Contentsize'), - 2, - "IEC"), - 'SI': get_readable_size( - pretmpfilename.get('Contentsize'), - 2, - "SI")}, - 'Headers': pretmpfilename.get('Headers'), - 'Version': pretmpfilename.get('Version'), - 'Method': pretmpfilename.get('Method'), - 'HeadersSent': pretmpfilename.get('HeadersSent'), - 'URL': pretmpfilename.get('URL'), - 'Code': pretmpfilename.get('Code'), - 'Reason': pretmpfilename.get('Reason'), - 'HTTPLib': pretmpfilename.get('HTTPLib')} + returnval = {'Type': "File", 'Filename': tmpfilename, 'Filesize': pretmpfilename.get('Contentsize'), 'FilesizeAlt': {'IEC': get_readable_size(pretmpfilename.get('Contentsize'), 2, "IEC"), 'SI': get_readable_size(pretmpfilename.get('Contentsize'), 2, "SI")}, 'Headers': pretmpfilename.get( + 'Headers'), 'Version': pretmpfilename.get('Version'), 'Method': pretmpfilename.get('Method'), 'HeadersSent': pretmpfilename.get('HeadersSent'), 'URL': pretmpfilename.get('URL'), 
'Code': pretmpfilename.get('Code'), 'Reason': pretmpfilename.get('Reason'), 'HTTPLib': pretmpfilename.get('HTTPLib')} f.write(pretmpfilename.get('Content')) f.close() exec_time_end = time.time() - log.info("It took " + hms_string(exec_time_start - - exec_time_end) + " to download file.") - returnval.update( - { - 'Filesize': os.path.getsize(tmpfilename), - 'FilesizeAlt': { - 'IEC': get_readable_size( - os.path.getsize(tmpfilename), - 2, - "IEC"), - 'SI': get_readable_size( - os.path.getsize(tmpfilename), - 2, - "SI")}, - 'DownloadTime': float( - exec_time_start - - exec_time_end), - 'DownloadTimeReadable': hms_string( - exec_time_start - - exec_time_end)}) + log.info("It took "+hms_string(exec_time_start - + exec_time_end)+" to download file.") + returnval.update({'Filesize': os.path.getsize(tmpfilename), 'FilesizeAlt': {'IEC': get_readable_size(os.path.getsize(tmpfilename), 2, "IEC"), 'SI': get_readable_size( + os.path.getsize(tmpfilename), 2, "SI")}, 'DownloadTime': float(exec_time_start - exec_time_end), 'DownloadTimeReadable': hms_string(exec_time_start - exec_time_end)}) return returnval -if (not havepycurl): - def download_from_url_file_with_pycurl2( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - ranges=[ - None, - None], - buffersize=524288, - sleep=-1, - timeout=10): +if(not havepycurl): + def download_from_url_file_with_pycurl2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10): returnval = download_from_url_file_with_urllib( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize, - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize, sleep, timeout) return returnval -if (havepycurl and not hasattr(pycurl, "CURL_HTTP_VERSION_2_0")): - def download_from_url_file_with_pycurl2( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - ranges=[ - None, - None], - buffersize=524288, - sleep=-1, - timeout=10): +if(havepycurl and not hasattr(pycurl, "CURL_HTTP_VERSION_2_0")): + def download_from_url_file_with_pycurl2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10): returnval = download_from_url_file_with_pycurl( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize, - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize, sleep, timeout) return returnval -if (havepycurl and hasattr(pycurl, "CURL_HTTP_VERSION_2_0")): - def download_from_url_to_file_with_pycurl2( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - outfile="-", - outpath=os.getcwd(), - ranges=[ - None, - None], - buffersize=[ - 524288, - 524288], - sleep=-1, - timeout=10): +if(havepycurl and hasattr(pycurl, "CURL_HTTP_VERSION_2_0")): + def download_from_url_to_file_with_pycurl2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, 
httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10): global geturls_download_sleep, havezstd, havebrotli - if (sleep < 0): + if(sleep < 0): sleep = geturls_download_sleep - if (timeout <= 0): + if(timeout <= 0): timeout = 10 - if (not outfile == "-"): + if(not outfile == "-"): outpath = outpath.rstrip(os.path.sep) - filepath = os.path.realpath(outpath + os.path.sep + outfile) - if (not os.path.exists(outpath)): + filepath = os.path.realpath(outpath+os.path.sep+outfile) + if(not os.path.exists(outpath)): os.makedirs(outpath) - if (os.path.exists(outpath) and os.path.isfile(outpath)): + if(os.path.exists(outpath) and os.path.isfile(outpath)): return False - if (os.path.exists(filepath) and os.path.isdir(filepath)): + if(os.path.exists(filepath) and os.path.isdir(filepath)): return False pretmpfilename = download_from_url_file_with_pycurl2( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize[0], - sleep, - timeout) - if (not pretmpfilename): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize[0], sleep, timeout) + if(not pretmpfilename): return False tmpfilename = pretmpfilename.get('Filename') downloadsize = int(os.path.getsize(tmpfilename)) fulldatasize = 0 - log.info("Moving file " + tmpfilename + " to " + filepath) + log.info("Moving file "+tmpfilename+" to "+filepath) exec_time_start = time.time() shutil.move(tmpfilename, filepath) try: - os.utime( - filepath, - (time.mktime( - email.utils.parsedate_to_datetime( - pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), - time.mktime( - email.utils.parsedate_to_datetime( - pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) + os.utime(filepath, (time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), + time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) except AttributeError: try: - os.utime( - filepath, - (time.mktime( - datetime.datetime.strptime( - pretmpfilename.get('Headers').get( - 'Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()), - time.mktime( - datetime.datetime.strptime( - pretmpfilename.get('Headers').get( - 'Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) + os.utime(filepath, (time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()), + time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) except ValueError: pass except ValueError: pass exec_time_end = time.time() - log.info("It took " + hms_string(exec_time_start - - exec_time_end) + " to move file.") - if (os.path.exists(tmpfilename)): + log.info("It took "+hms_string(exec_time_start - + exec_time_end)+" to move file.") + if(os.path.exists(tmpfilename)): os.remove(tmpfilename) - returnval = { - 'Type': "File", - 'Filename': filepath, - 'Filesize': downloadsize, - 'FilesizeAlt': { - 'IEC': get_readable_size( - downloadsize, - 2, - "IEC"), - 'SI': get_readable_size( - downloadsize, - 2, - "SI")}, - 'DownloadTime': pretmpfilename.get('DownloadTime'), - 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), - 'MoveFileTime': float( - exec_time_start - exec_time_end), - 'MoveFileTimeReadable': hms_string( - exec_time_start - exec_time_end), - 
-            'Headers': pretmpfilename.get('Headers'),
-            'Version': pretmpfilename.get('Version'),
-            'Method': pretmpfilename.get('Method'),
-            'Method': httpmethod,
-            'HeadersSent': pretmpfilename.get('HeadersSent'),
-            'URL': pretmpfilename.get('URL'),
-            'Code': pretmpfilename.get('Code'),
-            'Reason': pretmpfilename.get('Reason'),
-            'HTTPLib': pretmpfilename.get('HTTPLib')}
-        if (outfile == "-"):
+        returnval = {'Type': "File", 'Filename': filepath, 'Filesize': downloadsize, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename.get('DownloadTime'), 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), 'MoveFileTime': float(exec_time_start - exec_time_end), 'MoveFileTimeReadable': hms_string(
+            exec_time_start - exec_time_end), 'Headers': pretmpfilename.get('Headers'), 'Version': pretmpfilename.get('Version'), 'Method': httpmethod, 'HeadersSent': pretmpfilename.get('HeadersSent'), 'URL': pretmpfilename.get('URL'), 'Code': pretmpfilename.get('Code'), 'Reason': pretmpfilename.get('Reason'), 'HTTPLib': pretmpfilename.get('HTTPLib')}
+        if(outfile == "-"):
             pretmpfilename = download_from_url_file_with_pycurl2(
-                httpurl,
-                httpheaders,
-                httpuseragent,
-                httpreferer,
-                httpcookie,
-                httpmethod,
-                postdata,
-                ranges,
-                buffersize[0],
-                sleep,
-                timeout)
+                httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize[0], sleep, timeout)
             tmpfilename = pretmpfilename.get('Filename')
             downloadsize = int(os.path.getsize(tmpfilename))
             fulldatasize = 0
@@ -10054,31 +5015,12 @@ def download_from_url_to_file_with_pycurl2(
                 datasize = len(databytes)
                 fulldatasize = datasize + fulldatasize
                 percentage = ""
-                if (downloadsize > 0):
-                    percentage = str(
-                        "{0:.2f}".format(
-                            float(
-                                float(
-                                    fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.') + "%"
+                if(downloadsize > 0):
+                    percentage = str("{0:.2f}".format(
+                        float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%"
                 downloaddiff = fulldatasize - prevdownsize
-                log.info(
-                    "Copying " +
-                    get_readable_size(
-                        fulldatasize,
-                        2,
-                        "SI")['ReadableWithSuffix'] +
-                    " / " +
-                    get_readable_size(
-                        downloadsize,
-                        2,
-                        "SI")['ReadableWithSuffix'] +
-                    " " +
-                    str(percentage) +
-                    " / Copied " +
-                    get_readable_size(
-                        downloaddiff,
-                        2,
-                        "IEC")['ReadableWithSuffix'])
+                log.info("Copying "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")[
+                    'ReadableWithSuffix']+" "+str(percentage)+" / Copied "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix'])
                 prevdownsize = fulldatasize
                 f.write(databytes)
             f.seek(0)
@@ -10087,156 +5029,64 @@ def download_from_url_to_file_with_pycurl2(
             ft.close()
             os.remove(tmpfilename)
             exec_time_end = time.time()
-            log.info("It took " + hms_string(exec_time_start -
-                                             exec_time_end) + " to copy file.")
-            returnval = {
-                'Type': "Content",
-                'Content': fdata,
-                'Contentsize': downloadsize,
-                'ContentsizeAlt': {
-                    'IEC': get_readable_size(
-                        downloadsize,
-                        2,
-                        "IEC"),
-                    'SI': get_readable_size(
-                        downloadsize,
-                        2,
-                        "SI")},
-                'DownloadTime': pretmpfilename.get('DownloadTime'),
-                'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'),
-                'MoveFileTime': float(
-                    exec_time_start - exec_time_end),
-                'MoveFileTimeReadable': hms_string(
-                    exec_time_start - exec_time_end),
-                'Headers': pretmpfilename.get('Headers'),
-                'Version': pretmpfilename.get('Version'),
-                'Method': pretmpfilename.get('Method'),
-                'Method': httpmethod,
-                'HeadersSent': pretmpfilename.get('HeadersSent'),
-                'URL': pretmpfilename.get('URL'),
-                'Code': pretmpfilename.get('Code'),
-                'Reason': pretmpfilename.get('Reason'),
-                'HTTPLib': pretmpfilename.get('HTTPLib')}
+            log.info("It took "+hms_string(exec_time_start -
+                                           exec_time_end)+" to copy file.")
+            returnval = {'Type': "Content", 'Content': fdata, 'Contentsize': downloadsize, 'ContentsizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename.get('DownloadTime'), 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), 'MoveFileTime': float(exec_time_start - exec_time_end), 'MoveFileTimeReadable': hms_string(
+                exec_time_start - exec_time_end), 'Headers': pretmpfilename.get('Headers'), 'Version': pretmpfilename.get('Version'), 'Method': httpmethod, 'HeadersSent': pretmpfilename.get('HeadersSent'), 'URL': pretmpfilename.get('URL'), 'Code': pretmpfilename.get('Code'), 'Reason': pretmpfilename.get('Reason'), 'HTTPLib': pretmpfilename.get('HTTPLib')}
         return returnval

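Both to-file variants preserve the server's timestamp: they read Last-Modified from the response headers, parse it first with email.utils.parsedate_to_datetime and then with a fixed strptime format as a fallback, and stamp the result onto the file with os.utime. A condensed sketch of that chain, assuming headers is a plain dict; stamp_mtime_from_headers is an illustrative name, not a function from this module, and the except clauses are simplified relative to the AttributeError/ValueError ladder above:

import datetime
import email.utils
import os
import time


def stamp_mtime_from_headers(path, headers):
    # Returns True when the file's atime/mtime were set from Last-Modified.
    last_modified = headers.get('Last-Modified')
    if last_modified is None:
        return False
    try:
        # RFC 5322 date parser; covers the usual HTTP date format.
        mtime = time.mktime(
            email.utils.parsedate_to_datetime(last_modified).timetuple())
    except (TypeError, ValueError):
        try:
            # Fallback mirroring the strptime branch above.
            mtime = time.mktime(datetime.datetime.strptime(
                last_modified, "%a, %d %b %Y %H:%M:%S %Z").timetuple())
        except ValueError:
            return False
    os.utime(path, (mtime, mtime))  # os.utime takes (atime, mtime)
    return True
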
-if (not havepycurl):
-    def download_from_url_to_file_with_pycurl2(
-            httpurl,
-            httpheaders=geturls_headers,
-            httpuseragent=None,
-            httpreferer=None,
-            httpcookie=geturls_cj,
-            httpmethod="GET",
-            postdata=None,
-            outfile="-",
-            outpath=os.getcwd(),
-            ranges=[
-                None,
-                None],
-            buffersize=[
-                524288,
-                524288],
-            sleep=-1,
-            timeout=10):
+if(not havepycurl):
+    def download_from_url_to_file_with_pycurl2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10):
         returnval = download_from_url_to_file_with_urllib(
-            httpurl,
-            httpheaders,
-            httpuseragent,
-            httpreferer,
-            httpcookie,
-            httpmethod,
-            postdata,
-            buffersize,
-            outfile,
-            outpath,
-            sleep,
-            timeout)
+            httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, outfile, outpath, sleep, timeout)
         return returnval


-if (havepycurl and not hasattr(pycurl, "CURL_HTTP_VERSION_2_0")):
-    def download_from_url_to_file_with_pycurl2(
-            httpurl,
-            httpheaders=geturls_headers,
-            httpuseragent=None,
-            httpreferer=None,
-            httpcookie=geturls_cj,
-            httpmethod="GET",
-            postdata=None,
-            outfile="-",
-            outpath=os.getcwd(),
-            ranges=[
-                None,
-                None],
-            buffersize=[
-                524288,
-                524288],
-            sleep=-1,
-            timeout=10):
+if(havepycurl and not hasattr(pycurl, "CURL_HTTP_VERSION_2_0")):
+    def download_from_url_to_file_with_pycurl2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10):
         returnval = download_from_url_to_file_with_pycurl(
-            httpurl,
-            httpheaders,
-            httpuseragent,
-            httpreferer,
-            httpcookie,
-            httpmethod,
-            postdata,
-            buffersize,
-            outfile,
-            outpath,
-            sleep,
-            timeout)
+            httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, outfile, outpath, sleep, timeout)
         return returnval


-if (havepycurl and hasattr(pycurl, "CURL_HTTP_VERSION_3_0")):
-    def download_from_url_with_pycurl3(
-            httpurl,
-            httpheaders=geturls_headers,
-            httpuseragent=None,
-            httpreferer=None,
-            httpcookie=geturls_cj,
-            httpmethod="GET",
-            postdata=None,
-            buffersize=524288,
-            sleep=-1,
-            timeout=10):
+if(havepycurl and hasattr(pycurl, "CURL_HTTP_VERSION_3_0")): + def download_from_url_with_pycurl3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10): global geturls_download_sleep, havezstd, havebrotli - if (sleep < 0): + if(sleep < 0): sleep = geturls_download_sleep - if (timeout <= 0): + if(timeout <= 0): timeout = 10 urlparts = urlparse.urlparse(httpurl) - if (isinstance(httpheaders, list)): + if(isinstance(httpheaders, list)): httpheaders = make_http_headers_from_list_to_dict(httpheaders) httpheaders = fix_header_names(httpheaders) - if (httpuseragent is not None): - if ('User-Agent' in httpheaders): + if(httpuseragent is not None): + if('User-Agent' in httpheaders): httpheaders['User-Agent'] = httpuseragent else: httpuseragent.update({'User-Agent': httpuseragent}) - if (httpreferer is not None): - if ('Referer' in httpheaders): + if(httpreferer is not None): + if('Referer' in httpheaders): httpheaders['Referer'] = httpreferer else: httpuseragent.update({'Referer': httpreferer}) - if (urlparts.username is not None or urlparts.password is not None): - if (sys.version[0] == "2"): + if(urlparts.username is not None or urlparts.password is not None): + if(sys.version[0] == "2"): inurlencode = b64encode( - str(urlparts.username + ":" + urlparts.password)) - if (sys.version[0] >= "3"): + str(urlparts.username+":"+urlparts.password)) + if(sys.version[0] >= "3"): inurlencode = b64encode( - str(urlparts.username + ":" + urlparts.password).encode()).decode("UTF-8") - httpheaders.update({'Authorization': "Basic " + inurlencode}) + str(urlparts.username+":"+urlparts.password).encode()).decode("UTF-8") + httpheaders.update({'Authorization': "Basic "+inurlencode}) geturls_opener = build_opener(HTTPCookieProcessor(httpcookie)) - if (isinstance(httpheaders, dict)): + if(isinstance(httpheaders, dict)): httpheaders = make_http_headers_from_dict_to_pycurl(httpheaders) geturls_opener.addheaders = httpheaders time.sleep(sleep) - if (postdata is not None and not isinstance(postdata, dict)): + if(postdata is not None and not isinstance(postdata, dict)): postdata = urlencode(postdata) retrieved_body = BytesIO() retrieved_headers = BytesIO() try: - if (httpmethod == "GET"): + if(httpmethod == "GET"): geturls_text = pycurl.Curl() geturls_text.setopt(geturls_text.URL, httpurl) geturls_text.setopt(geturls_text.HTTP_VERSION, @@ -10249,7 +5099,7 @@ def download_from_url_with_pycurl3( geturls_text.setopt(geturls_text.FOLLOWLOCATION, True) geturls_text.setopt(geturls_text.TIMEOUT, timeout) geturls_text.perform() - elif (httpmethod == "POST"): + elif(httpmethod == "POST"): geturls_text = pycurl.Curl() geturls_text.setopt(geturls_text.URL, httpurl) geturls_text.setopt(geturls_text.HTTP_VERSION, @@ -10278,24 +5128,23 @@ def download_from_url_with_pycurl3( geturls_text.setopt(geturls_text.TIMEOUT, timeout) geturls_text.perform() retrieved_headers.seek(0) - if (sys.version[0] == "2"): + if(sys.version[0] == "2"): pycurlhead = retrieved_headers.read() - if (sys.version[0] >= "3"): + if(sys.version[0] >= "3"): pycurlhead = retrieved_headers.read().decode('UTF-8') pyhttpverinfo = re.findall( - r'^HTTP/([0-9.]+) (\d+)(?: ([A-Za-z\s]+))?$', - pycurlhead.splitlines()[0].strip().rstrip('\r\n'))[0] + r'^HTTP/([0-9.]+) (\d+)(?: ([A-Za-z\s]+))?$', pycurlhead.splitlines()[0].strip().rstrip('\r\n'))[0] pycurlheadersout = make_http_headers_from_pycurl_to_dict( pycurlhead) retrieved_body.seek(0) except 
socket.timeout: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False except socket.gaierror: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False except ValueError: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False httpcodeout = geturls_text.getinfo(geturls_text.HTTP_CODE) httpcodereason = http_status_to_reason( @@ -10305,35 +5154,35 @@ def download_from_url_with_pycurl3( httpurlout = geturls_text.getinfo(geturls_text.EFFECTIVE_URL) httpheaderout = pycurlheadersout httpheadersentout = httpheaders - if (isinstance(httpheaderout, list)): + if(isinstance(httpheaderout, list)): httpheaderout = dict(make_http_headers_from_pycurl_to_dict( "\r\n".join(httpheaderout))) - if (sys.version[0] == "2"): + if(sys.version[0] == "2"): try: prehttpheaderout = httpheaderout httpheaderkeys = httpheaderout.keys() imax = len(httpheaderkeys) ic = 0 httpheaderout = {} - while (ic < imax): + while(ic < imax): httpheaderout.update( {httpheaderkeys[ic]: prehttpheaderout[httpheaderkeys[ic]]}) ic += 1 except AttributeError: pass httpheaderout = fix_header_names(httpheaderout) - if (isinstance(httpheadersentout, list)): + if(isinstance(httpheadersentout, list)): httpheadersentout = dict(make_http_headers_from_pycurl_to_dict( "\r\n".join(httpheadersentout))) httpheadersentout = fix_header_names(httpheadersentout) downloadsize = httpheaderout.get('Content-Length') - if (downloadsize is not None): + if(downloadsize is not None): downloadsize = int(downloadsize) if downloadsize is None: downloadsize = 0 fulldatasize = 0 prevdownsize = 0 - log.info("Downloading URL " + httpurl) + log.info("Downloading URL "+httpurl) with BytesIO() as strbuf: while True: databytes = retrieved_body.read(buffersize) @@ -10342,501 +5191,181 @@ def download_from_url_with_pycurl3( datasize = len(databytes) fulldatasize = datasize + fulldatasize percentage = "" - if (downloadsize > 0): - percentage = str( - "{0:.2f}".format( - float( - float( - fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.') + "%" + if(downloadsize > 0): + percentage = str("{0:.2f}".format( + float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%" downloaddiff = fulldatasize - prevdownsize - log.info( - "Downloading " + - get_readable_size( - fulldatasize, - 2, - "SI")['ReadableWithSuffix'] + - " / " + - get_readable_size( - downloadsize, - 2, - "SI")['ReadableWithSuffix'] + - " " + - str(percentage) + - " / Downloaded " + - get_readable_size( - downloaddiff, - 2, - "IEC")['ReadableWithSuffix']) + log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")[ + 'ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']) prevdownsize = fulldatasize strbuf.write(databytes) strbuf.seek(0) returnval_content = strbuf.read() - if (httpheaderout.get("Content-Encoding") == "gzip"): + if(httpheaderout.get("Content-Encoding") == "gzip"): try: returnval_content = zlib.decompress( - returnval_content, 16 + zlib.MAX_WBITS) + returnval_content, 16+zlib.MAX_WBITS) except zlib.error: pass - elif (httpheaderout.get("Content-Encoding") == "deflate"): + elif(httpheaderout.get("Content-Encoding") == "deflate"): try: returnval_content = zlib.decompress(returnval_content) except zlib.error: pass - elif (httpheaderout.get("Content-Encoding") == "br" and havebrotli): + elif(httpheaderout.get("Content-Encoding") == "br" 
and havebrotli): try: returnval_content = brotli.decompress(returnval_content) except brotli.error: pass - elif (httpheaderout.get("Content-Encoding") == "zstd" and havezstd): + elif(httpheaderout.get("Content-Encoding") == "zstd" and havezstd): try: returnval_content = zstandard.decompress(returnval_content) except zstandard.error: pass - elif ((httpheaderout.get("Content-Encoding") == "lzma" or httpheaderout.get("Content-Encoding") == "xz") and havelzma): + elif((httpheaderout.get("Content-Encoding") == "lzma" or httpheaderout.get("Content-Encoding") == "xz") and havelzma): try: returnval_content = lzma.decompress(returnval_content) except zstandard.error: pass - elif (httpheaderout.get("Content-Encoding") == "bzip2"): + elif(httpheaderout.get("Content-Encoding") == "bzip2"): try: returnval_content = bz2.decompress(returnval_content) except zstandard.error: pass - returnval = { - 'Type': "Content", - 'Content': returnval_content, - 'Contentsize': fulldatasize, - 'ContentsizeAlt': { - 'IEC': get_readable_size( - fulldatasize, - 2, - "IEC"), - 'SI': get_readable_size( - fulldatasize, - 2, - "SI")}, - 'Headers': httpheaderout, - 'Version': httpversionout, - 'Method': httpmethodout, - 'HeadersSent': httpheadersentout, - 'URL': httpurlout, - 'Code': httpcodeout, - 'Reason': httpcodereason, - 'HTTPLib': "pycurl3"} + returnval = {'Type': "Content", 'Content': returnval_content, 'Contentsize': fulldatasize, 'ContentsizeAlt': {'IEC': get_readable_size(fulldatasize, 2, "IEC"), 'SI': get_readable_size( + fulldatasize, 2, "SI")}, 'Headers': httpheaderout, 'Version': httpversionout, 'Method': httpmethodout, 'HeadersSent': httpheadersentout, 'URL': httpurlout, 'Code': httpcodeout, 'Reason': httpcodereason, 'HTTPLib': "pycurl3"} geturls_text.close() return returnval -if (not havepycurl): - def download_from_url_with_pycurl3( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - buffersize=524288, - sleep=-1, - timeout=10): +if(not havepycurl): + def download_from_url_with_pycurl3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10): returnval = download_from_url_with_urllib( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep, timeout) return returnval -if (havepycurl and not hasattr(pycurl, "CURL_HTTP_VERSION_3_0") - and hasattr(pycurl, "CURL_HTTP_VERSION_2_0")): - def download_from_url_with_pycurl3( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - buffersize=524288, - sleep=-1, - timeout=10): +if(havepycurl and not hasattr(pycurl, "CURL_HTTP_VERSION_3_0") and hasattr(pycurl, "CURL_HTTP_VERSION_2_0")): + def download_from_url_with_pycurl3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10): returnval = download_from_url_with_pycurl2( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep, timeout) return returnval -if 
(havepycurl and not hasattr(pycurl, "CURL_HTTP_VERSION_3_0") - and not hasattr(pycurl, "CURL_HTTP_VERSION_2_0")): - def download_from_url_with_pycurl3( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - buffersize=524288, - sleep=-1, - timeout=10): +if(havepycurl and not hasattr(pycurl, "CURL_HTTP_VERSION_3_0") and not hasattr(pycurl, "CURL_HTTP_VERSION_2_0")): + def download_from_url_with_pycurl3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10): returnval = download_from_url_with_pycurl( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep, timeout) return returnval -if (havepycurl and hasattr(pycurl, "CURL_HTTP_VERSION_3_0")): - def download_from_url_file_with_pycurl3( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - ranges=[ - None, - None], - buffersize=524288, - sleep=-1, - timeout=10): +if(havepycurl and hasattr(pycurl, "CURL_HTTP_VERSION_3_0")): + def download_from_url_file_with_pycurl3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10): global geturls_download_sleep, havezstd, havebrotli, tmpfileprefix, tmpfilesuffix exec_time_start = time.time() myhash = hashlib.new("sha1") - if (sys.version[0] == "2"): + if(sys.version[0] == "2"): myhash.update(httpurl) myhash.update(str(buffersize)) myhash.update(str(exec_time_start)) - if (sys.version[0] >= "3"): + if(sys.version[0] >= "3"): myhash.update(httpurl.encode('utf-8')) myhash.update(str(buffersize).encode('utf-8')) myhash.update(str(exec_time_start).encode('utf-8')) newtmpfilesuffix = tmpfilesuffix + str(myhash.hexdigest()) - if (sleep < 0): + if(sleep < 0): sleep = geturls_download_sleep - if (timeout <= 0): + if(timeout <= 0): timeout = 10 pretmpfilename = download_from_url_with_pycurl3( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - sleep, - timeout) - if (not pretmpfilename): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep, timeout) + if(not pretmpfilename): return False with tempfile.NamedTemporaryFile('wb+', prefix=tmpfileprefix, suffix=newtmpfilesuffix, delete=False) as f: tmpfilename = f.name try: - os.utime( - tmpfilename, - (time.mktime( - email.utils.parsedate_to_datetime( - pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), - time.mktime( - email.utils.parsedate_to_datetime( - pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) + os.utime(tmpfilename, (time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), + time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) except AttributeError: try: - os.utime( - tmpfilename, - (time.mktime( - datetime.datetime.strptime( - pretmpfilename.get('Headers').get( - 'Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()), - time.mktime( - datetime.datetime.strptime( - 
pretmpfilename.get('Headers').get( - 'Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) + os.utime(tmpfilename, (time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()), + time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) except ValueError: pass except ValueError: pass - returnval = { - 'Type': "File", - 'Filename': tmpfilename, - 'Filesize': pretmpfilename.get('Contentsize'), - 'FilesizeAlt': { - 'IEC': get_readable_size( - pretmpfilename.get('Contentsize'), - 2, - "IEC"), - 'SI': get_readable_size( - pretmpfilename.get('Contentsize'), - 2, - "SI")}, - 'Headers': pretmpfilename.get('Headers'), - 'Version': pretmpfilename.get('Version'), - 'Method': pretmpfilename.get('Method'), - 'HeadersSent': pretmpfilename.get('HeadersSent'), - 'URL': pretmpfilename.get('URL'), - 'Code': pretmpfilename.get('Code'), - 'Reason': pretmpfilename.get('Reason'), - 'HTTPLib': pretmpfilename.get('HTTPLib')} + returnval = {'Type': "File", 'Filename': tmpfilename, 'Filesize': pretmpfilename.get('Contentsize'), 'FilesizeAlt': {'IEC': get_readable_size(pretmpfilename.get('Contentsize'), 2, "IEC"), 'SI': get_readable_size(pretmpfilename.get('Contentsize'), 2, "SI")}, 'Headers': pretmpfilename.get( + 'Headers'), 'Version': pretmpfilename.get('Version'), 'Method': pretmpfilename.get('Method'), 'HeadersSent': pretmpfilename.get('HeadersSent'), 'URL': pretmpfilename.get('URL'), 'Code': pretmpfilename.get('Code'), 'Reason': pretmpfilename.get('Reason'), 'HTTPLib': pretmpfilename.get('HTTPLib')} f.write(pretmpfilename.get('Content')) f.close() exec_time_end = time.time() - log.info("It took " + hms_string(exec_time_start - - exec_time_end) + " to download file.") - returnval.update( - { - 'Filesize': os.path.getsize(tmpfilename), - 'FilesizeAlt': { - 'IEC': get_readable_size( - os.path.getsize(tmpfilename), - 2, - "IEC"), - 'SI': get_readable_size( - os.path.getsize(tmpfilename), - 2, - "SI")}, - 'DownloadTime': float( - exec_time_start - - exec_time_end), - 'DownloadTimeReadable': hms_string( - exec_time_start - - exec_time_end)}) + log.info("It took "+hms_string(exec_time_start - + exec_time_end)+" to download file.") + returnval.update({'Filesize': os.path.getsize(tmpfilename), 'FilesizeAlt': {'IEC': get_readable_size(os.path.getsize(tmpfilename), 2, "IEC"), 'SI': get_readable_size( + os.path.getsize(tmpfilename), 2, "SI")}, 'DownloadTime': float(exec_time_start - exec_time_end), 'DownloadTimeReadable': hms_string(exec_time_start - exec_time_end)}) return returnval -if (not havepycurl): - def download_from_url_file_with_pycurl3( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - ranges=[ - None, - None], - buffersize=524288, - sleep=-1, - timeout=10): +if(not havepycurl): + def download_from_url_file_with_pycurl3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10): returnval = download_from_url_file_with_urllib( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize, - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize, sleep, timeout) return returnval -if (havepycurl and not 
hasattr(pycurl, "CURL_HTTP_VERSION_3_0") - and hasattr(pycurl, "CURL_HTTP_VERSION_2_0")): - def download_from_url_file_with_pycurl3( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - ranges=[ - None, - None], - buffersize=524288, - sleep=-1, - timeout=10): +if(havepycurl and not hasattr(pycurl, "CURL_HTTP_VERSION_3_0") and hasattr(pycurl, "CURL_HTTP_VERSION_2_0")): + def download_from_url_file_with_pycurl3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10): returnval = download_from_url_file_with_pycurl2( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize, - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize, sleep, timeout) return returnval -if (havepycurl and not hasattr(pycurl, "CURL_HTTP_VERSION_3_0") - and not hasattr(pycurl, "CURL_HTTP_VERSION_2_0")): - def download_from_url_file_with_pycurl3( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - ranges=[ - None, - None], - buffersize=524288, - sleep=-1, - timeout=10): +if(havepycurl and not hasattr(pycurl, "CURL_HTTP_VERSION_3_0") and not hasattr(pycurl, "CURL_HTTP_VERSION_2_0")): + def download_from_url_file_with_pycurl3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10): returnval = download_from_url_file_with_pycurl( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize, - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize, sleep, timeout) return returnval -if (havepycurl and hasattr(pycurl, "CURL_HTTP_VERSION_3_0")): - def download_from_url_to_file_with_pycurl3( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - outfile="-", - outpath=os.getcwd(), - ranges=[ - None, - None], - buffersize=[ - 524288, - 524288], - sleep=-1, - timeout=10): +if(havepycurl and hasattr(pycurl, "CURL_HTTP_VERSION_3_0")): + def download_from_url_to_file_with_pycurl3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10): global geturls_download_sleep, havezstd, havebrotli - if (sleep < 0): + if(sleep < 0): sleep = geturls_download_sleep - if (timeout <= 0): + if(timeout <= 0): timeout = 10 - if (not outfile == "-"): + if(not outfile == "-"): outpath = outpath.rstrip(os.path.sep) - filepath = os.path.realpath(outpath + os.path.sep + outfile) - if (not os.path.exists(outpath)): + filepath = os.path.realpath(outpath+os.path.sep+outfile) + if(not os.path.exists(outpath)): os.makedirs(outpath) - if (os.path.exists(outpath) and os.path.isfile(outpath)): + if(os.path.exists(outpath) and os.path.isfile(outpath)): return False - if (os.path.exists(filepath) and os.path.isdir(filepath)): + if(os.path.exists(filepath) and 
os.path.isdir(filepath)): return False pretmpfilename = download_from_url_file_with_pycurl3( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize[0], - sleep, - timeout) - if (not pretmpfilename): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize[0], sleep, timeout) + if(not pretmpfilename): return False tmpfilename = pretmpfilename.get('Filename') downloadsize = int(os.path.getsize(tmpfilename)) fulldatasize = 0 - log.info("Moving file " + tmpfilename + " to " + filepath) + log.info("Moving file "+tmpfilename+" to "+filepath) exec_time_start = time.time() shutil.move(tmpfilename, filepath) try: - os.utime( - filepath, - (time.mktime( - email.utils.parsedate_to_datetime( - pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), - time.mktime( - email.utils.parsedate_to_datetime( - pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) + os.utime(filepath, (time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), + time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) except AttributeError: try: - os.utime( - filepath, - (time.mktime( - datetime.datetime.strptime( - pretmpfilename.get('Headers').get( - 'Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()), - time.mktime( - datetime.datetime.strptime( - pretmpfilename.get('Headers').get( - 'Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) + os.utime(filepath, (time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()), + time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) except ValueError: pass except ValueError: pass exec_time_end = time.time() - log.info("It took " + hms_string(exec_time_start - - exec_time_end) + " to move file.") - if (os.path.exists(tmpfilename)): + log.info("It took "+hms_string(exec_time_start - + exec_time_end)+" to move file.") + if(os.path.exists(tmpfilename)): os.remove(tmpfilename) - returnval = { - 'Type': "File", - 'Filename': filepath, - 'Filesize': downloadsize, - 'FilesizeAlt': { - 'IEC': get_readable_size( - downloadsize, - 2, - "IEC"), - 'SI': get_readable_size( - downloadsize, - 2, - "SI")}, - 'DownloadTime': pretmpfilename.get('DownloadTime'), - 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), - 'MoveFileTime': float( - exec_time_start - exec_time_end), - 'MoveFileTimeReadable': hms_string( - exec_time_start - exec_time_end), - 'Headers': pretmpfilename.get('Headers'), - 'Version': pretmpfilename.get('Version'), - 'Method': pretmpfilename.get('Method'), - 'Method': httpmethod, - 'HeadersSent': pretmpfilename.get('HeadersSent'), - 'URL': pretmpfilename.get('URL'), - 'Code': pretmpfilename.get('Code'), - 'Reason': pretmpfilename.get('Reason'), - 'HTTPLib': pretmpfilename.get('HTTPLib')} - if (outfile == "-"): + returnval = {'Type': "File", 'Filename': filepath, 'Filesize': downloadsize, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename.get('DownloadTime'), 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), 'MoveFileTime': float(exec_time_start - exec_time_end), 'MoveFileTimeReadable': hms_string( + exec_time_start - exec_time_end), 'Headers': 
pretmpfilename.get('Headers'), 'Version': pretmpfilename.get('Version'), 'Method': pretmpfilename.get('Method'), 'Method': httpmethod, 'HeadersSent': pretmpfilename.get('HeadersSent'), 'URL': pretmpfilename.get('URL'), 'Code': pretmpfilename.get('Code'), 'Reason': pretmpfilename.get('Reason'), 'HTTPLib': pretmpfilename.get('HTTPLib')} + if(outfile == "-"): pretmpfilename = download_from_url_file_with_pycurl3( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize[0], - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize[0], sleep, timeout) tmpfilename = pretmpfilename.get('Filename') downloadsize = int(os.path.getsize(tmpfilename)) fulldatasize = 0 @@ -10851,31 +5380,12 @@ def download_from_url_to_file_with_pycurl3( datasize = len(databytes) fulldatasize = datasize + fulldatasize percentage = "" - if (downloadsize > 0): - percentage = str( - "{0:.2f}".format( - float( - float( - fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.') + "%" + if(downloadsize > 0): + percentage = str("{0:.2f}".format( + float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%" downloaddiff = fulldatasize - prevdownsize - log.info( - "Copying " + - get_readable_size( - fulldatasize, - 2, - "SI")['ReadableWithSuffix'] + - " / " + - get_readable_size( - downloadsize, - 2, - "SI")['ReadableWithSuffix'] + - " " + - str(percentage) + - " / Copied " + - get_readable_size( - downloaddiff, - 2, - "IEC")['ReadableWithSuffix']) + log.info("Copying "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")[ + 'ReadableWithSuffix']+" "+str(percentage)+" / Copied "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']) prevdownsize = fulldatasize f.write(databytes) f.seek(0) @@ -10884,140 +5394,28 @@ def download_from_url_to_file_with_pycurl3( ft.close() os.remove(tmpfilename) exec_time_end = time.time() - log.info("It took " + hms_string(exec_time_start - - exec_time_end) + " to copy file.") - returnval = { - 'Type': "Content", - 'Content': fdata, - 'Contentsize': downloadsize, - 'ContentsizeAlt': { - 'IEC': get_readable_size( - downloadsize, - 2, - "IEC"), - 'SI': get_readable_size( - downloadsize, - 2, - "SI")}, - 'DownloadTime': pretmpfilename.get('DownloadTime'), - 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), - 'MoveFileTime': float( - exec_time_start - exec_time_end), - 'MoveFileTimeReadable': hms_string( - exec_time_start - exec_time_end), - 'Headers': pretmpfilename.get('Headers'), - 'Version': pretmpfilename.get('Version'), - 'Method': pretmpfilename.get('Method'), - 'Method': httpmethod, - 'HeadersSent': pretmpfilename.get('HeadersSent'), - 'URL': pretmpfilename.get('URL'), - 'Code': pretmpfilename.get('Code'), - 'Reason': pretmpfilename.get('Reason'), - 'HTTPLib': pretmpfilename.get('HTTPLib')} + log.info("It took "+hms_string(exec_time_start - + exec_time_end)+" to copy file.") + returnval = {'Type': "Content", 'Content': fdata, 'Contentsize': downloadsize, 'ContentsizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename.get('DownloadTime'), 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), 'MoveFileTime': float(exec_time_start - exec_time_end), 'MoveFileTimeReadable': hms_string( + exec_time_start - exec_time_end), 'Headers': pretmpfilename.get('Headers'), 
'Version': pretmpfilename.get('Version'), 'Method': pretmpfilename.get('Method'), 'Method': httpmethod, 'HeadersSent': pretmpfilename.get('HeadersSent'), 'URL': pretmpfilename.get('URL'), 'Code': pretmpfilename.get('Code'), 'Reason': pretmpfilename.get('Reason'), 'HTTPLib': pretmpfilename.get('HTTPLib')} return returnval -if (not havepycurl): - def download_from_url_to_file_with_pycurl3( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - outfile="-", - outpath=os.getcwd(), - ranges=[ - None, - None], - buffersize=[ - 524288, - 524288], - sleep=-1, - timeout=10): +if(not havepycurl): + def download_from_url_to_file_with_pycurl3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10): returnval = download_from_url_to_file_with_urllib( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - outfile, - outpath, - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, outfile, outpath, sleep, timeout) return returnval -if (havepycurl and not hasattr(pycurl, "CURL_HTTP_VERSION_3_0") - and hasattr(pycurl, "CURL_HTTP_VERSION_2_0")): - def download_from_url_to_file_with_pycurl2( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - outfile="-", - outpath=os.getcwd(), - ranges=[ - None, - None], - buffersize=[ - 524288, - 524288], - sleep=-1, - timeout=10): +if(havepycurl and not hasattr(pycurl, "CURL_HTTP_VERSION_3_0") and hasattr(pycurl, "CURL_HTTP_VERSION_2_0")): + def download_from_url_to_file_with_pycurl2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10): returnval = download_from_url_to_file_with_pycurl2( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - outfile, - outpath, - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, outfile, outpath, sleep, timeout) return returnval -if (havepycurl and not hasattr(pycurl, "CURL_HTTP_VERSION_2_0") - and not hasattr(pycurl, "CURL_HTTP_VERSION_3_0")): - def download_from_url_to_file_with_pycurl( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - outfile="-", - outpath=os.getcwd(), - ranges=[ - None, - None], - buffersize=[ - 524288, - 524288], - sleep=-1, - timeout=10): +if(havepycurl and not hasattr(pycurl, "CURL_HTTP_VERSION_2_0") and not hasattr(pycurl, "CURL_HTTP_VERSION_3_0")): + def download_from_url_to_file_with_pycurl(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10): returnval = download_from_url_to_file_with_pycurl( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - outfile, - outpath, - sleep, - timeout) + 
httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, outfile, outpath, sleep, timeout) return returnval @@ -11025,41 +5423,41 @@ def download_file_from_ftp_file(url): urlparts = urlparse.urlparse(url) file_name = os.path.basename(urlparts.path) file_dir = os.path.dirname(urlparts.path) - if (urlparts.username is not None): + if(urlparts.username is not None): ftp_username = urlparts.username else: ftp_username = "anonymous" - if (urlparts.password is not None): + if(urlparts.password is not None): ftp_password = urlparts.password - elif (urlparts.password is None and urlparts.username == "anonymous"): + elif(urlparts.password is None and urlparts.username == "anonymous"): ftp_password = "anonymous" else: ftp_password = "" - if (urlparts.scheme == "ftp"): + if(urlparts.scheme == "ftp"): ftp = FTP() - elif (urlparts.scheme == "ftps"): + elif(urlparts.scheme == "ftps"): ftp = FTP_TLS() else: return False - if (urlparts.scheme == "http" or urlparts.scheme == "https"): + if(urlparts.scheme == "http" or urlparts.scheme == "https"): return False ftp_port = urlparts.port - if (urlparts.port is None): + if(urlparts.port is None): ftp_port = 21 try: ftp.connect(urlparts.hostname, ftp_port) except socket.gaierror: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False except socket.timeout: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+httpurl) return False ftp.login(urlparts.username, urlparts.password) - if (urlparts.scheme == "ftps"): + if(urlparts.scheme == "ftps"): ftp.prot_p() ftpfile = BytesIO() - ftp.retrbinary("RETR " + urlparts.path, ftpfile.write) - # ftp.storbinary("STOR "+urlparts.path, ftpfile.write); + ftp.retrbinary("RETR "+urlparts.path, ftpfile.write) + #ftp.storbinary("STOR "+urlparts.path, ftpfile.write); ftp.close() ftpfile.seek(0, 0) return ftpfile @@ -11070,50 +5468,40 @@ def download_file_from_ftp_string(url): return ftpfile.read() -def download_from_url_with_ftp( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - buffersize=524288, - sleep=-1, - timeout=10): +def download_from_url_with_ftp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10): global geturls_download_sleep, havezstd, havebrotli - if (sleep < 0): + if(sleep < 0): sleep = geturls_download_sleep - if (timeout <= 0): + if(timeout <= 0): timeout = 10 urlparts = urlparse.urlparse(httpurl) - if (isinstance(httpheaders, list)): + if(isinstance(httpheaders, list)): httpheaders = make_http_headers_from_list_to_dict(httpheaders) httpheaders = fix_header_names(httpheaders) - if (httpuseragent is not None): - if ('User-Agent' in httpheaders): + if(httpuseragent is not None): + if('User-Agent' in httpheaders): httpheaders['User-Agent'] = httpuseragent else: httpuseragent.update({'User-Agent': httpuseragent}) - if (httpreferer is not None): - if ('Referer' in httpheaders): + if(httpreferer is not None): + if('Referer' in httpheaders): httpheaders['Referer'] = httpreferer else: httpuseragent.update({'Referer': httpreferer}) - if (isinstance(httpheaders, dict)): + if(isinstance(httpheaders, dict)): httpheaders = make_http_headers_from_dict_to_list(httpheaders) time.sleep(sleep) geturls_text = download_file_from_ftp_file(httpurl) - if (not geturls_text): + if(not geturls_text): return False downloadsize = None - if 
(downloadsize is not None): + if(downloadsize is not None): downloadsize = int(downloadsize) if downloadsize is None: downloadsize = 0 fulldatasize = 0 prevdownsize = 0 - log.info("Downloading URL " + httpurl) + log.info("Downloading URL "+httpurl) with BytesIO() as strbuf: while True: databytes = geturls_text.read(buffersize) @@ -11122,273 +5510,103 @@ def download_from_url_with_ftp( datasize = len(databytes) fulldatasize = datasize + fulldatasize percentage = "" - if (downloadsize > 0): - percentage = str( - "{0:.2f}".format( - float( - float( - fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.') + "%" + if(downloadsize > 0): + percentage = str("{0:.2f}".format( + float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%" downloaddiff = fulldatasize - prevdownsize - log.info( - "Downloading " + - get_readable_size( - fulldatasize, - 2, - "SI")['ReadableWithSuffix'] + - " / " + - get_readable_size( - downloadsize, - 2, - "SI")['ReadableWithSuffix'] + - " " + - str(percentage) + - " / Downloaded " + - get_readable_size( - downloaddiff, - 2, - "IEC")['ReadableWithSuffix']) + log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")[ + 'ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']) prevdownsize = fulldatasize strbuf.write(databytes) strbuf.seek(0) returnval_content = strbuf.read() - returnval = { - 'Type': "Content", - 'Content': returnval_content, - 'Contentsize': fulldatasize, - 'ContentsizeAlt': { - 'IEC': get_readable_size( - fulldatasize, - 2, - "IEC"), - 'SI': get_readable_size( - fulldatasize, - 2, - "SI")}, - 'Headers': None, - 'Version': None, - 'Method': None, - 'HeadersSent': None, - 'URL': httpurl, - 'Code': None} + returnval = {'Type': "Content", 'Content': returnval_content, 'Contentsize': fulldatasize, 'ContentsizeAlt': {'IEC': get_readable_size( + fulldatasize, 2, "IEC"), 'SI': get_readable_size(fulldatasize, 2, "SI")}, 'Headers': None, 'Version': None, 'Method': None, 'HeadersSent': None, 'URL': httpurl, 'Code': None} geturls_text.close() return returnval -def download_from_url_file_with_ftp( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - ranges=[ - None, - None], - buffersize=524288, - sleep=-1, - timeout=10): +def download_from_url_file_with_ftp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10): global geturls_download_sleep, havezstd, havebrotli, tmpfileprefix, tmpfilesuffix exec_time_start = time.time() myhash = hashlib.new("sha1") - if (sys.version[0] == "2"): + if(sys.version[0] == "2"): myhash.update(httpurl) myhash.update(str(buffersize)) myhash.update(str(exec_time_start)) - if (sys.version[0] >= "3"): + if(sys.version[0] >= "3"): myhash.update(httpurl.encode('utf-8')) myhash.update(str(buffersize).encode('utf-8')) myhash.update(str(exec_time_start).encode('utf-8')) newtmpfilesuffix = tmpfilesuffix + str(myhash.hexdigest()) - if (sleep < 0): + if(sleep < 0): sleep = geturls_download_sleep - if (timeout <= 0): + if(timeout <= 0): timeout = 10 pretmpfilename = download_from_url_with_ftp( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - sleep, - timeout) - if (not pretmpfilename): + 
httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep, timeout) + if(not pretmpfilename): return False with tempfile.NamedTemporaryFile('wb+', prefix=tmpfileprefix, suffix=newtmpfilesuffix, delete=False) as f: tmpfilename = f.name try: - os.utime( - tmpfilename, - (time.mktime( - email.utils.parsedate_to_datetime( - pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), - time.mktime( - email.utils.parsedate_to_datetime( - pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) + os.utime(tmpfilename, (time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), + time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) except AttributeError: try: - os.utime( - tmpfilename, - (time.mktime( - datetime.datetime.strptime( - pretmpfilename.get('Headers').get('Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()), - time.mktime( - datetime.datetime.strptime( - pretmpfilename.get('Headers').get('Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) + os.utime(tmpfilename, (time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()), + time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) except ValueError: pass except ValueError: pass - returnval = { - 'Type': "File", - 'Filename': tmpfilename, - 'Filesize': pretmpfilename.get('Contentsize'), - 'FilesizeAlt': { - 'IEC': get_readable_size( - pretmpfilename.get('Contentsize'), - 2, - "IEC"), - 'SI': get_readable_size( - pretmpfilename.get('Contentsize'), - 2, - "SI")}, - 'Headers': pretmpfilename.get('Headers'), - 'Version': pretmpfilename.get('Version'), - 'Method': pretmpfilename.get('Method'), - 'HeadersSent': pretmpfilename.get('HeadersSent'), - 'URL': pretmpfilename.get('URL'), - 'Code': pretmpfilename.get('Code'), - 'Reason': pretmpfilename.get('Reason'), - 'HTTPLib': pretmpfilename.get('HTTPLib')} + returnval = {'Type': "File", 'Filename': tmpfilename, 'Filesize': pretmpfilename.get('Contentsize'), 'FilesizeAlt': {'IEC': get_readable_size(pretmpfilename.get('Contentsize'), 2, "IEC"), 'SI': get_readable_size(pretmpfilename.get('Contentsize'), 2, "SI")}, 'Headers': pretmpfilename.get( + 'Headers'), 'Version': pretmpfilename.get('Version'), 'Method': pretmpfilename.get('Method'), 'HeadersSent': pretmpfilename.get('HeadersSent'), 'URL': pretmpfilename.get('URL'), 'Code': pretmpfilename.get('Code'), 'Reason': pretmpfilename.get('Reason'), 'HTTPLib': pretmpfilename.get('HTTPLib')} f.write(pretmpfilename.get('Content')) f.close() exec_time_end = time.time() - log.info("It took " + hms_string(exec_time_start - - exec_time_end) + " to download file.") - returnval.update( - { - 'Filesize': os.path.getsize(tmpfilename), - 'FilesizeAlt': { - 'IEC': get_readable_size( - os.path.getsize(tmpfilename), - 2, - "IEC"), - 'SI': get_readable_size( - os.path.getsize(tmpfilename), - 2, - "SI")}, - 'DownloadTime': float( - exec_time_start - - exec_time_end), - 'DownloadTimeReadable': hms_string( - exec_time_start - - exec_time_end)}) + log.info("It took "+hms_string(exec_time_start - + exec_time_end)+" to download file.") + returnval.update({'Filesize': os.path.getsize(tmpfilename), 'FilesizeAlt': {'IEC': get_readable_size(os.path.getsize(tmpfilename), 2, "IEC"), 'SI': get_readable_size( + os.path.getsize(tmpfilename), 2, 
"SI")}, 'DownloadTime': float(exec_time_start - exec_time_end), 'DownloadTimeReadable': hms_string(exec_time_start - exec_time_end)}) return returnval -def download_from_url_to_file_with_ftp( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - outfile="-", - outpath=os.getcwd(), - ranges=[ - None, - None], - buffersize=[ - 524288, - 524288], - sleep=-1, - timeout=10): +def download_from_url_to_file_with_ftp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10): global geturls_download_sleep, havezstd, havebrotli - if (sleep < 0): + if(sleep < 0): sleep = geturls_download_sleep - if (timeout <= 0): + if(timeout <= 0): timeout = 10 - if (not outfile == "-"): + if(not outfile == "-"): outpath = outpath.rstrip(os.path.sep) - filepath = os.path.realpath(outpath + os.path.sep + outfile) - if (not os.path.exists(outpath)): + filepath = os.path.realpath(outpath+os.path.sep+outfile) + if(not os.path.exists(outpath)): os.makedirs(outpath) - if (os.path.exists(outpath) and os.path.isfile(outpath)): + if(os.path.exists(outpath) and os.path.isfile(outpath)): return False - if (os.path.exists(filepath) and os.path.isdir(filepath)): + if(os.path.exists(filepath) and os.path.isdir(filepath)): return False pretmpfilename = download_from_url_file_with_ftp( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize[0], - sleep, - timeout) - if (not pretmpfilename): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize[0], sleep, timeout) + if(not pretmpfilename): return False tmpfilename = pretmpfilename.get('Filename') downloadsize = int(os.path.getsize(tmpfilename)) fulldatasize = 0 - log.info("Moving file " + tmpfilename + " to " + filepath) + log.info("Moving file "+tmpfilename+" to "+filepath) exec_time_start = time.time() shutil.move(tmpfilename, filepath) exec_time_end = time.time() - log.info("It took " + hms_string(exec_time_start - - exec_time_end) + " to move file.") - if (os.path.exists(tmpfilename)): + log.info("It took "+hms_string(exec_time_start - + exec_time_end)+" to move file.") + if(os.path.exists(tmpfilename)): os.remove(tmpfilename) - returnval = { - 'Type': "File", - 'Filename': filepath, - 'Filesize': downloadsize, - 'FilesizeAlt': { - 'IEC': get_readable_size( - downloadsize, - 2, - "IEC"), - 'SI': get_readable_size( - downloadsize, - 2, - "SI")}, - 'DownloadTime': pretmpfilename.get('DownloadTime'), - 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), - 'MoveFileTime': float( - exec_time_start - exec_time_end), - 'MoveFileTimeReadable': hms_string( - exec_time_start - exec_time_end), - 'Headers': pretmpfilename.get('Headers'), - 'Version': pretmpfilename.get('Version'), - 'Method': pretmpfilename.get('Method'), - 'Method': None, - 'HeadersSent': pretmpfilename.get('HeadersSent'), - 'URL': pretmpfilename.get('URL'), - 'Code': pretmpfilename.get('Code'), - 'Reason': pretmpfilename.get('Reason'), - 'HTTPLib': pretmpfilename.get('HTTPLib')} - if (outfile == "-"): + returnval = {'Type': "File", 'Filename': filepath, 'Filesize': downloadsize, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': 
pretmpfilename.get('DownloadTime'), 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), 'MoveFileTime': float(exec_time_start - exec_time_end), 'MoveFileTimeReadable': hms_string( + exec_time_start - exec_time_end), 'Headers': pretmpfilename.get('Headers'), 'Version': pretmpfilename.get('Version'), 'Method': pretmpfilename.get('Method'), 'HeadersSent': pretmpfilename.get('HeadersSent'), 'URL': pretmpfilename.get('URL'), 'Code': pretmpfilename.get('Code'), 'Reason': pretmpfilename.get('Reason'), 'HTTPLib': pretmpfilename.get('HTTPLib')} + if(outfile == "-"): pretmpfilename = download_from_url_file_with_ftp( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize[0], - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize[0], sleep, timeout) tmpfilename = pretmpfilename.get('Filename') downloadsize = int(os.path.getsize(tmpfilename)) fulldatasize = 0 @@ -11403,31 +5621,12 @@ def download_from_url_to_file_with_ftp( datasize = len(databytes) fulldatasize = datasize + fulldatasize percentage = "" - if (downloadsize > 0): - percentage = str( - "{0:.2f}".format( - float( - float( - fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.') + "%" + if(downloadsize > 0): + percentage = str("{0:.2f}".format( + float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%" downloaddiff = fulldatasize - prevdownsize - log.info( - "Copying " + - get_readable_size( - fulldatasize, - 2, - "SI")['ReadableWithSuffix'] + - " / " + - get_readable_size( - downloadsize, - 2, - "SI")['ReadableWithSuffix'] + - " " + - str(percentage) + - " / Copied " + - get_readable_size( - downloaddiff, - 2, - "IEC")['ReadableWithSuffix']) + log.info("Copying "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")[ + 'ReadableWithSuffix']+" "+str(percentage)+" / Copied "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']) prevdownsize = fulldatasize f.write(databytes) f.seek(0) @@ -11436,36 +5635,10 @@ def download_from_url_to_file_with_ftp( ft.close() os.remove(tmpfilename) exec_time_end = time.time() - log.info("It took " + hms_string(exec_time_start - - exec_time_end) + " to copy file.") - returnval = { - 'Type': "Content", - 'Content': fdata, - 'Contentsize': downloadsize, - 'ContentsizeAlt': { - 'IEC': get_readable_size( - downloadsize, - 2, - "IEC"), - 'SI': get_readable_size( - downloadsize, - 2, - "SI")}, - 'DownloadTime': pretmpfilename.get('DownloadTime'), - 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), - 'MoveFileTime': float( - exec_time_start - exec_time_end), - 'MoveFileTimeReadable': hms_string( - exec_time_start - exec_time_end), - 'Headers': pretmpfilename.get('Headers'), - 'Version': pretmpfilename.get('Version'), - 'Method': pretmpfilename.get('Method'), - 'Method': None, - 'HeadersSent': pretmpfilename.get('HeadersSent'), - 'URL': pretmpfilename.get('URL'), - 'Code': pretmpfilename.get('Code'), - 'Reason': pretmpfilename.get('Reason'), - 'HTTPLib': pretmpfilename.get('HTTPLib')} + log.info("It took "+hms_string(exec_time_start - + exec_time_end)+" to copy file.") + returnval = {'Type': "Content", 'Content': fdata, 'Contentsize': downloadsize, 'ContentsizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename.get('DownloadTime'), 'DownloadTimeReadable': 
pretmpfilename.get('DownloadTimeReadable'), 'MoveFileTime': float(exec_time_start - exec_time_end), 'MoveFileTimeReadable': hms_string( + exec_time_start - exec_time_end), 'Headers': pretmpfilename.get('Headers'), 'Version': pretmpfilename.get('Version'), 'Method': pretmpfilename.get('Method'), 'HeadersSent': pretmpfilename.get('HeadersSent'), 'URL': pretmpfilename.get('URL'), 'Code': pretmpfilename.get('Code'), 'Reason': pretmpfilename.get('Reason'), 'HTTPLib': pretmpfilename.get('HTTPLib')} return returnval @@ -11473,39 +5646,39 @@ def upload_file_to_ftp_file(ftpfile, url): urlparts = urlparse.urlparse(url) file_name = os.path.basename(urlparts.path) file_dir = os.path.dirname(urlparts.path) - if (urlparts.username is not None): + if(urlparts.username is not None): ftp_username = urlparts.username else: ftp_username = "anonymous" - if (urlparts.password is not None): + if(urlparts.password is not None): ftp_password = urlparts.password - elif (urlparts.password is None and urlparts.username == "anonymous"): + elif(urlparts.password is None and urlparts.username == "anonymous"): ftp_password = "anonymous" else: ftp_password = "" - if (urlparts.scheme == "ftp"): + if(urlparts.scheme == "ftp"): ftp = FTP() - elif (urlparts.scheme == "ftps"): + elif(urlparts.scheme == "ftps"): ftp = FTP_TLS() else: return False - if (urlparts.scheme == "http" or urlparts.scheme == "https"): + if(urlparts.scheme == "http" or urlparts.scheme == "https"): return False ftp_port = urlparts.port - if (urlparts.port is None): + if(urlparts.port is None): ftp_port = 21 try: ftp.connect(urlparts.hostname, ftp_port) except socket.gaierror: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+url) return False except socket.timeout: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+url) return False ftp.login(ftp_username, ftp_password) - if (urlparts.scheme == "ftps"): + if(urlparts.scheme == "ftps"): ftp.prot_p() - ftp.storbinary("STOR " + urlparts.path, ftpfile) + ftp.storbinary("STOR "+urlparts.path, ftpfile) ftp.close() ftpfile.seek(0, 0) return ftpfile @@ -11518,29 +5691,29 @@ def upload_file_to_ftp_string(ftpstring, url): return ftpfile -if (haveparamiko): +if(haveparamiko): def download_file_from_sftp_file(url): urlparts = urlparse.urlparse(url) file_name = os.path.basename(urlparts.path) file_dir = os.path.dirname(urlparts.path) - if (urlparts.scheme == "http" or urlparts.scheme == "https"): + if(urlparts.scheme == "http" or urlparts.scheme == "https"): return False sftp_port = urlparts.port - if (urlparts.port is None): + if(urlparts.port is None): sftp_port = 22 else: sftp_port = urlparts.port - if (urlparts.username is not None): + if(urlparts.username is not None): sftp_username = urlparts.username else: sftp_username = "anonymous" - if (urlparts.password is not None): + if(urlparts.password is not None): sftp_password = urlparts.password - elif (urlparts.password is None and urlparts.username == "anonymous"): + elif(urlparts.password is None and urlparts.username == "anonymous"): sftp_password = "anonymous" else: sftp_password = "" - if (urlparts.scheme != "sftp"): + if(urlparts.scheme != "sftp"): return False ssh = paramiko.SSHClient() ssh.load_system_host_keys() @@ -11551,10 +5724,10 @@ def download_file_from_sftp_file(url): except paramiko.ssh_exception.SSHException: return False except socket.gaierror: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+url) return False except socket.timeout: - log.info("Error 
With URL " + httpurl) + log.info("Error With URL "+url) return False sftp = ssh.open_sftp() sftpfile = BytesIO() @@ -11567,7 +5740,7 @@ def download_file_from_sftp_file(url): def download_file_from_sftp_file(url): return False -if (haveparamiko): +if(haveparamiko): def download_file_from_sftp_string(url): sftpfile = download_file_from_sftp_file(url) return sftpfile.read() @@ -11575,51 +5748,41 @@ def download_file_from_sftp_string(url): def download_file_from_ftp_string(url): return False -if (haveparamiko): - def download_from_url_with_sftp( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - buffersize=524288, - sleep=-1, - timeout=10): +if(haveparamiko): + def download_from_url_with_sftp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10): global geturls_download_sleep, havezstd, havebrotli - if (sleep < 0): + if(sleep < 0): sleep = geturls_download_sleep - if (timeout <= 0): + if(timeout <= 0): timeout = 10 urlparts = urlparse.urlparse(httpurl) - if (isinstance(httpheaders, list)): + if(isinstance(httpheaders, list)): httpheaders = make_http_headers_from_list_to_dict(httpheaders) httpheaders = fix_header_names(httpheaders) - if (httpuseragent is not None): - if ('User-Agent' in httpheaders): + if(httpuseragent is not None): + if('User-Agent' in httpheaders): httpheaders['User-Agent'] = httpuseragent else: httpheaders.update({'User-Agent': httpuseragent}) - if (httpreferer is not None): - if ('Referer' in httpheaders): + if(httpreferer is not None): + if('Referer' in httpheaders): httpheaders['Referer'] = httpreferer else: httpheaders.update({'Referer': httpreferer}) - if (isinstance(httpheaders, dict)): + if(isinstance(httpheaders, dict)): httpheaders = make_http_headers_from_dict_to_list(httpheaders) time.sleep(sleep) geturls_text = download_file_from_sftp_file(httpurl) - if (not geturls_text): + if(not geturls_text): return False downloadsize = None - if (downloadsize is not None): + if(downloadsize is not None): downloadsize = int(downloadsize) if downloadsize is None: downloadsize = 0 fulldatasize = 0 prevdownsize = 0 - log.info("Downloading URL " + httpurl) + log.info("Downloading URL "+httpurl) with BytesIO() as strbuf: while True: databytes = geturls_text.read(buffersize) @@ -11628,306 +5791,111 @@ def download_from_url_with_sftp( datasize = len(databytes) fulldatasize = datasize + fulldatasize percentage = "" - if (downloadsize > 0): - percentage = str( - "{0:.2f}".format( - float( - float( - fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.') + "%" + if(downloadsize > 0): + percentage = str("{0:.2f}".format( + float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%" downloaddiff = fulldatasize - prevdownsize - log.info( - "Downloading " + - get_readable_size( - fulldatasize, - 2, - "SI")['ReadableWithSuffix'] + - " / " + - get_readable_size( - downloadsize, - 2, - "SI")['ReadableWithSuffix'] + - " " + - str(percentage) + - " / Downloaded " + - get_readable_size( - downloaddiff, - 2, - "IEC")['ReadableWithSuffix']) + log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")[ + 'ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']) prevdownsize = fulldatasize strbuf.write(databytes)
strbuf.seek(0) returnval_content = strbuf.read() - returnval = { - 'Type': "Content", - 'Content': returnval_content, - 'Contentsize': fulldatasize, - 'ContentsizeAlt': { - 'IEC': get_readable_size( - fulldatasize, - 2, - "IEC"), - 'SI': get_readable_size( - fulldatasize, - 2, - "SI")}, - 'Headers': None, - 'Version': None, - 'Method': None, - 'HeadersSent': None, - 'URL': httpurl, - 'Code': None} + returnval = {'Type': "Content", 'Content': returnval_content, 'Contentsize': fulldatasize, 'ContentsizeAlt': {'IEC': get_readable_size( + fulldatasize, 2, "IEC"), 'SI': get_readable_size(fulldatasize, 2, "SI")}, 'Headers': None, 'Version': None, 'Method': None, 'HeadersSent': None, 'URL': httpurl, 'Code': None} geturls_text.close() return returnval -if (not haveparamiko): - def download_from_url_with_sftp( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - buffersize=524288, - sleep=-1, - timeout=10): +if(not haveparamiko): + def download_from_url_with_sftp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10): return False -if (haveparamiko): - def download_from_url_file_with_sftp( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - ranges=[ - None, - None], - buffersize=524288, - sleep=-1, - timeout=10): +if(haveparamiko): + def download_from_url_file_with_sftp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10): global geturls_download_sleep, havezstd, havebrotli, tmpfileprefix, tmpfilesuffix exec_time_start = time.time() myhash = hashlib.new("sha1") - if (sys.version[0] == "2"): + if(sys.version[0] == "2"): myhash.update(httpurl) myhash.update(str(buffersize)) myhash.update(str(exec_time_start)) - if (sys.version[0] >= "3"): + if(sys.version[0] >= "3"): myhash.update(httpurl.encode('utf-8')) myhash.update(str(buffersize).encode('utf-8')) myhash.update(str(exec_time_start).encode('utf-8')) newtmpfilesuffix = tmpfilesuffix + str(myhash.hexdigest()) - if (sleep < 0): + if(sleep < 0): sleep = geturls_download_sleep - if (timeout <= 0): + if(timeout <= 0): timeout = 10 pretmpfilename = download_from_url_with_sftp( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - sleep, - timeout) - if (not pretmpfilename): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep, timeout) + if(not pretmpfilename): return False with tempfile.NamedTemporaryFile('wb+', prefix=tmpfileprefix, suffix=newtmpfilesuffix, delete=False) as f: tmpfilename = f.name try: - os.utime( - tmpfilename, - (time.mktime( - email.utils.parsedate_to_datetime( - pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), - time.mktime( - email.utils.parsedate_to_datetime( - pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) + os.utime(tmpfilename, (time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), + time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) except AttributeError: try: - os.utime( - tmpfilename, - (time.mktime( - 
datetime.datetime.strptime( - pretmpfilename.get('Headers').get( - 'Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()), - time.mktime( - datetime.datetime.strptime( - pretmpfilename.get('Headers').get( - 'Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) + os.utime(tmpfilename, (time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()), + time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) except ValueError: pass except ValueError: pass - returnval = { - 'Type': "File", - 'Filename': tmpfilename, - 'Filesize': pretmpfilename.get('Contentsize'), - 'FilesizeAlt': { - 'IEC': get_readable_size( - pretmpfilename.get('Contentsize'), - 2, - "IEC"), - 'SI': get_readable_size( - pretmpfilename.get('Contentsize'), - 2, - "SI")}, - 'Headers': pretmpfilename.get('Headers'), - 'Version': pretmpfilename.get('Version'), - 'Method': pretmpfilename.get('Method'), - 'HeadersSent': pretmpfilename.get('HeadersSent'), - 'URL': pretmpfilename.get('URL'), - 'Code': pretmpfilename.get('Code'), - 'Reason': pretmpfilename.get('Reason'), - 'HTTPLib': pretmpfilename.get('HTTPLib')} + returnval = {'Type': "File", 'Filename': tmpfilename, 'Filesize': pretmpfilename.get('Contentsize'), 'FilesizeAlt': {'IEC': get_readable_size(pretmpfilename.get('Contentsize'), 2, "IEC"), 'SI': get_readable_size(pretmpfilename.get('Contentsize'), 2, "SI")}, 'Headers': pretmpfilename.get( + 'Headers'), 'Version': pretmpfilename.get('Version'), 'Method': pretmpfilename.get('Method'), 'HeadersSent': pretmpfilename.get('HeadersSent'), 'URL': pretmpfilename.get('URL'), 'Code': pretmpfilename.get('Code'), 'Reason': pretmpfilename.get('Reason'), 'HTTPLib': pretmpfilename.get('HTTPLib')} f.write(pretmpfilename.get('Content')) f.close() exec_time_end = time.time() - log.info("It took " + hms_string(exec_time_start - - exec_time_end) + " to download file.") - returnval.update( - { - 'Filesize': os.path.getsize(tmpfilename), - 'FilesizeAlt': { - 'IEC': get_readable_size( - os.path.getsize(tmpfilename), - 2, - "IEC"), - 'SI': get_readable_size( - os.path.getsize(tmpfilename), - 2, - "SI")}, - 'DownloadTime': float( - exec_time_start - - exec_time_end), - 'DownloadTimeReadable': hms_string( - exec_time_start - - exec_time_end)}) + log.info("It took "+hms_string(exec_time_start - + exec_time_end)+" to download file.") + returnval.update({'Filesize': os.path.getsize(tmpfilename), 'FilesizeAlt': {'IEC': get_readable_size(os.path.getsize(tmpfilename), 2, "IEC"), 'SI': get_readable_size( + os.path.getsize(tmpfilename), 2, "SI")}, 'DownloadTime': float(exec_time_start - exec_time_end), 'DownloadTimeReadable': hms_string(exec_time_start - exec_time_end)}) return returnval -if (not haveparamiko): - def download_from_url_file_with_sftp( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - ranges=[ - None, - None], - buffersize=524288, - sleep=-1, - timeout=10): +if(not haveparamiko): + def download_from_url_file_with_sftp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10): return False -if (haveparamiko): - def download_from_url_to_file_with_sftp( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - 
httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - outfile="-", - outpath=os.getcwd(), - ranges=[ - None, - None], - buffersize=[ - 524288, - 524288], - sleep=-1, - timeout=10): +if(haveparamiko): + def download_from_url_to_file_with_sftp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10): global geturls_download_sleep, havezstd, havebrotli - if (sleep < 0): + if(sleep < 0): sleep = geturls_download_sleep - if (timeout <= 0): + if(timeout <= 0): timeout = 10 - if (not outfile == "-"): + if(not outfile == "-"): outpath = outpath.rstrip(os.path.sep) - filepath = os.path.realpath(outpath + os.path.sep + outfile) - if (not os.path.exists(outpath)): + filepath = os.path.realpath(outpath+os.path.sep+outfile) + if(not os.path.exists(outpath)): os.makedirs(outpath) - if (os.path.exists(outpath) and os.path.isfile(outpath)): + if(os.path.exists(outpath) and os.path.isfile(outpath)): return False - if (os.path.exists(filepath) and os.path.isdir(filepath)): + if(os.path.exists(filepath) and os.path.isdir(filepath)): return False pretmpfilename = download_from_url_file_with_sftp( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize[0], - sleep, - timeout) - if (not pretmpfilename): + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize[0], sleep, timeout) + if(not pretmpfilename): return False tmpfilename = pretmpfilename.get('Filename') downloadsize = int(os.path.getsize(tmpfilename)) fulldatasize = 0 - log.info("Moving file " + tmpfilename + " to " + filepath) + log.info("Moving file "+tmpfilename+" to "+filepath) exec_time_start = time.time() shutil.move(tmpfilename, filepath) exec_time_end = time.time() - log.info("It took " + hms_string(exec_time_start - - exec_time_end) + " to move file.") - if (os.path.exists(tmpfilename)): + log.info("It took "+hms_string(exec_time_start - + exec_time_end)+" to move file.") + if(os.path.exists(tmpfilename)): os.remove(tmpfilename) - returnval = { - 'Type': "File", - 'Filename': filepath, - 'Filesize': downloadsize, - 'FilesizeAlt': { - 'IEC': get_readable_size( - downloadsize, - 2, - "IEC"), - 'SI': get_readable_size( - downloadsize, - 2, - "SI")}, - 'DownloadTime': pretmpfilename.get('DownloadTime'), - 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), - 'MoveFileTime': float( - exec_time_start - exec_time_end), - 'MoveFileTimeReadable': hms_string( - exec_time_start - exec_time_end), - 'Headers': pretmpfilename.get('Headers'), - 'Version': pretmpfilename.get('Version'), - 'Method': pretmpfilename.get('Method'), - 'Method': None, - 'HeadersSent': pretmpfilename.get('HeadersSent'), - 'URL': pretmpfilename.get('URL'), - 'Code': pretmpfilename.get('Code'), - 'Reason': pretmpfilename.get('Reason'), - 'HTTPLib': pretmpfilename.get('HTTPLib')} - if (outfile == "-"): + returnval = {'Type': "File", 'Filename': filepath, 'Filesize': downloadsize, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename.get('DownloadTime'), 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), 'MoveFileTime': float(exec_time_start - exec_time_end), 'MoveFileTimeReadable': hms_string( + exec_time_start - exec_time_end), 'Headers': 
pretmpfilename.get('Headers'), 'Version': pretmpfilename.get('Version'), 'Method': pretmpfilename.get('Method'), 'HeadersSent': pretmpfilename.get('HeadersSent'), 'URL': pretmpfilename.get('URL'), 'Code': pretmpfilename.get('Code'), 'Reason': pretmpfilename.get('Reason'), 'HTTPLib': pretmpfilename.get('HTTPLib')} + if(outfile == "-"): pretmpfilename = download_from_url_file_with_sftp( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize[0], - sleep, - timeout) + httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, ranges, buffersize[0], sleep, timeout) tmpfilename = pretmpfilename.get('Filename') downloadsize = int(os.path.getsize(tmpfilename)) fulldatasize = 0 @@ -11942,31 +5910,12 @@ def download_from_url_to_file_with_sftp( datasize = len(databytes) fulldatasize = datasize + fulldatasize percentage = "" - if (downloadsize > 0): - percentage = str( - "{0:.2f}".format( - float( - float( - fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.') + "%" + if(downloadsize > 0): + percentage = str("{0:.2f}".format( + float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%" downloaddiff = fulldatasize - prevdownsize - log.info( - "Copying " + - get_readable_size( - fulldatasize, - 2, - "SI")['ReadableWithSuffix'] + - " / " + - get_readable_size( - downloadsize, - 2, - "SI")['ReadableWithSuffix'] + - " " + - str(percentage) + - " / Copied " + - get_readable_size( - downloaddiff, - 2, - "IEC")['ReadableWithSuffix']) + log.info("Copying "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")[ + 'ReadableWithSuffix']+" "+str(percentage)+" / Copied "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']) prevdownsize = fulldatasize f.write(databytes) f.seek(0) @@ -11975,82 +5924,39 @@ def download_from_url_to_file_with_sftp( ft.close() os.remove(tmpfilename) exec_time_end = time.time() - log.info("It took " + hms_string(exec_time_start - - exec_time_end) + " to copy file.") - returnval = { - 'Type': "Content", - 'Content': fdata, - 'Contentsize': downloadsize, - 'ContentsizeAlt': { - 'IEC': get_readable_size( - downloadsize, - 2, - "IEC"), - 'SI': get_readable_size( - downloadsize, - 2, - "SI")}, - 'DownloadTime': pretmpfilename.get('DownloadTime'), - 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), - 'MoveFileTime': float( - exec_time_start - exec_time_end), - 'MoveFileTimeReadable': hms_string( - exec_time_start - exec_time_end), - 'Headers': pretmpfilename.get('Headers'), - 'Version': pretmpfilename.get('Version'), - 'Method': pretmpfilename.get('Method'), - 'Method': None, - 'HeadersSent': pretmpfilename.get('HeadersSent'), - 'URL': pretmpfilename.get('URL'), - 'Code': pretmpfilename.get('Code'), - 'Reason': pretmpfilename.get('Reason'), - 'HTTPLib': pretmpfilename.get('HTTPLib')} + log.info("It took "+hms_string(exec_time_start - + exec_time_end)+" to copy file.") + returnval = {'Type': "Content", 'Content': fdata, 'Contentsize': downloadsize, 'ContentsizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename.get('DownloadTime'), 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), 'MoveFileTime': float(exec_time_start - exec_time_end), 'MoveFileTimeReadable': hms_string( + exec_time_start - exec_time_end), 'Headers': 
pretmpfilename.get('Version'), 'Method': pretmpfilename.get('Method'), 'HeadersSent': pretmpfilename.get('HeadersSent'), 'URL': pretmpfilename.get('URL'), 'Code': pretmpfilename.get('Code'), 'Reason': pretmpfilename.get('Reason'), 'HTTPLib': pretmpfilename.get('HTTPLib')} return returnval -if (not haveparamiko): - def download_from_url_to_file_with_sftp( - httpurl, - httpheaders=geturls_headers, - httpuseragent=None, - httpreferer=None, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - outfile="-", - outpath=os.getcwd(), - ranges=[ - None, - None], - buffersize=[ - 524288, - 524288], - sleep=-1, - timeout=10): +if(not haveparamiko): + def download_from_url_to_file_with_sftp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10): return False -if (haveparamiko): +if(haveparamiko): def upload_file_to_sftp_file(sftpfile, url): urlparts = urlparse.urlparse(url) file_name = os.path.basename(urlparts.path) file_dir = os.path.dirname(urlparts.path) sftp_port = urlparts.port - if (urlparts.scheme == "http" or urlparts.scheme == "https"): + if(urlparts.scheme == "http" or urlparts.scheme == "https"): return False - if (urlparts.port is None): + if(urlparts.port is None): sftp_port = 22 else: sftp_port = urlparts.port - if (urlparts.username is not None): + if(urlparts.username is not None): sftp_username = urlparts.username else: sftp_username = "anonymous" - if (urlparts.password is not None): + if(urlparts.password is not None): sftp_password = urlparts.password - elif (urlparts.password is None and urlparts.username == "anonymous"): + elif(urlparts.password is None and urlparts.username == "anonymous"): sftp_password = "anonymous" else: sftp_password = "" - if (urlparts.scheme != "sftp"): + if(urlparts.scheme != "sftp"): return False ssh = paramiko.SSHClient() ssh.load_system_host_keys() @@ -12061,10 +5967,10 @@ def upload_file_to_sftp_file(sftpfile, url): except paramiko.ssh_exception.SSHException: return False except socket.gaierror: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+url) return False except socket.timeout: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+url) return False sftp = ssh.open_sftp() sftp.putfo(sftpfile, urlparts.path) @@ -12076,7 +5982,7 @@ def upload_file_to_sftp_file(sftpfile, url): def upload_file_to_sftp_file(sftpfile, url): return False -if (haveparamiko): +if(haveparamiko): def upload_file_to_sftp_string(sftpstring, url): sftpfileo = BytesIO(sftpstring) sftpfile = upload_file_to_sftp_file(sftpfileo, url) @@ -12087,43 +5993,40 @@ def upload_file_to_sftp_string(url): return False -if (havepysftp): +if(havepysftp): def download_file_from_pysftp_file(url): urlparts = urlparse.urlparse(url) file_name = os.path.basename(urlparts.path) file_dir = os.path.dirname(urlparts.path) - if (urlparts.scheme == "http" or urlparts.scheme == "https"): + if(urlparts.scheme == "http" or urlparts.scheme == "https"): return False sftp_port = urlparts.port - if (urlparts.port is None): + if(urlparts.port is None): sftp_port = 22 else: sftp_port = urlparts.port - if (urlparts.username is not None): + if(urlparts.username is not None): sftp_username = urlparts.username else: sftp_username = "anonymous" - if (urlparts.password is not None): + if(urlparts.password is not None): sftp_password = urlparts.password - elif 
(urlparts.password is None and urlparts.username == "anonymous"): + elif(urlparts.password is None and urlparts.username == "anonymous"): sftp_password = "anonymous" else: sftp_password = "" - if (urlparts.scheme != "sftp"): + if(urlparts.scheme != "sftp"): return False try: - pysftp.Connection( - urlparts.hostname, - port=sftp_port, - username=urlparts.username, - password=urlparts.password) + sftp = pysftp.Connection(urlparts.hostname, port=sftp_port, + username=sftp_username, password=sftp_password) except paramiko.ssh_exception.SSHException: return False except socket.gaierror: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+url) return False except socket.timeout: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+url) return False sftpfile = BytesIO() @@ -12136,7 +6039,7 @@ def download_file_from_pysftp_file(url): return False -if (havepysftp): +if(havepysftp): def download_file_from_pysftp_string(url): sftpfile = download_file_from_pysftp_file(url) return sftpfile.read() @@ -12144,39 +6047,31 @@ def download_file_from_pysftp_string(url): def download_file_from_ftp_string(url): return False -if (havepysftp): - def download_from_url_with_pysftp( - httpurl, - httpheaders=geturls_headers, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - buffersize=524288, - sleep=-1, - timeout=10): +if(havepysftp): + def download_from_url_with_pysftp(httpurl, httpheaders=geturls_headers, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10): global geturls_download_sleep, havezstd, havebrotli - if (sleep < 0): + if(sleep < 0): sleep = geturls_download_sleep - if (timeout <= 0): + if(timeout <= 0): timeout = 10 urlparts = urlparse.urlparse(httpurl) - if (isinstance(httpheaders, list)): + if(isinstance(httpheaders, list)): httpheaders = make_http_headers_from_list_to_dict(httpheaders) httpheaders = fix_header_names(httpheaders) - if (isinstance(httpheaders, dict)): + if(isinstance(httpheaders, dict)): httpheaders = make_http_headers_from_dict_to_list(httpheaders) time.sleep(sleep) geturls_text = download_file_from_pysftp_file(httpurl) - if (not geturls_text): + if(not geturls_text): return False downloadsize = None - if (downloadsize is not None): + if(downloadsize is not None): downloadsize = int(downloadsize) if downloadsize is None: downloadsize = 0 fulldatasize = 0 prevdownsize = 0 - log.info("Downloading URL " + httpurl) + log.info("Downloading URL "+httpurl) with BytesIO() as strbuf: while True: databytes = geturls_text.read(buffersize) @@ -12185,294 +6080,111 @@ def download_from_url_with_pysftp( datasize = len(databytes) fulldatasize = datasize + fulldatasize percentage = "" - if (downloadsize > 0): - percentage = str( - "{0:.2f}".format( - float( - float( - fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.') + "%" + if(downloadsize > 0): + percentage = str("{0:.2f}".format( + float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%" downloaddiff = fulldatasize - prevdownsize - log.info( - "Downloading " + - get_readable_size( - fulldatasize, - 2, - "SI")['ReadableWithSuffix'] + - " / " + - get_readable_size( - downloadsize, - 2, - "SI")['ReadableWithSuffix'] + - " " + - str(percentage) + - " / Downloaded " + - get_readable_size( - downloaddiff, - 2, - "IEC")['ReadableWithSuffix']) + log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / 
"+get_readable_size(downloadsize, 2, "SI")[ + 'ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']) prevdownsize = fulldatasize strbuf.write(databytes) strbuf.seek(0) returnval_content = strbuf.read() - returnval = { - 'Type': "Content", - 'Content': returnval_content, - 'Contentsize': fulldatasize, - 'ContentsizeAlt': { - 'IEC': get_readable_size( - fulldatasize, - 2, - "IEC"), - 'SI': get_readable_size( - fulldatasize, - 2, - "SI")}, - 'Headers': None, - 'Version': None, - 'Method': None, - 'HeadersSent': None, - 'URL': httpurl, - 'Code': None} + returnval = {'Type': "Content", 'Content': returnval_content, 'Contentsize': fulldatasize, 'ContentsizeAlt': {'IEC': get_readable_size( + fulldatasize, 2, "IEC"), 'SI': get_readable_size(fulldatasize, 2, "SI")}, 'Headers': None, 'Version': None, 'Method': None, 'HeadersSent': None, 'URL': httpurl, 'Code': None} geturls_text.close() return returnval -if (not havepysftp): - def download_from_url_with_pysftp( - httpurl, - httpheaders=geturls_headers, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - buffersize=524288, - sleep=-1, - timeout=10): +if(not havepysftp): + def download_from_url_with_pysftp(httpurl, httpheaders=geturls_headers, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10): return False -if (havepysftp): - def download_from_url_file_with_pysftp( - httpurl, - httpheaders=geturls_headers, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - ranges=[ - None, - None], - buffersize=524288, - sleep=-1, - timeout=10): +if(havepysftp): + def download_from_url_file_with_pysftp(httpurl, httpheaders=geturls_headers, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10): global geturls_download_sleep, havezstd, havebrotli, tmpfileprefix, tmpfilesuffix exec_time_start = time.time() myhash = hashlib.new("sha1") - if (sys.version[0] == "2"): + if(sys.version[0] == "2"): myhash.update(httpurl) myhash.update(str(buffersize)) myhash.update(str(exec_time_start)) - if (sys.version[0] >= "3"): + if(sys.version[0] >= "3"): myhash.update(httpurl.encode('utf-8')) myhash.update(str(buffersize).encode('utf-8')) myhash.update(str(exec_time_start).encode('utf-8')) newtmpfilesuffix = tmpfilesuffix + str(myhash.hexdigest()) - if (sleep < 0): + if(sleep < 0): sleep = geturls_download_sleep - if (timeout <= 0): + if(timeout <= 0): timeout = 10 pretmpfilename = download_from_url_with_pysftp( - httpurl, - httpheaders, - httpuseragent, - httpreferer, - httpcookie, - httpmethod, - postdata, - buffersize, - sleep, - timeout) - if (not pretmpfilename): + httpurl, httpheaders, httpcookie, httpmethod, postdata, buffersize, sleep, timeout) + if(not pretmpfilename): return False with tempfile.NamedTemporaryFile('wb+', prefix=tmpfileprefix, suffix=newtmpfilesuffix, delete=False) as f: tmpfilename = f.name try: - os.utime( - tmpfilename, - (time.mktime( - email.utils.parsedate_to_datetime( - pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), - time.mktime( - email.utils.parsedate_to_datetime( - pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) + os.utime(tmpfilename, (time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), + time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()))) except AttributeError: 
try: - os.utime( - tmpfilename, - (time.mktime( - datetime.datetime.strptime( - pretmpfilename.get('Headers').get( - 'Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()), - time.mktime( - datetime.datetime.strptime( - pretmpfilename.get('Headers').get( - 'Last-Modified'), - "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) + os.utime(tmpfilename, (time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()), + time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()))) except ValueError: pass except ValueError: pass - returnval = { - 'Type': "File", - 'Filename': tmpfilename, - 'Filesize': pretmpfilename.get('Contentsize'), - 'FilesizeAlt': { - 'IEC': get_readable_size( - pretmpfilename.get('Contentsize'), - 2, - "IEC"), - 'SI': get_readable_size( - pretmpfilename.get('Contentsize'), - 2, - "SI")}, - 'Headers': pretmpfilename.get('Headers'), - 'Version': pretmpfilename.get('Version'), - 'Method': pretmpfilename.get('Method'), - 'HeadersSent': pretmpfilename.get('HeadersSent'), - 'URL': pretmpfilename.get('URL'), - 'Code': pretmpfilename.get('Code'), - 'Reason': pretmpfilename.get('Reason'), - 'HTTPLib': pretmpfilename.get('HTTPLib')} + returnval = {'Type': "File", 'Filename': tmpfilename, 'Filesize': pretmpfilename.get('Contentsize'), 'FilesizeAlt': {'IEC': get_readable_size(pretmpfilename.get('Contentsize'), 2, "IEC"), 'SI': get_readable_size(pretmpfilename.get('Contentsize'), 2, "SI")}, 'Headers': pretmpfilename.get( + 'Headers'), 'Version': pretmpfilename.get('Version'), 'Method': pretmpfilename.get('Method'), 'HeadersSent': pretmpfilename.get('HeadersSent'), 'URL': pretmpfilename.get('URL'), 'Code': pretmpfilename.get('Code'), 'Reason': pretmpfilename.get('Reason'), 'HTTPLib': pretmpfilename.get('HTTPLib')} f.write(pretmpfilename.get('Content')) f.close() exec_time_end = time.time() - log.info("It took " + hms_string(exec_time_start - - exec_time_end) + " to download file.") - returnval.update( - { - 'Filesize': os.path.getsize(tmpfilename), - 'FilesizeAlt': { - 'IEC': get_readable_size( - os.path.getsize(tmpfilename), - 2, - "IEC"), - 'SI': get_readable_size( - os.path.getsize(tmpfilename), - 2, - "SI")}, - 'DownloadTime': float( - exec_time_start - - exec_time_end), - 'DownloadTimeReadable': hms_string( - exec_time_start - - exec_time_end)}) + log.info("It took "+hms_string(exec_time_start - + exec_time_end)+" to download file.") + returnval.update({'Filesize': os.path.getsize(tmpfilename), 'FilesizeAlt': {'IEC': get_readable_size(os.path.getsize(tmpfilename), 2, "IEC"), 'SI': get_readable_size( + os.path.getsize(tmpfilename), 2, "SI")}, 'DownloadTime': float(exec_time_start - exec_time_end), 'DownloadTimeReadable': hms_string(exec_time_start - exec_time_end)}) return returnval -if (not havepysftp): - def download_from_url_file_with_pysftp( - httpurl, - httpheaders=geturls_headers, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - ranges=[ - None, - None], - buffersize=524288, - sleep=-1, - timeout=10): +if(not havepysftp): + def download_from_url_file_with_pysftp(httpurl, httpheaders=geturls_headers, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10): return False -if (havepysftp): - def download_from_url_to_file_with_pysftp( - httpurl, - httpheaders=geturls_headers, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - outfile="-", - 
outpath=os.getcwd(), - ranges=[ - None, - None], - buffersize=[ - 524288, - 524288], - sleep=-1, - timeout=10): +if(havepysftp): + def download_from_url_to_file_with_pysftp(httpurl, httpheaders=geturls_headers, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10): global geturls_download_sleep, havezstd, havebrotli - if (sleep < 0): + if(sleep < 0): sleep = geturls_download_sleep - if (timeout <= 0): + if(timeout <= 0): timeout = 10 - if (not outfile == "-"): + if(not outfile == "-"): outpath = outpath.rstrip(os.path.sep) - filepath = os.path.realpath(outpath + os.path.sep + outfile) - if (not os.path.exists(outpath)): + filepath = os.path.realpath(outpath+os.path.sep+outfile) + if(not os.path.exists(outpath)): os.makedirs(outpath) - if (os.path.exists(outpath) and os.path.isfile(outpath)): + if(os.path.exists(outpath) and os.path.isfile(outpath)): return False - if (os.path.exists(filepath) and os.path.isdir(filepath)): + if(os.path.exists(filepath) and os.path.isdir(filepath)): return False pretmpfilename = download_from_url_file_with_pysftp( - httpurl, - httpheaders, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize[0], - sleep, - timeout) - if (not pretmpfilename): + httpurl, httpheaders, httpcookie, httpmethod, postdata, ranges, buffersize[0], sleep, timeout) + if(not pretmpfilename): return False tmpfilename = pretmpfilename.get('Filename') downloadsize = int(os.path.getsize(tmpfilename)) fulldatasize = 0 - log.info("Moving file " + tmpfilename + " to " + filepath) + log.info("Moving file "+tmpfilename+" to "+filepath) exec_time_start = time.time() shutil.move(tmpfilename, filepath) exec_time_end = time.time() - log.info("It took " + hms_string(exec_time_start - - exec_time_end) + " to move file.") - if (os.path.exists(tmpfilename)): + log.info("It took "+hms_string(exec_time_start - + exec_time_end)+" to move file.") + if(os.path.exists(tmpfilename)): os.remove(tmpfilename) - returnval = { - 'Type': "File", - 'Filename': filepath, - 'Filesize': downloadsize, - 'FilesizeAlt': { - 'IEC': get_readable_size( - downloadsize, - 2, - "IEC"), - 'SI': get_readable_size( - downloadsize, - 2, - "SI")}, - 'DownloadTime': pretmpfilename.get('DownloadTime'), - 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), - 'MoveFileTime': float( - exec_time_start - exec_time_end), - 'MoveFileTimeReadable': hms_string( - exec_time_start - exec_time_end), - 'Headers': pretmpfilename.get('Headers'), - 'Version': pretmpfilename.get('Version'), - 'Method': pretmpfilename.get('Method'), - 'Method': None, - 'HeadersSent': pretmpfilename.get('HeadersSent'), - 'URL': pretmpfilename.get('URL'), - 'Code': pretmpfilename.get('Code'), - 'Reason': pretmpfilename.get('Reason'), - 'HTTPLib': pretmpfilename.get('HTTPLib')} - if (outfile == "-"): + returnval = {'Type': "File", 'Filename': filepath, 'Filesize': downloadsize, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename.get('DownloadTime'), 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), 'MoveFileTime': float(exec_time_start - exec_time_end), 'MoveFileTimeReadable': hms_string( + exec_time_start - exec_time_end), 'Headers': pretmpfilename.get('Headers'), 'Version': pretmpfilename.get('Version'), 'Method': pretmpfilename.get('Method'), 'HeadersSent': pretmpfilename.get('HeadersSent'), 'URL': 
pretmpfilename.get('URL'), 'Code': pretmpfilename.get('Code'), 'Reason': pretmpfilename.get('Reason'), 'HTTPLib': pretmpfilename.get('HTTPLib')} + if(outfile == "-"): pretmpfilename = download_from_url_file_with_pysftp( - httpurl, - httpheaders, - httpcookie, - httpmethod, - postdata, - ranges, - buffersize[0], - sleep, - timeout) + httpurl, httpheaders, httpcookie, httpmethod, postdata, ranges, buffersize[0], sleep, timeout) tmpfilename = pretmpfilename.get('Filename') downloadsize = int(os.path.getsize(tmpfilename)) fulldatasize = 0 @@ -12487,31 +6199,12 @@ def download_from_url_to_file_with_pysftp( datasize = len(databytes) fulldatasize = datasize + fulldatasize percentage = "" - if (downloadsize > 0): - percentage = str( - "{0:.2f}".format( - float( - float( - fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.') + "%" + if(downloadsize > 0): + percentage = str("{0:.2f}".format( + float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%" downloaddiff = fulldatasize - prevdownsize - log.info( - "Copying " + - get_readable_size( - fulldatasize, - 2, - "SI")['ReadableWithSuffix'] + - " / " + - get_readable_size( - downloadsize, - 2, - "SI")['ReadableWithSuffix'] + - " " + - str(percentage) + - " / Copied " + - get_readable_size( - downloaddiff, - 2, - "IEC")['ReadableWithSuffix']) + log.info("Copying "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")[ + 'ReadableWithSuffix']+" "+str(percentage)+" / Copied "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']) prevdownsize = fulldatasize f.write(databytes) f.seek(0) @@ -12520,94 +6213,50 @@ def download_from_url_to_file_with_pysftp( ft.close() os.remove(tmpfilename) exec_time_end = time.time() - log.info("It took " + hms_string(exec_time_start - - exec_time_end) + " to copy file.") - returnval = { - 'Type': "Content", - 'Content': fdata, - 'Contentsize': downloadsize, - 'ContentsizeAlt': { - 'IEC': get_readable_size( - downloadsize, - 2, - "IEC"), - 'SI': get_readable_size( - downloadsize, - 2, - "SI")}, - 'DownloadTime': pretmpfilename.get('DownloadTime'), - 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), - 'MoveFileTime': float( - exec_time_start - exec_time_end), - 'MoveFileTimeReadable': hms_string( - exec_time_start - exec_time_end), - 'Headers': pretmpfilename.get('Headers'), - 'Version': pretmpfilename.get('Version'), - 'Method': pretmpfilename.get('Method'), - 'Method': None, - 'HeadersSent': pretmpfilename.get('HeadersSent'), - 'URL': pretmpfilename.get('URL'), - 'Code': pretmpfilename.get('Code'), - 'Reason': pretmpfilename.get('Reason'), - 'HTTPLib': pretmpfilename.get('HTTPLib')} + log.info("It took "+hms_string(exec_time_start - + exec_time_end)+" to copy file.") + returnval = {'Type': "Content", 'Content': fdata, 'Contentsize': downloadsize, 'ContentsizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename.get('DownloadTime'), 'DownloadTimeReadable': pretmpfilename.get('DownloadTimeReadable'), 'MoveFileTime': float(exec_time_start - exec_time_end), 'MoveFileTimeReadable': hms_string( + exec_time_start - exec_time_end), 'Headers': pretmpfilename.get('Headers'), 'Version': pretmpfilename.get('Version'), 'Method': pretmpfilename.get('Method'), 'HeadersSent': pretmpfilename.get('HeadersSent'), 'URL': pretmpfilename.get('URL'), 'Code': pretmpfilename.get('Code'), 'Reason': pretmpfilename.get('Reason'), 'HTTPLib': 
pretmpfilename.get('HTTPLib')} return returnval -if (not havepysftp): - def download_from_url_to_file_with_pysftp( - httpurl, - httpheaders=geturls_headers, - httpcookie=geturls_cj, - httpmethod="GET", - postdata=None, - outfile="-", - outpath=os.getcwd(), - ranges=[ - None, - None], - buffersize=[ - 524288, - 524288], - sleep=-1, - timeout=10): +if(not havepysftp): + def download_from_url_to_file_with_pysftp(httpurl, httpheaders=geturls_headers, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10): return False -if (havepysftp): +if(havepysftp): def upload_file_to_pysftp_file(sftpfile, url): urlparts = urlparse.urlparse(url) file_name = os.path.basename(urlparts.path) file_dir = os.path.dirname(urlparts.path) sftp_port = urlparts.port - if (urlparts.scheme == "http" or urlparts.scheme == "https"): + if(urlparts.scheme == "http" or urlparts.scheme == "https"): return False - if (urlparts.port is None): + if(urlparts.port is None): sftp_port = 22 else: sftp_port = urlparts.port - if (urlparts.username is not None): + if(urlparts.username is not None): sftp_username = urlparts.username else: sftp_username = "anonymous" - if (urlparts.password is not None): + if(urlparts.password is not None): sftp_password = urlparts.password - elif (urlparts.password is None and urlparts.username == "anonymous"): + elif(urlparts.password is None and urlparts.username == "anonymous"): sftp_password = "anonymous" else: sftp_password = "" - if (urlparts.scheme != "sftp"): + if(urlparts.scheme != "sftp"): return False try: - pysftp.Connection( - urlparts.hostname, - port=sftp_port, - username=urlparts.username, - password=urlparts.password) + sftp = pysftp.Connection(urlparts.hostname, port=sftp_port, + username=sftp_username, password=sftp_password) except paramiko.ssh_exception.SSHException: return False except socket.gaierror: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+url) return False except socket.timeout: - log.info("Error With URL " + httpurl) + log.info("Error With URL "+url) return False sftp.putfo(sftpfile, urlparts.path) @@ -12619,7 +6268,7 @@ def upload_file_to_pysftp_file(sftpfile, url): def upload_file_to_pysftp_file(sftpfile, url): return False -if (havepysftp): +if(havepysftp): def upload_file_to_pysftp_string(sftpstring, url): sftpfileo = BytesIO(sftpstring) sftpfile = upload_file_to_pysftp_file(sftpfileo, url) diff --git a/setup.py b/setup.py index 9a5223b..feb8b9a 100755 --- a/setup.py +++ b/setup.py @@ -15,53 +15,40 @@ $FileInfo: setup.py - Last Update: 10/5/2023 Ver. 2.0.2 RC 1 - Author: cooldude2k $ ''' -import datetime -import os -import platform import re -import shutil +import os import sys import time - +import shutil +import datetime +import platform import pkg_resources -from setuptools import find_packages, setup +from setuptools import setup, find_packages -verinfofilename = os.path.realpath( - "." 
+ os.path.sep + os.path.sep + "pywwwget.py") +verinfofilename = os.path.realpath("."+os.path.sep+"pywwwget.py") verinfofile = open(verinfofilename, "r") verinfodata = verinfofile.read() verinfofile.close() -setuppy_verinfo_esc = re.escape( - "__version_info__ = (") + "(.*)" + re.escape(");") +setuppy_verinfo_esc = re.escape("__version_info__ = (")+"(.*)"+re.escape(");") setuppy_verinfo = re.findall(setuppy_verinfo_esc, verinfodata)[0] setuppy_verinfo_exp = [vergetspt.strip().replace("\"", "") for vergetspt in setuppy_verinfo.split(',')] setuppy_dateinfo_esc = re.escape( - "__version_date_info__ = (") + "(.*)" + re.escape(");") + "__version_date_info__ = (")+"(.*)"+re.escape(");") setuppy_dateinfo = re.findall(setuppy_dateinfo_esc, verinfodata)[0] setuppy_dateinfo_exp = [vergetspt.strip().replace("\"", "") for vergetspt in setuppy_dateinfo.split(',')] pymodule = {} -pymodule['version'] = str(setuppy_verinfo_exp[0]) + "." + \ - str(setuppy_verinfo_exp[1]) + "." + str(setuppy_verinfo_exp[2]) +pymodule['version'] = str(setuppy_verinfo_exp[0])+"." + \ + str(setuppy_verinfo_exp[1])+"."+str(setuppy_verinfo_exp[2]) pymodule['versionrc'] = int(setuppy_verinfo_exp[4]) -pymodule['versionlist'] = ( - int( - setuppy_verinfo_exp[0]), int( - setuppy_verinfo_exp[1]), int( - setuppy_verinfo_exp[2]), str( - setuppy_verinfo_exp[3]), int( - setuppy_verinfo_exp[4])) -pymodule['verdate'] = str(setuppy_dateinfo_exp[0]) + "." + \ - str(setuppy_dateinfo_exp[1]) + "." + str(setuppy_dateinfo_exp[2]) +pymodule['versionlist'] = (int(setuppy_verinfo_exp[0]), int(setuppy_verinfo_exp[1]), int( + setuppy_verinfo_exp[2]), str(setuppy_verinfo_exp[3]), int(setuppy_verinfo_exp[4])) +pymodule['verdate'] = str(setuppy_dateinfo_exp[0])+"." + \ + str(setuppy_dateinfo_exp[1])+"."+str(setuppy_dateinfo_exp[2]) pymodule['verdaterc'] = int(setuppy_dateinfo_exp[4]) -pymodule['verdatelist'] = ( - int( - setuppy_dateinfo_exp[0]), int( - setuppy_dateinfo_exp[1]), int( - setuppy_dateinfo_exp[2]), str( - setuppy_dateinfo_exp[3]), int( - setuppy_dateinfo_exp[4])) +pymodule['verdatelist'] = (int(setuppy_dateinfo_exp[0]), int(setuppy_dateinfo_exp[1]), int( + setuppy_dateinfo_exp[2]), str(setuppy_dateinfo_exp[3]), int(setuppy_dateinfo_exp[4])) pymodule['name'] = 'PyWWW-Get' pymodule['author'] = 'Kazuki Przyborowski' pymodule['authoremail'] = 'kazuki.przyborowski@gmail.com' @@ -98,22 +85,14 @@ 'Topic :: Software Development :: Libraries', 'Topic :: Software Development :: Libraries :: Python Modules' ] -if (len(sys.argv) > 1 and ( - sys.argv[1] == "versioninfo" or sys.argv[1] == "getversioninfo")): +if(len(sys.argv) > 1 and (sys.argv[1] == "versioninfo" or sys.argv[1] == "getversioninfo")): import json pymodule_data = json.dumps(pymodule) print(pymodule_data) sys.exit() -if (len(sys.argv) > 1 and ( - sys.argv[1] == "sourceinfo" or sys.argv[1] == "getsourceinfo")): - srcinfofilename = os.path.realpath( - "." + - os.path.sep + - pkg_resources.to_filename( - pymodule['name']) + - ".egg-info" + - os.path.sep + - "SOURCES.txt") +if(len(sys.argv) > 1 and (sys.argv[1] == "sourceinfo" or sys.argv[1] == "getsourceinfo")): + srcinfofilename = os.path.realpath("."+os.path.sep+pkg_resources.to_filename( + pymodule['name'])+".egg-info"+os.path.sep+"SOURCES.txt") srcinfofile = open(srcinfofilename, "r") srcinfodata = srcinfofile.read() srcinfofile.close() @@ -121,14 +100,13 @@ srcfilelist = "" srcpdir = os.path.basename(os.path.dirname(os.path.realpath(__file__))) for ifile in srcinfolist: - srcfilelist = "." 
+ os.path.sep + srcpdir + \ - os.path.sep + ifile + " " + srcfilelist + srcfilelist = "."+os.path.sep+srcpdir+os.path.sep+ifile+" "+srcfilelist print(srcfilelist) sys.exit() -if (len(sys.argv) > 1 and sys.argv[1] == "cleansourceinfo"): - os.system("rm -rfv \"" + os.path.realpath("." + os.path.sep + "dist\"")) - os.system("rm -rfv \"" + os.path.realpath("." + os.path.sep + - pkg_resources.to_filename(pymodule['name']) + ".egg-info\"")) +if(len(sys.argv) > 1 and sys.argv[1] == "cleansourceinfo"): + os.system("rm -rfv \""+os.path.realpath("."+os.path.sep+"dist")+"\"") + os.system("rm -rfv \""+os.path.realpath("."+os.path.sep + + pkg_resources.to_filename(pymodule['name'])+".egg-info")+"\"") sys.exit() setup(
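
Several of the returnval dict literals removed above list 'Method' twice ('Method': pretmpfilename.get('Method') immediately followed by 'Method': None). A Python dict literal keeps only the last occurrence of a repeated key, so those dicts always reported 'Method': None regardless of the real request method. A minimal, self-contained sketch of the pitfall (hypothetical values, not taken from this patch):

# Duplicate keys in a dict literal do not raise; the last one silently wins.
returnval = {'Method': "GET", 'Method': None}
assert returnval['Method'] is None  # the earlier "GET" is lost

# Building the dict once and extending it with update() keeps each key unique.
returnval = {'Method': "GET"}
returnval.update({'URL': "ftp://ftp.example.com/pub/file.txt"})  # hypothetical URL
assert returnval['Method'] == "GET"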