From 9a534cc497e8916b4bbf9f275672d2c5bc99c654 Mon Sep 17 00:00:00 2001 From: Andrei Hutusoru Date: Tue, 5 Feb 2019 13:53:51 +0200 Subject: [PATCH] [issue_415] Add support for python 3 --- .appveyor.yml | 24 ++++++----- .gitignore | 2 + .travis.yml | 42 ++++++++++++------- .travis/install.sh | 30 +++++++++++++ .travis/run.sh | 10 +++++ mozdownload/parser.py | 6 +-- mozdownload/scraper.py | 27 ++++++------ mozdownload/treeherder.py | 6 +-- requirements.txt | 6 +-- tests/cli/test_cli_arguments.py | 2 +- tests/cli/test_cli_print_url.py | 2 +- tests/cli/test_correct_scraper.py | 6 +-- tests/cli/test_output.py | 8 +++- tests/daily_scraper/test_daily_scraper.py | 12 +++--- .../directory_parser/test_directory_parser.py | 6 ++- .../test_release_candidate_scraper.py | 5 +-- .../test_release_candidate_scraper_latest.py | 6 +-- tests/release_scraper/test_release_scraper.py | 5 +-- .../test_release_scraper_latest.py | 5 +-- tests/remote/test_devedition.py | 11 +++-- tests/remote/test_firefox.py | 29 +++++++------ tests/remote/test_thunderbird.py | 23 +++++----- .../test_tinderbox_scraper.py | 9 ++-- tests/treeherder/test_api.py | 10 +++-- tests/try_scraper/test_invalid_revision.py | 3 +- tests/try_scraper/test_try_scraper.py | 6 ++- tox.ini | 2 +- 27 files changed, 183 insertions(+), 120 deletions(-) create mode 100755 .travis/install.sh create mode 100755 .travis/run.sh diff --git a/.appveyor.yml b/.appveyor.yml index a8e741f6..c41d4ed8 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -1,20 +1,22 @@ build: false environment: - PYTHON: "C:\\Python27" - TOXENV: "py27" + matrix: + - PYTHON: "C:\\Python27" + PYTHON_VERSION: "2.7" + TOXENV: "py27" + - PYTHON: "C:\\Python36" + PYTHON_VERSION: "3.6" + TOXENV: "py36" init: - - ECHO %PYTHON% - - ECHO %TOXENV% - - SET PATH=%PYTHON%;%PYTHON%\\Scripts;%PATH% + - ECHO %PYTHON% + - ECHO %TOXENV% + - SET PATH=%PYTHON%;%PYTHON%\\Scripts;%PATH% install: - - python -m pip install --upgrade pip - # Latest tox 2.3.x is currently busted: - # https://bitbucket.org/hpk42/tox/issues/314/tox-command-busted-on-windows - - pip install tox==2.2.0 - + - python -m pip install --upgrade pip + - pip install tox==3.7.0 test_script: - - tox + - tox diff --git a/.gitignore b/.gitignore index fa868448..a2aa60e8 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,8 @@ +.idea/ .cache/ .tox/ *.egg-info *.pyc +.coverage build dist diff --git a/.travis.yml b/.travis.yml index 541d85ef..ef63b3d8 100644 --- a/.travis.yml +++ b/.travis.yml @@ -8,32 +8,42 @@ language: python matrix: include: - - python: 2.7 + - name: "Run test suite on Linux with python 2.7" os: linux + python: 2.7 env: TOXENV=py27 - - python: 2.7 + - name: "Run test suite on Linux with python 3.6" os: linux + python: 3.6 + env: TOXENV=py36 + - name: "Check code style with pylama on Linux with python 2.7" + os: linux + python: 2.7 env: TOXENV=pylama - - language: generic - os: osx - # 7.2 is OS X 10.11.x + - name: "Check code style with pylama on Linux with python 3.6" + os: linux + python: 3.6 + env: TOXENV=pylama + # 10.1 is OS X 10.13.x # https://docs.travis-ci.com/user/languages/objective-c/#Supported-OS-X-iOS-SDK-versions - osx_image: xcode7.2 + - name: "Run test suite on OS X with python 2.7" + os: osx + osx_image: xcode10.1 + language: generic + python: 2.7 env: TOXENV=py27 + - name: "Run test suite on OS X with python 3.6" + language: generic + os: osx + env: + - TOXENV=py36 + - HOMEBREW_NO_AUTO_UPDATE=1 install: - - if [ "$TRAVIS_OS_NAME" = "osx" ]; then - curl -O -s https://bootstrap.pypa.io/get-pip.py; - python get-pip.py --user; - export PATH=$PATH:~/Library/Python/2.7/bin; - pip install --user tox virtualenv; - else - pip install tox virtualenv; - fi - + - ./.travis/install.sh script: - - tox + - ./.travis/run.sh notifications: email: diff --git a/.travis/install.sh b/.travis/install.sh new file mode 100755 index 00000000..a6b33034 --- /dev/null +++ b/.travis/install.sh @@ -0,0 +1,30 @@ +#!/bin/bash + +set -e +set -x + +ci_requirements="pip setuptools tox virtualenv" + +if [ "$TRAVIS_OS_NAME" == "osx" ]; then + if [[ ${TOXENV} == *"py27"* ]]; then + # install pip on the system python + curl -O https://bootstrap.pypa.io/get-pip.py + python get-pip.py --user + python -m pip install --user virtualenv + python -m virtualenv .venv/ + elif [[ ${TOXENV} == *"py3"* ]]; then + # install current python3 with homebrew + # NOTE: the formula is now named just "python" + brew install python + command -v python3 + python3 --version + python3 -m pip install virtualenv + python3 -m virtualenv .venv/ + else + echo "unsupported $TOXENV: "${TOXENV} + exit 1 + fi + source .venv/bin/activate +fi + +python -m pip install $ci_requirements diff --git a/.travis/run.sh b/.travis/run.sh new file mode 100755 index 00000000..6804f7dc --- /dev/null +++ b/.travis/run.sh @@ -0,0 +1,10 @@ +#!/bin/bash + +set -e +set -x + +if [ "$TRAVIS_OS_NAME" == "osx" ]; then + source .venv/bin/activate +fi + +tox diff --git a/mozdownload/parser.py b/mozdownload/parser.py index 8bb4ec30..880ad398 100644 --- a/mozdownload/parser.py +++ b/mozdownload/parser.py @@ -7,10 +7,10 @@ from __future__ import absolute_import, unicode_literals import re -import urllib -from HTMLParser import HTMLParser import requests +from six.moves.html_parser import HTMLParser +from six.moves.urllib.parse import unquote class DirectoryParser(HTMLParser): @@ -68,7 +68,7 @@ def handle_starttag(self, tag, attrs): # Links look like: /pub/firefox/nightly/2015/ # We have to trim the fragment down to the last item. Also to ensure we # always get it, we remove a possible final slash first - url = urllib.unquote(attr[1]) + url = unquote(attr[1]) self.active_url = url.rstrip('/').split('/')[-1] return diff --git a/mozdownload/scraper.py b/mozdownload/scraper.py index 7e3f0dd4..cf87cefc 100755 --- a/mozdownload/scraper.py +++ b/mozdownload/scraper.py @@ -10,22 +10,21 @@ import os import re import sys -import urllib from datetime import datetime -from urlparse import urlparse import mozinfo import progressbar as pb import redo import requests +from six.moves.urllib.parse import quote +from six.moves.urllib.parse import urlparse from mozdownload import errors +from mozdownload import treeherder from mozdownload.parser import DirectoryParser from mozdownload.timezones import PacificTimezone -from mozdownload import treeherder from mozdownload.utils import urljoin - APPLICATIONS = ('devedition', 'firefox', 'fennec', 'thunderbird') # Some applications contain all locales in a single build @@ -164,6 +163,7 @@ def _create_directory_parser(self, url): @property def binary(self): """Return the name of the build.""" + def _get_binary(): # Retrieve all entries from the remote virtual folder parser = self._create_directory_parser(self.path) @@ -195,8 +195,8 @@ def binary_regex(self): @property def url(self): """Return the URL of the build.""" - return urllib.quote(urljoin(self.path, self.binary), - safe='%/:=&?~#+!$,;\'@()*[]|') + return quote(urljoin(self.path, self.binary), + safe='%/:=&?~#+!$,;\'@()*[]|') @property def path(self): @@ -248,6 +248,7 @@ def detect_platform(self): def download(self): """Download the specified file.""" + def total_seconds(td): # Keep backward compatibility with Python 2.6 which doesn't have # this method @@ -417,7 +418,7 @@ def get_latest_build_date(self): parser.entries = parser.filter(r'.*%s\.txt' % self.platform_regex) if not parser.entries: message = 'Status file for %s build cannot be found' % \ - self.platform_regex + self.platform_regex raise errors.NotFoundError(message, url) # Read status file for the platform, retrieve build id, @@ -467,13 +468,13 @@ def get_build_info_for_date(self, date, build_index=None): # ensure to select the correct subfolder for localized builds 'L10N': '(-l10n)?' if self.locale_build else '', 'PLATFORM': '' if self.application not in ( - 'fennec') else '-' + self.platform + 'fennec') else '-' + self.platform } parser.entries = parser.filter(regex) parser.entries = parser.filter(self.is_build_dir) - if has_time: + if date.strftime('%H-%M-%S') != '00-00-00': # If a time is included in the date, use it to determine the # build's index regex = r'.*%s.*' % date.strftime('%H-%M-%S') @@ -482,7 +483,7 @@ def get_build_info_for_date(self, date, build_index=None): if not parser.entries: date_format = '%Y-%m-%d-%H-%M-%S' if has_time else '%Y-%m-%d' message = 'Folder for builds on %s has not been found' % \ - self.date.strftime(date_format) + self.date.strftime(date_format) raise errors.NotFoundError(message, url) # If no index has been given, set it to the last build of the day. @@ -690,7 +691,7 @@ def get_build_info(self): parser = self._create_directory_parser(url) if not parser.entries: message = 'Folder for specific candidate builds at %s has not' \ - 'been found' % url + 'been found' % url raise errors.NotFoundError(message, url) self.show_matching_builds(parser.entries) @@ -909,11 +910,11 @@ def get_build_info_for_index(self, build_index=None): # If a timestamp is given, retrieve the folder with the timestamp # as name parser.entries = self.timestamp in parser.entries and \ - [self.timestamp] + [self.timestamp] elif self.date: # If date is given, retrieve the subset of builds on that date - parser.entries = filter(self.date_matches, parser.entries) + parser.entries = list(filter(self.date_matches, parser.entries)) if not parser.entries: message = 'No builds have been found' diff --git a/mozdownload/treeherder.py b/mozdownload/treeherder.py index 2dc0f648..01447623 100644 --- a/mozdownload/treeherder.py +++ b/mozdownload/treeherder.py @@ -7,12 +7,12 @@ from __future__ import absolute_import, unicode_literals import logging +import six from thclient import TreeherderClient from mozdownload.errors import NotSupportedError - PLATFORM_MAP = { 'android-api-9': {'build_platform': 'android-2-3-armv7-api9'}, 'android-api-11': {'build_platform': 'android-4-0-armv7-api11'}, @@ -69,11 +69,11 @@ def query_builds_by_revision(self, revision, job_type_name='Build', debug_build= try: self.logger.info('Querying {url} for list of builds for revision: {revision}'.format( - url=self.client.server_url, revision=revision)) + url=self.client.server_url, revision=revision)) # Retrieve the option hash to filter for type of build (opt, and debug for now) option_hash = None - for key, values in self.client.get_option_collection_hash().iteritems(): + for key, values in six.iteritems(self.client.get_option_collection_hash()): for value in values: if value['name'] == ('debug' if debug_build else 'opt'): option_hash = key diff --git a/requirements.txt b/requirements.txt index d661fb6f..58cbe992 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,5 @@ -mozinfo >= 0.9 +mozinfo >= 1.0.0 progressbar2 >= 3.34.3 redo==2.0.2 -requests >= 2.9.1, <3.0.0 -treeherder-client >= 4.0.0, <5.0.0 +requests >= 2.21.0, <3.0.0 +treeherder-client >= 5.0.0, <6.0.0 diff --git a/tests/cli/test_cli_arguments.py b/tests/cli/test_cli_arguments.py index 8e1b7668..2ae41df2 100644 --- a/tests/cli/test_cli_arguments.py +++ b/tests/cli/test_cli_arguments.py @@ -18,4 +18,4 @@ def test_unrecognized_argument(self): except subprocess.CalledProcessError as e: output = e.output - self.assertRegexpMatches(output, r'mozdownload: error: unrecognized arguments: --abc') + self.assertRegexpMatches(output, r'mozdownload: error: unrecognized arguments: --abc'.encode('utf-8')) diff --git a/tests/cli/test_cli_print_url.py b/tests/cli/test_cli_print_url.py index d3e551a0..173a4c50 100644 --- a/tests/cli/test_cli_print_url.py +++ b/tests/cli/test_cli_print_url.py @@ -21,4 +21,4 @@ def test_print_url_argument(self): except subprocess.CalledProcessError as e: output = e.output - self.assertRegexpMatches(output, url) + self.assertRegexpMatches(output, url.encode('utf-8')) diff --git a/tests/cli/test_correct_scraper.py b/tests/cli/test_correct_scraper.py index e781399b..b14903fb 100644 --- a/tests/cli/test_correct_scraper.py +++ b/tests/cli/test_correct_scraper.py @@ -5,13 +5,13 @@ # You can obtain one at http://mozilla.org/MPL/2.0/. import os +import six import mozfile +import mozhttpd_base_test as mhttpd from mock import patch from mozdownload import cli -import mozhttpd_base_test as mhttpd - tests = { 'release': { @@ -47,7 +47,7 @@ class TestCLICorrectScraper(mhttpd.MozHttpdBaseTest): @patch('mozdownload.treeherder.Treeherder.query_builds_by_revision') def test_cli_scraper(self, query_builds_by_revision): - for scraper_type, data in tests.iteritems(): + for scraper_type, data in six.iteritems(tests): if data.get('builds'): query_builds_by_revision.return_value = data['builds'] diff --git a/tests/cli/test_output.py b/tests/cli/test_output.py index 2e5060b8..1eb85b6a 100644 --- a/tests/cli/test_output.py +++ b/tests/cli/test_output.py @@ -1,3 +1,9 @@ +#!/usr/bin/env python + +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this file, +# You can obtain one at http://mozilla.org/MPL/2.0/. + import subprocess import unittest @@ -10,4 +16,4 @@ class TestCLIOutput(unittest.TestCase): def test_cli_executes(self): """Test that cli will start and print usage message""" output = subprocess.check_output(['mozdownload', '--help']) - self.assertTrue(cli.__doc__.format(__version__) in output) + self.assertTrue(cli.__doc__.format(__version__) in output.decode("utf-8")) diff --git a/tests/daily_scraper/test_daily_scraper.py b/tests/daily_scraper/test_daily_scraper.py index 78b19901..42403364 100644 --- a/tests/daily_scraper/test_daily_scraper.py +++ b/tests/daily_scraper/test_daily_scraper.py @@ -5,8 +5,9 @@ # You can obtain one at http://mozilla.org/MPL/2.0/. import os -import urllib + import pytest +from six.moves.urllib.parse import unquote from mozdownload import DailyScraper from mozdownload.utils import urljoin @@ -48,15 +49,15 @@ ({'platform': 'win32', 'branch': 'mozilla-central', 'date': '2013-07-02', 'build_number': 1}, '2013-07-02-03-12-13-mozilla-central-firefox-27.0a1.en-US.win32.installer.exe', 'firefox/nightly/2013/07/2013-07-02-03-12-13-mozilla-central/firefox-27.0a1.en-US.win32.installer.exe'), - # Old stub format + # Old stub format ({'platform': 'win32', 'branch': 'mozilla-central', 'date': '2013-09-30', 'is_stub_installer': True}, '2013-09-30-03-02-04-mozilla-central-firefox-27.0a1.en-US.win32.installer-stub.exe', 'firefox/nightly/2013/09/2013-09-30-03-02-04-mozilla-central/firefox-27.0a1.en-US.win32.installer-stub.exe'), - # Old file name format + # Old file name format ({'platform': 'win64', 'branch': 'mozilla-central', 'date': '2013-09-30'}, '2013-09-30-03-02-04-mozilla-central-firefox-27.0a1.en-US.win64-x86_64.installer.exe', 'firefox/nightly/2013/09/2013-09-30-03-02-04-mozilla-central/firefox-27.0a1.en-US.win64-x86_64.installer.exe'), - # New stub format + # New stub format ({'platform': 'win32', 'branch': 'mozilla-central', 'is_stub_installer': True}, '2013-10-01-03-02-04-mozilla-central-Firefox Installer.en-US.exe', 'firefox/nightly/2013/10/2013-10-01-03-02-04-mozilla-central/Firefox Installer.en-US.exe'), @@ -126,6 +127,7 @@ 'mobile/nightly/2016/02/2016-02-02-00-40-08-mozilla-aurora-android-api-15/fennec-46.0a2.multi.android-arm.apk'), ] + @pytest.mark.parametrize("args,filename,url", firefox_tests + thunderbird_tests + fennec_tests) def test_scraper(httpd, tmpdir, args, filename, url): """Testing various download scenarios for DailyScraper""" @@ -134,4 +136,4 @@ def test_scraper(httpd, tmpdir, args, filename, url): expected_target = os.path.join(str(tmpdir), filename) assert scraper.filename == expected_target - assert urllib.unquote(scraper.url) == urljoin(httpd.get_url(), url) + assert unquote(scraper.url) == urljoin(httpd.get_url(), url) diff --git a/tests/directory_parser/test_directory_parser.py b/tests/directory_parser/test_directory_parser.py index 62f38127..f0e04b51 100644 --- a/tests/directory_parser/test_directory_parser.py +++ b/tests/directory_parser/test_directory_parser.py @@ -5,6 +5,7 @@ # You can obtain one at http://mozilla.org/MPL/2.0/. import os +import six from mozdownload.parser import DirectoryParser from mozdownload.utils import urljoin @@ -48,7 +49,10 @@ def test_filter(self): parser.entries = parser.filter(r'^\d+$') # Get only the subdirectories of the folder - dirs = os.walk(folder_path).next()[1] + if six.PY2: + dirs = os.walk(folder_path).next()[1] + elif six.PY3: + dirs = os.walk(folder_path).__next__()[1] dirs.sort() self.assertEqual(parser.entries, dirs) diff --git a/tests/release_candidate_scraper/test_release_candidate_scraper.py b/tests/release_candidate_scraper/test_release_candidate_scraper.py index 7e4ea5db..a8d8b40c 100644 --- a/tests/release_candidate_scraper/test_release_candidate_scraper.py +++ b/tests/release_candidate_scraper/test_release_candidate_scraper.py @@ -5,9 +5,9 @@ # You can obtain one at http://mozilla.org/MPL/2.0/. import os -import urllib import pytest +from six.moves.urllib.parse import unquote from mozdownload import ReleaseCandidateScraper from mozdownload.utils import urljoin @@ -63,11 +63,10 @@ 'thunderbird-17.0-build3.de.win32.exe', 'thunderbird/candidates/17.0-candidates/build3/win32/de/Thunderbird Setup 17.0.exe'), ]) - def test_scraper(httpd, tmpdir, args, filename, url): """Testing various download scenarios for ReleaseCandidateScraper""" scraper = ReleaseCandidateScraper(destination=str(tmpdir), base_url=httpd.get_url(), **args) expected_filename = os.path.join(str(tmpdir), filename) assert scraper.filename == expected_filename - assert urllib.unquote(scraper.url) == urljoin(httpd.get_url(), url) + assert unquote(scraper.url) == urljoin(httpd.get_url(), url) diff --git a/tests/release_candidate_scraper/test_release_candidate_scraper_latest.py b/tests/release_candidate_scraper/test_release_candidate_scraper_latest.py index 2111dcc7..20ef57c3 100644 --- a/tests/release_candidate_scraper/test_release_candidate_scraper_latest.py +++ b/tests/release_candidate_scraper/test_release_candidate_scraper_latest.py @@ -5,9 +5,9 @@ # You can obtain one at http://mozilla.org/MPL/2.0/. import os -import urllib import pytest +from six.moves.urllib.parse import unquote from mozdownload import ReleaseCandidateScraper from mozdownload.utils import urljoin @@ -96,12 +96,10 @@ 'thunderbird-17.0.1esr-build1.en-US.win32.exe', 'thunderbird/candidates/17.0.1esr-candidates/build1/win32/en-US/Thunderbird Setup 17.0.1esr.exe'), ]) - - def test_latest_build(httpd, tmpdir, args, filename, url): """Testing various download scenarios for latest release candidate builds""" scraper = ReleaseCandidateScraper(destination=str(tmpdir), base_url=httpd.get_url(), **args) expected_filename = os.path.join(str(tmpdir), filename) assert scraper.filename == expected_filename - assert urllib.unquote(scraper.url) == urljoin(httpd.get_url(), url) + assert unquote(scraper.url) == urljoin(httpd.get_url(), url) diff --git a/tests/release_scraper/test_release_scraper.py b/tests/release_scraper/test_release_scraper.py index 6131eadf..49e0f50a 100644 --- a/tests/release_scraper/test_release_scraper.py +++ b/tests/release_scraper/test_release_scraper.py @@ -5,9 +5,9 @@ # You can obtain one at http://mozilla.org/MPL/2.0/. import os -import urllib import pytest +from six.moves.urllib.parse import unquote from mozdownload import ReleaseScraper from mozdownload.utils import urljoin @@ -63,11 +63,10 @@ 'thunderbird-17.0.de.win32.exe', 'thunderbird/releases/17.0/win32/de/Thunderbird Setup 17.0.exe'), ]) - def test_release_scraper(httpd, tmpdir, args, filename, url): """Testing various download scenarios for ReleaseScraper""" scraper = ReleaseScraper(destination=str(tmpdir), base_url=httpd.get_url(), **args) expected_filename = os.path.join(str(tmpdir), filename) assert scraper.filename == expected_filename - assert urllib.unquote(scraper.url) == urljoin(httpd.get_url(), url) + assert unquote(scraper.url) == urljoin(httpd.get_url(), url) diff --git a/tests/release_scraper/test_release_scraper_latest.py b/tests/release_scraper/test_release_scraper_latest.py index c67e7d57..55e97e78 100644 --- a/tests/release_scraper/test_release_scraper_latest.py +++ b/tests/release_scraper/test_release_scraper_latest.py @@ -5,9 +5,9 @@ # You can obtain one at http://mozilla.org/MPL/2.0/. import os -import urllib import pytest +from six.moves.urllib.parse import unquote from mozdownload import ReleaseScraper from mozdownload.utils import urljoin @@ -96,11 +96,10 @@ 'thunderbird-17.0.1esr.en-US.win32.exe', 'thunderbird/releases/17.0.1esr/win32/en-US/Thunderbird Setup 17.0.1esr.exe'), ]) - def test_latest_build(httpd, tmpdir, args, filename, url): """Testing various download scenarios for latest release builds""" scraper = ReleaseScraper(destination=str(tmpdir), base_url=httpd.get_url(), **args) expected_filename = os.path.join(str(tmpdir), filename) assert scraper.filename == expected_filename - assert urllib.unquote(scraper.url) == urljoin(httpd.get_url(), url) + assert unquote(scraper.url) == urljoin(httpd.get_url(), url) diff --git a/tests/remote/test_devedition.py b/tests/remote/test_devedition.py index da943456..8e326295 100644 --- a/tests/remote/test_devedition.py +++ b/tests/remote/test_devedition.py @@ -6,9 +6,8 @@ """Test all scraper classes for Firefox Developer Edition against the remote server""" -import urllib - import pytest +from six.moves.urllib.parse import unquote import mozdownload from mozdownload.scraper import BASE_URL @@ -34,7 +33,7 @@ def test_release_scraper(tmpdir, args, url): scraper = mozdownload.ReleaseScraper(destination=tmpdir, **args) if url: - assert urllib.unquote(scraper.url) == urljoin(BASE_URL, url) + assert unquote(scraper.url) == urljoin(BASE_URL, url) @pytest.mark.parametrize("args,url", [ @@ -47,14 +46,14 @@ def test_release_scraper(tmpdir, args, url): ({'application': 'devedition', 'platform': 'win32', 'version': '60.0b1', 'build_number': 1}, 'devedition/candidates/60.0b1-candidates/build3/win32/en-US/Firefox Setup 60.0b1.exe'), ({'application': 'devedition', 'platform': 'mac', 'version': '60.0b1', 'build_number': 1, - 'locale': 'de'}, + 'locale': 'de'}, 'devedition/candidates/60.0b1-candidates/build3/mac/de/Firefox 60.0b1.dmg'), ({'application': 'devedition', 'platform': 'mac', 'version': '60.0b1', 'build_number': 1, - 'extension': 'json'}, + 'extension': 'json'}, 'devedition/candidates/60.0b1-candidates/build3/mac/en-US/firefox-60.0b1.json'), ]) def test_candidate_scraper(tmpdir, args, url): """Test release candidate scraper against the remote server.""" scraper = mozdownload.ReleaseCandidateScraper(destination=tmpdir, **args) - assert urllib.unquote(scraper.url) == urljoin(BASE_URL, url) + assert unquote(scraper.url) == urljoin(BASE_URL, url) diff --git a/tests/remote/test_firefox.py b/tests/remote/test_firefox.py index 77900f66..c6fd142f 100644 --- a/tests/remote/test_firefox.py +++ b/tests/remote/test_firefox.py @@ -6,9 +6,8 @@ """Test all scraper classes for Firefox against the remote server""" -import urllib - import pytest +from six.moves.urllib.parse import unquote import mozdownload from mozdownload.scraper import BASE_URL @@ -31,10 +30,10 @@ ({'application': 'firefox', 'platform': 'win32', 'version': '42.0b2', 'locale': 'de'}, 'firefox/releases/42.0b2/win32/de/Firefox Setup 42.0b2.exe'), ({'application': 'firefox', 'platform': 'win32', 'version': '42.0b2', - 'is_stub_installer': True}, # old format + 'is_stub_installer': True}, # old format 'firefox/releases/42.0b2/win32/en-US/Firefox Setup Stub 42.0b2.exe'), ({'application': 'firefox', 'platform': 'win32', 'version': '55.0', - 'is_stub_installer': True}, # new format + 'is_stub_installer': True}, # new format 'firefox/releases/55.0/win32/en-US/Firefox Installer.exe'), ]) def test_release_scraper(tmpdir, args, url): @@ -42,7 +41,7 @@ def test_release_scraper(tmpdir, args, url): scraper = mozdownload.ReleaseScraper(destination=tmpdir, **args) if url: - assert urllib.unquote(scraper.url) == urljoin(BASE_URL, url) + assert unquote(scraper.url) == urljoin(BASE_URL, url) @pytest.mark.parametrize("args,url", [ @@ -55,23 +54,23 @@ def test_release_scraper(tmpdir, args, url): ({'application': 'firefox', 'platform': 'win32', 'version': '45.4.0esr', 'build_number': 1}, 'firefox/candidates/45.4.0esr-candidates/build1/win32/en-US/Firefox Setup 45.4.0esr.exe'), ({'application': 'firefox', 'platform': 'mac', 'version': '45.4.0esr', 'build_number': 1, - 'locale': 'de'}, + 'locale': 'de'}, 'firefox/candidates/45.4.0esr-candidates/build1/mac/de/Firefox 45.4.0esr.dmg'), ({'application': 'firefox', 'platform': 'mac', 'version': '45.4.0esr', 'build_number': 1, - 'extension': 'json'}, + 'extension': 'json'}, 'firefox/candidates/45.4.0esr-candidates/build1/mac/en-US/firefox-45.4.0esr.json'), ({'application': 'firefox', 'platform': 'win32', 'version': '52.0', 'build_number': 1, - 'is_stub_installer': True}, # old format + 'is_stub_installer': True}, # old format 'firefox/candidates/52.0-candidates/build1/win32/en-US/Firefox Setup Stub 52.0.exe'), ({'application': 'firefox', 'platform': 'win32', 'version': '55.0', 'build_number': 1, - 'is_stub_installer': True}, + 'is_stub_installer': True}, 'firefox/candidates/55.0-candidates/build1/win32/en-US/Firefox Installer.exe'), ]) def test_candidate_scraper(tmpdir, args, url): """Test release candidate scraper against the remote server.""" scraper = mozdownload.ReleaseCandidateScraper(destination=tmpdir, **args) - assert urllib.unquote(scraper.url) == urljoin(BASE_URL, url) + assert unquote(scraper.url) == urljoin(BASE_URL, url) @pytest.mark.parametrize("args", [ @@ -84,15 +83,15 @@ def test_candidate_scraper(tmpdir, args, url): {'branch': 'mozilla-central', 'platform': 'win32', 'date': '2015-10-21', 'build_number': 2}, {'branch': 'mozilla-central', 'platform': 'win32', 'build_id': '20151021065025'}, {'branch': 'mozilla-central', 'platform': 'win32', 'build_id': '20151021030212', - 'locale': 'de'}, + 'locale': 'de'}, {'branch': 'mozilla-central', 'platform': 'win32', 'build_id': '20151021030212', - 'extension': 'txt'}, + 'extension': 'txt'}, {'branch': 'mozilla-central', 'platform': 'win32', 'build_id': '20151021030212', - 'is_stub_installer': True}, # old format + 'is_stub_installer': True}, # old format {'branch': 'mozilla-central', 'platform': 'win32', 'build_id': '20170821100350', - 'is_stub_installer': True}, # new format + 'is_stub_installer': True}, # new format {'branch': 'mozilla-central', 'platform': 'win64', 'build_id': '20170821100350', - 'is_stub_installer': True}, + 'is_stub_installer': True}, ]) def test_daily_scraper(tmpdir, args): """Test daily scraper against the remote server.""" diff --git a/tests/remote/test_thunderbird.py b/tests/remote/test_thunderbird.py index fc92fea6..3b82aea2 100644 --- a/tests/remote/test_thunderbird.py +++ b/tests/remote/test_thunderbird.py @@ -4,10 +4,8 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this file, # You can obtain one at http://mozilla.org/MPL/2.0/. -import unittest -import urllib - import pytest +from six.moves.urllib.parse import unquote import mozdownload from mozdownload.scraper import BASE_URL @@ -25,7 +23,7 @@ 'thunderbird/releases/52.0/linux-x86_64/en-US/thunderbird-52.0.tar.bz2'), ({'application': 'thunderbird', 'platform': 'mac', 'version': '52.0'}, 'thunderbird/releases/52.0/mac/en-US/Thunderbird 52.0.dmg'), - ({'application': 'thunderbird', 'platform': 'win32', 'version': '52.0','locale': 'de'}, + ({'application': 'thunderbird', 'platform': 'win32', 'version': '52.0', 'locale': 'de'}, 'thunderbird/releases/52.0/win32/de/Thunderbird Setup 52.0.exe'), ]) def test_release_scraper(tmpdir, args, url): @@ -33,7 +31,7 @@ def test_release_scraper(tmpdir, args, url): scraper = mozdownload.ReleaseScraper(destination=tmpdir, **args) if url: - assert urllib.unquote(scraper.url) == urljoin(BASE_URL, url) + assert unquote(scraper.url) == urljoin(BASE_URL, url) @pytest.mark.parametrize("args,url", [ @@ -45,18 +43,18 @@ def test_release_scraper(tmpdir, args, url): 'thunderbird/candidates/52.7.0-candidates/build1/mac/en-US/Thunderbird 52.7.0.dmg'), ({'application': 'thunderbird', 'platform': 'win32', 'version': '52.7.0'}, 'thunderbird/candidates/52.7.0-candidates/build1/win32/en-US/Thunderbird Setup 52.7.0.exe'), - ({'application': 'thunderbird', 'platform': 'win32', 'version': '52.7.0','locale': 'cs'}, + ({'application': 'thunderbird', 'platform': 'win32', 'version': '52.7.0', 'locale': 'cs'}, 'thunderbird/candidates/52.7.0-candidates/build1/win32/cs/Thunderbird Setup 52.7.0.exe'), - ({'application': 'thunderbird', 'platform': 'win32', 'version': '52.7.0','locale': 'en-GB'}, + ({'application': 'thunderbird', 'platform': 'win32', 'version': '52.7.0', 'locale': 'en-GB'}, 'thunderbird/candidates/52.7.0-candidates/build1/win32/en-GB/Thunderbird Setup 52.7.0.exe'), - ({'application': 'thunderbird', 'platform': 'win32', 'version': '52.7.0','build_number': 1}, + ({'application': 'thunderbird', 'platform': 'win32', 'version': '52.7.0', 'build_number': 1}, 'thunderbird/candidates/52.7.0-candidates/build1/win32/en-US/Thunderbird Setup 52.7.0.exe'), ]) def test_candidate_scraper(tmpdir, args, url): """Test release candidate scraper against the remote server.""" scraper = mozdownload.ReleaseCandidateScraper(destination=tmpdir, **args) - assert urllib.unquote(scraper.url) == urljoin(BASE_URL, url) + assert unquote(scraper.url) == urljoin(BASE_URL, url) @pytest.mark.parametrize("args", [ @@ -66,12 +64,13 @@ def test_candidate_scraper(tmpdir, args, url): {'application': 'thunderbird', 'platform': 'win32', 'branch': 'comm-central'}, {'application': 'thunderbird', 'platform': 'win64', 'branch': 'comm-central'}, {'application': 'thunderbird', 'platform': 'win64', 'branch': 'comm-central', 'date': '2018-03-01'}, - {'application': 'thunderbird', 'platform': 'win64', 'branch': 'comm-central', 'date': '2018-03-01', 'build_number': 1}, + {'application': 'thunderbird', 'platform': 'win64', 'branch': 'comm-central', 'date': '2018-03-01', + 'build_number': 1}, {'application': 'thunderbird', 'platform': 'win64', 'branch': 'comm-central', 'build_id': '20180301030201'}, {'application': 'thunderbird', 'platform': 'linux', 'branch': 'comm-central', 'build_id': '20180301030201', - 'extension': 'txt'}, + 'extension': 'txt'}, {'application': 'thunderbird', 'platform': 'linux', 'branch': 'comm-central', 'build_id': '20180301030201', - 'locale': 'de'}, + 'locale': 'de'}, ]) def test_daily_scraper(tmpdir, args): """Test daily scraper against the remote server.""" diff --git a/tests/tinderbox_scraper/test_tinderbox_scraper.py b/tests/tinderbox_scraper/test_tinderbox_scraper.py index a19f165b..05442ac5 100644 --- a/tests/tinderbox_scraper/test_tinderbox_scraper.py +++ b/tests/tinderbox_scraper/test_tinderbox_scraper.py @@ -5,13 +5,12 @@ # You can obtain one at http://mozilla.org/MPL/2.0/. import os -import urllib - -from mozdownload import TinderboxScraper -from mozdownload.utils import urljoin import mozhttpd_base_test as mhttpd +from six.moves.urllib.parse import unquote +from mozdownload import TinderboxScraper +from mozdownload.utils import urljoin firefox_tests = [ # -p win32 @@ -278,5 +277,5 @@ def test_scraper(self): **entry['args']) expected_filename = os.path.join(self.temp_dir, entry['filename']) self.assertEqual(scraper.filename, expected_filename) - self.assertEqual(urllib.unquote(scraper.url), + self.assertEqual(unquote(scraper.url), urljoin(self.wdir, entry['url'])) diff --git a/tests/treeherder/test_api.py b/tests/treeherder/test_api.py index 340cb5e4..9c96736f 100644 --- a/tests/treeherder/test_api.py +++ b/tests/treeherder/test_api.py @@ -6,13 +6,14 @@ import json import os -import urlparse +import mozhttpd_base_test as mhttpd +import pytest +import six +import six.moves.urllib.parse as urlparse from wptserve.handlers import json_handler from mozdownload.treeherder import Treeherder, PLATFORM_MAP -import mozhttpd_base_test as mhttpd - HERE = os.path.dirname(os.path.abspath(__file__)) @@ -31,7 +32,7 @@ def handle_rest_api(request, response): def do_filter(entry): result = True - for option, values in query_options.iteritems(): + for option, values in six.iteritems(query_options): # Don't handle options which are not properties of the entry if option not in entry: continue @@ -56,6 +57,7 @@ def do_filter(entry): class TestAPI(mhttpd.MozHttpdBaseTest): """Basic tests for the Treeherder wrapper.""" + @pytest.mark.skip(reason="Bug 1330474 - Switch Treeherder from Python 2.7 to Python 3.6") def test_query_tinderbox_builds(self): self.httpd.router.register('GET', '/api/*', handle_rest_api) diff --git a/tests/try_scraper/test_invalid_revision.py b/tests/try_scraper/test_invalid_revision.py index ed3f4085..aab84718 100644 --- a/tests/try_scraper/test_invalid_revision.py +++ b/tests/try_scraper/test_invalid_revision.py @@ -6,8 +6,9 @@ import pytest -from mozdownload import TryScraper import mozdownload.errors as errors +from mozdownload import TryScraper + def test_invalid_parameters(httpd, tmpdir, mocker): """Testing download scenarios with invalid parameters for TryScraper""" diff --git a/tests/try_scraper/test_try_scraper.py b/tests/try_scraper/test_try_scraper.py index d76b5d6a..47510303 100644 --- a/tests/try_scraper/test_try_scraper.py +++ b/tests/try_scraper/test_try_scraper.py @@ -5,12 +5,14 @@ # You can obtain one at http://mozilla.org/MPL/2.0/. import os -import urllib + import pytest +from six.moves.urllib.parse import unquote from mozdownload import TryScraper from mozdownload.utils import urljoin + @pytest.mark.parametrize("args,filename,url", [ ({'platform': 'mac64', 'revision': '8fcac92cfcad'}, '8fcac92cfcad-firefox-38.0a1.en-US.mac.dmg', @@ -42,4 +44,4 @@ def test_scraper(httpd, tmpdir, args, filename, url, mocker): **args) expected_filename = os.path.join(str(tmpdir), filename) assert scraper.filename == expected_filename - assert urllib.unquote(scraper.url) == urljoin(httpd.get_url(), url) + assert unquote(scraper.url) == urljoin(httpd.get_url(), url) diff --git a/tox.ini b/tox.ini index a07a4eca..c61c8eb8 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = py27, pylama +envlist = py27, py36, pylama [testenv] usedevelop = true