diff --git a/.codecov.yml b/.codecov.yml index 0617ba3916..296ddc47ee 100644 --- a/.codecov.yml +++ b/.codecov.yml @@ -48,7 +48,8 @@ coverage: if_ci_failed: failure backend: flags: - - backend + - backend_py27 + - backend_py36 target: 0% threshold: 5.0% if_no_uploads: error diff --git a/.travis.yml b/.travis.yml index 62367d4c82..158ab7e5a6 100644 --- a/.travis.yml +++ b/.travis.yml @@ -10,6 +10,9 @@ before_install: install: false script: false jobs: + fast_finish: true + allow_failures: + - python: '3.6' include: # test stage + frontend tests start here - stage: test @@ -30,19 +33,29 @@ jobs: - yarn lint-css - yarn test - yarn coverage - # backend tests start here - - name: 'Backend tests' - python: - - 2.7.10 + # backend tests (py2.7) start here + - name: 'Backend tests (py2.7)' + python: '2.7.10' + env: + - TOXENV=py27,lint install: - pip install --upgrade pip - pip install --upgrade tox script: - tox -v --recreate - # dredd tests start here - - name: 'Dredd tests' - python: - - 2.7.10 + # backend tests (py3.6) start here + - name: 'Backend tests (py3.6)' + python: '3.6' + env: + - TOXENV=py36 + install: + - pip install --upgrade pip + - pip install --upgrade tox + script: + - tox -v --recreate + # dredd tests (py2.7) start here + - name: 'Dredd tests (py2.7)' + python: '2.7.10' install: - pip install --upgrade pip - pip install dredd_hooks @@ -55,6 +68,22 @@ jobs: - yarn test-api after_failure: - cat ./dredd/data/Logs/application.log + # dredd tests (py3.6) start here + # @FIXME: Disabled because they take too long. Enable when Medusa can *actually* start on Python 3. 
+ # - name: 'Dredd tests (py3.6)' + # python: '3.6' + # install: + # - pip install --upgrade pip + # - pip install dredd_hooks + # - pip install 'PyYAML<4' + # - pip install six + # - nvm install v10.7.0 + # - 'curl -o- -L https://yarnpkg.com/install.sh | bash' + # - 'export PATH="$HOME/.yarn/bin:$PATH" && yarn install --ignore-scripts' + # script: + # - yarn test-api + # after_failure: + # - cat ./dredd/data/Logs/application.log notifications: slack: secure: >- diff --git a/CHANGELOG.md b/CHANGELOG.md index dd29190ec8..35193d6b34 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,30 @@ -## 0.2.11 (2018-10-29) +## 0.2.12 (2018-11-16) #### New Features +- Added Join notifier ([#5241](https://github.com/pymedusa/Medusa/pull/5241)) + +#### Improvements +- Vueified "config - notifications" page: + - Improved components: config-textbox, select-list, show-selector, config-textbox-number + - Improved responsiveness of the notification page on smaller screens ([#4913](https://github.com/pymedusa/Medusa/pull/4913)) +- Allowed the use of priorities in the Pushover notifier ([#5567](https://github.com/pymedusa/Medusa/pull/5567)) +- Added delete method to EpisodeHandler (apiv2), for deleting a single episode ([#5685](https://github.com/pymedusa/Medusa/pull/5685)) +- Allowed Nyaa and Anidex to search for non-anime shows ([#5680](https://github.com/pymedusa/Medusa/pull/5680) & [#5681](https://github.com/pymedusa/Medusa/pull/5681)) +- Do not allow to enable the anime options, when using tmdb or tvmaze ([#5701](https://github.com/pymedusa/Medusa/pull/5701)) +- Vueified "config - search" page. Improved responsiveness of the notification page on smaller screens. 
([#5553](https://github.com/pymedusa/Medusa/pull/5553)) + +#### Fixes +- Fixed test not working for Download Station ([#5561](https://github.com/pymedusa/Medusa/pull/5561)) +- Fixed wrong placeholder reference in log ([#5562](https://github.com/pymedusa/Medusa/pull/5562)) +- Fixed guessit exception when parsing release without title ([#5569](https://github.com/pymedusa/Medusa/pull/5569)) +- Fixed Download Station BraceAdapter exception ([#5573](https://github.com/pymedusa/Medusa/pull/5573)) +- Fixed saving multiple metadata providers ([#5576](https://github.com/pymedusa/Medusa/pull/5576)) +- Fixed show-selector for libraries with more than 1k shows ([#5623](https://github.com/pymedusa/Medusa/pull/5623)) +- Fixed Growl registration error ([#5684](https://github.com/pymedusa/Medusa/pull/5684)) + +----- + +## 0.2.11 (2018-10-29) #### Improvements - Updated `guessit` to version 3.0.0 ([#4244](https://github.com/pymedusa/Medusa/pull/4244)) diff --git a/Dockerfile b/Dockerfile index 90ed264e1d..6f9ee78aca 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM lsiobase/alpine.python:3.7 +FROM lsiobase/alpine.python:3.8 MAINTAINER bobbysteel # set version label diff --git a/dredd/api-description.yml b/dredd/api-description.yml index 6f3760b1a0..e411a939e0 100644 --- a/dredd/api-description.yml +++ b/dredd/api-description.yml @@ -324,6 +324,35 @@ paths: {"status": 2, "quality": 4} path-params: id: e999 + delete: + summary: Delete a specific episode from a given series + description: Delete a specific episode from a given series + parameters: + - $ref: '#/parameters/series-id' + name: seriesid + - $ref: '#/parameters/episode-id-delete' + name: id + responses: + 204: + description: Episode is deleted successfully + 400: + $ref: '#/responses/error' + description: Invalid id + x-request: + path-params: + seriesid: tvdb301824 + id: 123456 + 404: + $ref: '#/responses/error' + description: Episode not found + x-request: + path-params: + seriesid: tvdb301824 + id: 
s9999e9999 + 409: + $ref: '#/responses/error' + description: Unable to delete episode + x-disabled: true /series/{seriesid}/episodes/{id}/{field}: get: summary: Return a specific field from a given episode @@ -1344,6 +1373,8 @@ definitions: type: string webRoot: type: string + runsInDocker: + type: boolean torrents: type: object properties: @@ -1479,110 +1510,557 @@ definitions: type: string multiEpStrings: type: object - search: + search: + type: object + properties: + general: + type: object + properties: + randomizeProviders: + type: boolean + downloadPropers: + type: boolean + checkPropersInterval: + type: string + propersSearchDays: + type: integer + backlogDays: + type: integer + backlogFrequency: + type: integer + minBacklogFrequency: + type: integer + dailySearchFrequency: + type: integer + minDailySearchFrequency: + type: integer + removeFromClient: + type: boolean + torrentCheckerFrequency: + type: integer + minTorrentCheckerFrequency: + type: integer + usenetRetention: + type: integer + trackersList: + type: array + allowHighPriority: + type: boolean + useFailedDownloads: + type: boolean + deleteFailed: + type: boolean + cacheTrimming: + type: boolean + maxCacheAge: + type: integer + filters: + type: object + properties: + ignored: + type: array + undesired: + type: array + preferred: + type: array + required: + type: array + ignoredSubsList: + type: array + ignoreUnknownSubs: + type: boolean + showDownloadDir: + type: string + processAutomatically: + type: boolean + postponeIfSyncFiles: + type: boolean + postponeIfNoSubs: + type: boolean + renameEpisodes: + type: boolean + createMissingShowDirs: + type: boolean + addShowsWithoutDir: + type: boolean + moveAssociatedFiles: + type: boolean + nfoRename: + type: boolean + airdateEpisodes: + type: boolean + unpack: + type: boolean + deleteRarContent: + type: boolean + noDelete: + type: boolean + processMethod: + type: string + reflinkAvailable: + type: boolean + autoPostprocessorFrequency: + type: integer + 
syncFiles: + type: array + fileTimestampTimezone: + type: string + allowedExtensions: + type: array + extraScripts: + type: array + extraScriptsUrl: + type: string + multiEpStrings: + type: object + notifiers: + type: object + properties: + kodi: type: object properties: - general: + enabled: + type: boolean + alwaysOn: + type: boolean + notifyOnSnatch: + type: boolean + notifyOnDownload: + type: boolean + notifyOnSubtitleDownload: + type: boolean + update: type: object properties: - randomizeProviders: + library: type: boolean - downloadPropers: + full: type: boolean - checkPropersInterval: - type: string - propersSearchDays: - type: integer - backlogDays: - type: integer - backlogFrequency: - type: integer - minBacklogFrequency: - type: integer - dailySearchFrequency: - type: integer - minDailySearchFrequency: - type: integer - removeFromClient: - type: boolean - torrentCheckerFrequency: - type: integer - minTorrentCheckerFrequency: - type: integer - usenetRetention: - type: integer - trackersList: - type: array - allowHighPriority: + onlyFirst: type: boolean - useFailedDownloads: + host: + type: array + username: + type: string + password: + type: string + libraryCleanPending: + type: boolean + cleanLibrary: + type: boolean + plex: + type: object + properties: + server: + type: object + properties: + enabled: type: boolean - deleteFailed: + updateLibrary: type: boolean - cacheTrimming: + host: + type: array + https: type: boolean - maxCacheAge: - type: integer - filters: + username: + type: string + password: + type: string + token: + type: string + client: type: object properties: - ignored: - type: array - undesired: - type: array - preferred: - type: array - required: - type: array - ignoredSubsList: + enabled: + type: boolean + username: + type: string + host: type: array - ignoreUnknownSubs: + notifyOnSnatch: type: boolean - showDownloadDir: + notifyOnDownload: + type: boolean + notifyOnSubtitleDownload: + type: boolean + emby: + type: object + properties: 
+ enabled: + type: boolean + host: type: string - processAutomatically: + apiKey: + type: string + nmj: + type: object + properties: + enabled: type: boolean - postponeIfSyncFiles: + host: + type: string + database: + type: string + mount: + type: string + nmjv2: + type: object + properties: + enabled: type: boolean - postponeIfNoSubs: + host: + type: string + dbloc: + type: string + database: + type: string + synologyIndex: + type: object + properties: + enabled: type: boolean - renameEpisodes: + synology: + type: object + properties: + enabled: type: boolean - createMissingShowDirs: + notifyOnSnatch: type: boolean - addShowsWithoutDir: + notifyOnDownload: type: boolean - moveAssociatedFiles: + notifyOnSubtitleDownload: type: boolean - nfoRename: + pyTivo: + type: object + properties: + enabled: type: boolean - airdateEpisodes: + host: + type: string + name: + type: string + shareName: + type: string + growl: + type: object + properties: + enabled: type: boolean - unpack: + host: + type: string + password: + type: string + notifyOnSnatch: type: boolean - deleteRarContent: + notifyOnDownload: type: boolean - noDelete: + notifyOnSubtitleDownload: type: boolean - processMethod: - type: string - reflinkAvailable: + prowl: + type: object + properties: + enabled: type: boolean - autoPostprocessorFrequency: - type: integer - syncFiles: + api: type: array - fileTimestampTimezone: + messageTitle: type: string - allowedExtensions: + priority: + type: integer + notifyOnSnatch: + type: boolean + notifyOnDownload: + type: boolean + notifyOnSubtitleDownload: + type: boolean + libnotify: + type: object + properties: + enabled: + type: boolean + notifyOnSnatch: + type: boolean + notifyOnDownload: + type: boolean + notifyOnSubtitleDownload: + type: boolean + pushover: + type: object + properties: + enabled: + type: boolean + apiKey: + type: string + userKey: + type: string + device: type: array - extraScripts: + sound: + type: string + notifyOnSnatch: + type: boolean + 
notifyOnDownload: + type: boolean + notifyOnSubtitleDownload: + type: boolean + boxcar2: + type: object + properties: + enabled: + type: boolean + accessToken: + type: string + notifyOnSnatch: + type: boolean + notifyOnDownload: + type: boolean + notifyOnSubtitleDownload: + type: boolean + pushalot: + type: object + properties: + enabled: + type: boolean + authToken: + type: string + notifyOnSnatch: + type: boolean + notifyOnDownload: + type: boolean + notifyOnSubtitleDownload: + type: boolean + pushbullet: + type: object + properties: + enabled: + type: boolean + api: + type: string + id: + type: string + notifyOnSnatch: + type: boolean + notifyOnDownload: + type: boolean + notifyOnSubtitleDownload: + type: boolean + join: + type: object + properties: + enabled: + type: boolean + api: + type: string + device: + type: string + notifyOnSnatch: + type: boolean + notifyOnDownload: + type: boolean + notifyOnSubtitleDownload: + type: boolean + freemobile: + type: object + properties: + enabled: + type: boolean + api: + type: string + id: + type: string + notifyOnSnatch: + type: boolean + notifyOnDownload: + type: boolean + notifyOnSubtitleDownload: + type: boolean + telegram: + type: object + properties: + enabled: + type: boolean + api: + type: string + id: + type: string + notifyOnSnatch: + type: boolean + notifyOnDownload: + type: boolean + notifyOnSubtitleDownload: + type: boolean + twitter: + type: object + properties: + enabled: + type: boolean + dmto: + type: string + username: + type: string + password: + type: string + prefix: + type: string + directMessage: + type: boolean + notifyOnSnatch: + type: boolean + notifyOnDownload: + type: boolean + notifyOnSubtitleDownload: + type: boolean + trakt: + type: object + properties: + enabled: + type: boolean + pinUrl: + type: string + username: + type: string + accessToken: + type: string + timeout: + type: integer + defaultIndexer: + type: integer + sync: + type: boolean + syncRemove: + type: boolean + syncWatchlist: + 
type: boolean + methodAdd: + type: integer + removeWatchlist: + type: boolean + removeSerieslist: + type: boolean + removeShowFromApplication: + type: boolean + startPaused: + type: boolean + blacklistName: + type: string + email: + type: object + properties: + enabled: + type: boolean + host: + type: string + port: + type: integer + from: + type: string + tls: + type: boolean + username: + type: string + password: + type: string + addressList: type: array - extraScriptsUrl: + subject: type: string - multiEpStrings: + notifyOnSnatch: + type: boolean + notifyOnDownload: + type: boolean + notifyOnSubtitleDownload: + type: boolean + slack: + type: object + properties: + enabled: + type: boolean + webhook: + type: string + notifyOnSnatch: + type: boolean + notifyOnDownload: + type: boolean + notifyOnSubtitleDownload: + type: boolean + clients: + type: object + properties: + nzb: + type: object + properties: + enabled: + type: boolean + dir: + type: string + method: + type: string + nzbget: type: object - + properties: + category: + type: string + categoryAnime: + type: string + categoryAnimeBacklog: + type: string + categoryBacklog: + type: string + host: + type: string + priority: + type: integer + useHttps: + type: boolean + username: + type: string + sabnzbd: + type: object + properties: + category: + type: string + categoryAnime: + type: string + categoryAnimeBacklog: + type: string + categoryBacklog: + type: string + forced: + type: boolean + host: + type: string + username: + type: string + torrents: + type: object + properties: + username: + type: string + seedTime: + type: integer + rpcurl: + type: string + authType: + type: string + paused: + type: boolean + host: + type: string + path: + type: string + verifySSL: + type: boolean + highBandwidth: + type: boolean + enabled: + type: boolean + label: + type: string + labelAnime: + type: string + method: + type: string + enum: ["blackhole", "utorrent", "transmission", "deluge", "deluged", "downloadstation", 
"rtorrent", "qbittorrent", "mlnet"] Log: type: object properties: @@ -1973,6 +2451,13 @@ parameters: description: The episode id to retrieve. E.g. s02e03, e34 or 2016-12-31 x-example: s01e01 type: string + episode-id-delete: + name: episode-id-delete + in: path + required: true + description: The episode id to retrieve. E.g. s02e03, e34 or 2016-12-31 + x-example: s01e02 + type: string alias-id: name: alias-id in: path @@ -2020,6 +2505,8 @@ parameters: - statuses - qualities - metadata + - search + - notifiers log-level: name: level in: query diff --git a/dredd/dredd_hook.py b/dredd/dredd_hook.py index a90cdc8037..06e0eeb459 100644 --- a/dredd/dredd_hook.py +++ b/dredd/dredd_hook.py @@ -1,15 +1,20 @@ +#!/usr/bin/env python +# coding=utf-8 """Dredd hook.""" from __future__ import absolute_import +from __future__ import print_function +from __future__ import unicode_literals -import ConfigParser +import io import json -import urlparse from collections import Mapping -from urllib import urlencode import dredd_hooks as hooks +import six from six import string_types +from six.moves.configparser import RawConfigParser +from six.moves.urllib.parse import parse_qs, urlencode, urlparse import yaml @@ -27,7 +32,7 @@ def load_api_description(transactions): """Load api description.""" global api_description - with open(transactions[0]['origin']['filename'], 'r') as stream: + with io.open(transactions[0]['origin']['filename'], 'rb') as stream: api_description = yaml.safe_load(stream) @@ -64,8 +69,8 @@ def configure_transaction(transaction): # Change request based on x-request configuration url = transaction['fullPath'] - parsed_url = urlparse.urlparse(url) - parsed_params = urlparse.parse_qs(parsed_url.query) + parsed_url = urlparse(url) + parsed_params = parse_qs(parsed_url.query) parsed_path = parsed_url.path request = response.get('x-request', {}) @@ -158,13 +163,13 @@ def start(): os.makedirs(data_dir) os.chdir(data_dir) - config = ConfigParser.RawConfigParser() + config = 
RawConfigParser() config.read('config.ini') config.add_section('General') config.set('General', 'web_username', stash['web-username']) config.set('General', 'web_password', stash['web-password']) config.set('General', 'api_key', stash['api-key']) - with open('config.ini', 'wb') as configfile: + with io.open('config.ini', 'w' if six.PY3 else 'wb') as configfile: config.write(configfile) sys.path.insert(1, app_dir) diff --git a/ext/dateutil/_version.py b/ext/dateutil/_version.py index 713fe0dfe5..d3ce85616b 100644 --- a/ext/dateutil/_version.py +++ b/ext/dateutil/_version.py @@ -1,4 +1,4 @@ # coding: utf-8 # file generated by setuptools_scm # don't change, don't track in version control -version = '2.7.3' +version = '2.7.5' diff --git a/ext/dateutil/test/__init__.py b/ext/dateutil/test/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/ext/dateutil/test/_common.py b/ext/dateutil/test/_common.py new file mode 100644 index 0000000000..264dfbda39 --- /dev/null +++ b/ext/dateutil/test/_common.py @@ -0,0 +1,275 @@ +from __future__ import unicode_literals +import os +import time +import subprocess +import warnings +import tempfile +import pickle + + +class WarningTestMixin(object): + # Based on https://stackoverflow.com/a/12935176/467366 + class _AssertWarnsContext(warnings.catch_warnings): + def __init__(self, expected_warnings, parent, **kwargs): + super(WarningTestMixin._AssertWarnsContext, self).__init__(**kwargs) + + self.parent = parent + try: + self.expected_warnings = list(expected_warnings) + except TypeError: + self.expected_warnings = [expected_warnings] + + self._warning_log = [] + + def __enter__(self, *args, **kwargs): + rv = super(WarningTestMixin._AssertWarnsContext, self).__enter__(*args, **kwargs) + + if self._showwarning is not self._module.showwarning: + super_showwarning = self._module.showwarning + else: + super_showwarning = None + + def showwarning(*args, **kwargs): + if super_showwarning is not None: + 
super_showwarning(*args, **kwargs) + + self._warning_log.append(warnings.WarningMessage(*args, **kwargs)) + + self._module.showwarning = showwarning + return rv + + def __exit__(self, *args, **kwargs): + super(WarningTestMixin._AssertWarnsContext, self).__exit__(self, *args, **kwargs) + + self.parent.assertTrue(any(issubclass(item.category, warning) + for warning in self.expected_warnings + for item in self._warning_log)) + + def assertWarns(self, warning, callable=None, *args, **kwargs): + warnings.simplefilter('always') + context = self.__class__._AssertWarnsContext(warning, self) + if callable is None: + return context + else: + with context: + callable(*args, **kwargs) + + +class PicklableMixin(object): + def _get_nobj_bytes(self, obj, dump_kwargs, load_kwargs): + """ + Pickle and unpickle an object using ``pickle.dumps`` / ``pickle.loads`` + """ + pkl = pickle.dumps(obj, **dump_kwargs) + return pickle.loads(pkl, **load_kwargs) + + def _get_nobj_file(self, obj, dump_kwargs, load_kwargs): + """ + Pickle and unpickle an object using ``pickle.dump`` / ``pickle.load`` on + a temporary file. + """ + with tempfile.TemporaryFile('w+b') as pkl: + pickle.dump(obj, pkl, **dump_kwargs) + pkl.seek(0) # Reset the file to the beginning to read it + nobj = pickle.load(pkl, **load_kwargs) + + return nobj + + def assertPicklable(self, obj, singleton=False, asfile=False, + dump_kwargs=None, load_kwargs=None): + """ + Assert that an object can be pickled and unpickled. This assertion + assumes that the desired behavior is that the unpickled object compares + equal to the original object, but is not the same object. 
+ """ + get_nobj = self._get_nobj_file if asfile else self._get_nobj_bytes + dump_kwargs = dump_kwargs or {} + load_kwargs = load_kwargs or {} + + nobj = get_nobj(obj, dump_kwargs, load_kwargs) + if not singleton: + self.assertIsNot(obj, nobj) + self.assertEqual(obj, nobj) + + +class TZContextBase(object): + """ + Base class for a context manager which allows changing of time zones. + + Subclasses may define a guard variable to either block or or allow time + zone changes by redefining ``_guard_var_name`` and ``_guard_allows_change``. + The default is that the guard variable must be affirmatively set. + + Subclasses must define ``get_current_tz`` and ``set_current_tz``. + """ + _guard_var_name = "DATEUTIL_MAY_CHANGE_TZ" + _guard_allows_change = True + + def __init__(self, tzval): + self.tzval = tzval + self._old_tz = None + + @classmethod + def tz_change_allowed(cls): + """ + Class method used to query whether or not this class allows time zone + changes. + """ + guard = bool(os.environ.get(cls._guard_var_name, False)) + + # _guard_allows_change gives the "default" behavior - if True, the + # guard is overcoming a block. If false, the guard is causing a block. + # Whether tz_change is allowed is therefore the XNOR of the two. + return guard == cls._guard_allows_change + + @classmethod + def tz_change_disallowed_message(cls): + """ Generate instructions on how to allow tz changes """ + msg = ('Changing time zone not allowed. 
Set {envar} to {gval} ' + 'if you would like to allow this behavior') + + return msg.format(envar=cls._guard_var_name, + gval=cls._guard_allows_change) + + def __enter__(self): + if not self.tz_change_allowed(): + raise ValueError(self.tz_change_disallowed_message()) + + self._old_tz = self.get_current_tz() + self.set_current_tz(self.tzval) + + def __exit__(self, type, value, traceback): + if self._old_tz is not None: + self.set_current_tz(self._old_tz) + + self._old_tz = None + + def get_current_tz(self): + raise NotImplementedError + + def set_current_tz(self): + raise NotImplementedError + + +class TZEnvContext(TZContextBase): + """ + Context manager that temporarily sets the `TZ` variable (for use on + *nix-like systems). Because the effect is local to the shell anyway, this + will apply *unless* a guard is set. + + If you do not want the TZ environment variable set, you may set the + ``DATEUTIL_MAY_NOT_CHANGE_TZ_VAR`` variable to a truthy value. + """ + _guard_var_name = "DATEUTIL_MAY_NOT_CHANGE_TZ_VAR" + _guard_allows_change = False + + def get_current_tz(self): + return os.environ.get('TZ', UnsetTz) + + def set_current_tz(self, tzval): + if tzval is UnsetTz and 'TZ' in os.environ: + del os.environ['TZ'] + else: + os.environ['TZ'] = tzval + + time.tzset() + + +class TZWinContext(TZContextBase): + """ + Context manager for changing local time zone on Windows. + + Because the effect of this is system-wide and global, it may have + unintended side effect. Set the ``DATEUTIL_MAY_CHANGE_TZ`` environment + variable to a truthy value before using this context manager. 
+ """ + def get_current_tz(self): + p = subprocess.Popen(['tzutil', '/g'], stdout=subprocess.PIPE) + + ctzname, err = p.communicate() + ctzname = ctzname.decode() # Popen returns + + if p.returncode: + raise OSError('Failed to get current time zone: ' + err) + + return ctzname + + def set_current_tz(self, tzname): + p = subprocess.Popen('tzutil /s "' + tzname + '"') + + out, err = p.communicate() + + if p.returncode: + raise OSError('Failed to set current time zone: ' + + (err or 'Unknown error.')) + + +### +# Utility classes +class NotAValueClass(object): + """ + A class analogous to NaN that has operations defined for any type. + """ + def _op(self, other): + return self # Operation with NotAValue returns NotAValue + + def _cmp(self, other): + return False + + __add__ = __radd__ = _op + __sub__ = __rsub__ = _op + __mul__ = __rmul__ = _op + __div__ = __rdiv__ = _op + __truediv__ = __rtruediv__ = _op + __floordiv__ = __rfloordiv__ = _op + + __lt__ = __rlt__ = _op + __gt__ = __rgt__ = _op + __eq__ = __req__ = _op + __le__ = __rle__ = _op + __ge__ = __rge__ = _op + + +NotAValue = NotAValueClass() + + +class ComparesEqualClass(object): + """ + A class that is always equal to whatever you compare it to. 
+ """ + + def __eq__(self, other): + return True + + def __ne__(self, other): + return False + + def __le__(self, other): + return True + + def __ge__(self, other): + return True + + def __lt__(self, other): + return False + + def __gt__(self, other): + return False + + __req__ = __eq__ + __rne__ = __ne__ + __rle__ = __le__ + __rge__ = __ge__ + __rlt__ = __lt__ + __rgt__ = __gt__ + + +ComparesEqual = ComparesEqualClass() + + +class UnsetTzClass(object): + """ Sentinel class for unset time zone variable """ + pass + + +UnsetTz = UnsetTzClass() diff --git a/ext/dateutil/test/property/test_isoparse_prop.py b/ext/dateutil/test/property/test_isoparse_prop.py new file mode 100644 index 0000000000..c6a4b82a31 --- /dev/null +++ b/ext/dateutil/test/property/test_isoparse_prop.py @@ -0,0 +1,27 @@ +from hypothesis import given, assume +from hypothesis import strategies as st + +from dateutil import tz +from dateutil.parser import isoparse + +import pytest + +# Strategies +TIME_ZONE_STRATEGY = st.sampled_from([None, tz.tzutc()] + + [tz.gettz(zname) for zname in ('US/Eastern', 'US/Pacific', + 'Australia/Sydney', 'Europe/London')]) +ASCII_STRATEGY = st.characters(max_codepoint=127) + + +@pytest.mark.isoparser +@given(dt=st.datetimes(timezones=TIME_ZONE_STRATEGY), sep=ASCII_STRATEGY) +def test_timespec_auto(dt, sep): + if dt.tzinfo is not None: + # Assume offset has no sub-second components + assume(dt.utcoffset().total_seconds() % 60 == 0) + + sep = str(sep) # Python 2.7 requires bytes + dtstr = dt.isoformat(sep=sep) + dt_rt = isoparse(dtstr) + + assert dt_rt == dt diff --git a/ext/dateutil/test/property/test_parser_prop.py b/ext/dateutil/test/property/test_parser_prop.py new file mode 100644 index 0000000000..fdfd171e86 --- /dev/null +++ b/ext/dateutil/test/property/test_parser_prop.py @@ -0,0 +1,22 @@ +from hypothesis.strategies import integers +from hypothesis import given + +import pytest + +from dateutil.parser import parserinfo + + +@pytest.mark.parserinfo 
+@given(integers(min_value=100, max_value=9999)) +def test_convertyear(n): + assert n == parserinfo().convertyear(n) + + +@pytest.mark.parserinfo +@given(integers(min_value=-50, + max_value=49)) +def test_convertyear_no_specified_century(n): + p = parserinfo() + new_year = p._year + n + result = p.convertyear(new_year % 100, century_specified=False) + assert result == new_year diff --git a/ext/dateutil/test/test_easter.py b/ext/dateutil/test/test_easter.py new file mode 100644 index 0000000000..eeb094ee26 --- /dev/null +++ b/ext/dateutil/test/test_easter.py @@ -0,0 +1,95 @@ +from dateutil.easter import easter +from dateutil.easter import EASTER_WESTERN, EASTER_ORTHODOX, EASTER_JULIAN + +from datetime import date +import unittest + +# List of easters between 1990 and 2050 +western_easter_dates = [ + date(1990, 4, 15), date(1991, 3, 31), date(1992, 4, 19), date(1993, 4, 11), + date(1994, 4, 3), date(1995, 4, 16), date(1996, 4, 7), date(1997, 3, 30), + date(1998, 4, 12), date(1999, 4, 4), + + date(2000, 4, 23), date(2001, 4, 15), date(2002, 3, 31), date(2003, 4, 20), + date(2004, 4, 11), date(2005, 3, 27), date(2006, 4, 16), date(2007, 4, 8), + date(2008, 3, 23), date(2009, 4, 12), + + date(2010, 4, 4), date(2011, 4, 24), date(2012, 4, 8), date(2013, 3, 31), + date(2014, 4, 20), date(2015, 4, 5), date(2016, 3, 27), date(2017, 4, 16), + date(2018, 4, 1), date(2019, 4, 21), + + date(2020, 4, 12), date(2021, 4, 4), date(2022, 4, 17), date(2023, 4, 9), + date(2024, 3, 31), date(2025, 4, 20), date(2026, 4, 5), date(2027, 3, 28), + date(2028, 4, 16), date(2029, 4, 1), + + date(2030, 4, 21), date(2031, 4, 13), date(2032, 3, 28), date(2033, 4, 17), + date(2034, 4, 9), date(2035, 3, 25), date(2036, 4, 13), date(2037, 4, 5), + date(2038, 4, 25), date(2039, 4, 10), + + date(2040, 4, 1), date(2041, 4, 21), date(2042, 4, 6), date(2043, 3, 29), + date(2044, 4, 17), date(2045, 4, 9), date(2046, 3, 25), date(2047, 4, 14), + date(2048, 4, 5), date(2049, 4, 18), date(2050, 4, 10) + ] + 
+orthodox_easter_dates = [ + date(1990, 4, 15), date(1991, 4, 7), date(1992, 4, 26), date(1993, 4, 18), + date(1994, 5, 1), date(1995, 4, 23), date(1996, 4, 14), date(1997, 4, 27), + date(1998, 4, 19), date(1999, 4, 11), + + date(2000, 4, 30), date(2001, 4, 15), date(2002, 5, 5), date(2003, 4, 27), + date(2004, 4, 11), date(2005, 5, 1), date(2006, 4, 23), date(2007, 4, 8), + date(2008, 4, 27), date(2009, 4, 19), + + date(2010, 4, 4), date(2011, 4, 24), date(2012, 4, 15), date(2013, 5, 5), + date(2014, 4, 20), date(2015, 4, 12), date(2016, 5, 1), date(2017, 4, 16), + date(2018, 4, 8), date(2019, 4, 28), + + date(2020, 4, 19), date(2021, 5, 2), date(2022, 4, 24), date(2023, 4, 16), + date(2024, 5, 5), date(2025, 4, 20), date(2026, 4, 12), date(2027, 5, 2), + date(2028, 4, 16), date(2029, 4, 8), + + date(2030, 4, 28), date(2031, 4, 13), date(2032, 5, 2), date(2033, 4, 24), + date(2034, 4, 9), date(2035, 4, 29), date(2036, 4, 20), date(2037, 4, 5), + date(2038, 4, 25), date(2039, 4, 17), + + date(2040, 5, 6), date(2041, 4, 21), date(2042, 4, 13), date(2043, 5, 3), + date(2044, 4, 24), date(2045, 4, 9), date(2046, 4, 29), date(2047, 4, 21), + date(2048, 4, 5), date(2049, 4, 25), date(2050, 4, 17) +] + +# A random smattering of Julian dates. 
+# Pulled values from http://www.kevinlaughery.com/east4099.html +julian_easter_dates = [ + date( 326, 4, 3), date( 375, 4, 5), date( 492, 4, 5), date( 552, 3, 31), + date( 562, 4, 9), date( 569, 4, 21), date( 597, 4, 14), date( 621, 4, 19), + date( 636, 3, 31), date( 655, 3, 29), date( 700, 4, 11), date( 725, 4, 8), + date( 750, 3, 29), date( 782, 4, 7), date( 835, 4, 18), date( 849, 4, 14), + date( 867, 3, 30), date( 890, 4, 12), date( 922, 4, 21), date( 934, 4, 6), + date(1049, 3, 26), date(1058, 4, 19), date(1113, 4, 6), date(1119, 3, 30), + date(1242, 4, 20), date(1255, 3, 28), date(1257, 4, 8), date(1258, 3, 24), + date(1261, 4, 24), date(1278, 4, 17), date(1333, 4, 4), date(1351, 4, 17), + date(1371, 4, 6), date(1391, 3, 26), date(1402, 3, 26), date(1412, 4, 3), + date(1439, 4, 5), date(1445, 3, 28), date(1531, 4, 9), date(1555, 4, 14) +] + + +class EasterTest(unittest.TestCase): + def testEasterWestern(self): + for easter_date in western_easter_dates: + self.assertEqual(easter_date, + easter(easter_date.year, EASTER_WESTERN)) + + def testEasterOrthodox(self): + for easter_date in orthodox_easter_dates: + self.assertEqual(easter_date, + easter(easter_date.year, EASTER_ORTHODOX)) + + def testEasterJulian(self): + for easter_date in julian_easter_dates: + self.assertEqual(easter_date, + easter(easter_date.year, EASTER_JULIAN)) + + def testEasterBadMethod(self): + # Invalid methods raise ValueError + with self.assertRaises(ValueError): + easter(1975, 4) diff --git a/ext/dateutil/test/test_import_star.py b/ext/dateutil/test/test_import_star.py new file mode 100644 index 0000000000..8e66f38aa3 --- /dev/null +++ b/ext/dateutil/test/test_import_star.py @@ -0,0 +1,33 @@ +"""Test for the "import *" functionality. 
+ +As imort * can be only done at module level, it has been added in a separate file +""" +import unittest + +prev_locals = list(locals()) +from dateutil import * +new_locals = {name:value for name,value in locals().items() + if name not in prev_locals} +new_locals.pop('prev_locals') + +class ImportStarTest(unittest.TestCase): + """ Test that `from dateutil import *` adds the modules in __all__ locally""" + + def testImportedModules(self): + import dateutil.easter + import dateutil.parser + import dateutil.relativedelta + import dateutil.rrule + import dateutil.tz + import dateutil.utils + import dateutil.zoneinfo + + self.assertEquals(dateutil.easter, new_locals.pop("easter")) + self.assertEquals(dateutil.parser, new_locals.pop("parser")) + self.assertEquals(dateutil.relativedelta, new_locals.pop("relativedelta")) + self.assertEquals(dateutil.rrule, new_locals.pop("rrule")) + self.assertEquals(dateutil.tz, new_locals.pop("tz")) + self.assertEquals(dateutil.utils, new_locals.pop("utils")) + self.assertEquals(dateutil.zoneinfo, new_locals.pop("zoneinfo")) + + self.assertFalse(new_locals) diff --git a/ext/dateutil/test/test_imports.py b/ext/dateutil/test/test_imports.py new file mode 100644 index 0000000000..2a19b62a47 --- /dev/null +++ b/ext/dateutil/test/test_imports.py @@ -0,0 +1,166 @@ +import sys +import unittest + +class ImportVersionTest(unittest.TestCase): + """ Test that dateutil.__version__ can be imported""" + + def testImportVersionStr(self): + from dateutil import __version__ + + def testImportRoot(self): + import dateutil + + self.assertTrue(hasattr(dateutil, '__version__')) + + +class ImportEasterTest(unittest.TestCase): + """ Test that dateutil.easter-related imports work properly """ + + def testEasterDirect(self): + import dateutil.easter + + def testEasterFrom(self): + from dateutil import easter + + def testEasterStar(self): + from dateutil.easter import easter + + +class ImportParserTest(unittest.TestCase): + """ Test that dateutil.parser-related 
imports work properly """ + def testParserDirect(self): + import dateutil.parser + + def testParserFrom(self): + from dateutil import parser + + def testParserAll(self): + # All interface + from dateutil.parser import parse + from dateutil.parser import parserinfo + + # Other public classes + from dateutil.parser import parser + + for var in (parse, parserinfo, parser): + self.assertIsNot(var, None) + + +class ImportRelativeDeltaTest(unittest.TestCase): + """ Test that dateutil.relativedelta-related imports work properly """ + def testRelativeDeltaDirect(self): + import dateutil.relativedelta + + def testRelativeDeltaFrom(self): + from dateutil import relativedelta + + def testRelativeDeltaAll(self): + from dateutil.relativedelta import relativedelta + from dateutil.relativedelta import MO, TU, WE, TH, FR, SA, SU + + for var in (relativedelta, MO, TU, WE, TH, FR, SA, SU): + self.assertIsNot(var, None) + + # In the public interface but not in all + from dateutil.relativedelta import weekday + self.assertIsNot(weekday, None) + + +class ImportRRuleTest(unittest.TestCase): + """ Test that dateutil.rrule related imports work properly """ + def testRRuleDirect(self): + import dateutil.rrule + + def testRRuleFrom(self): + from dateutil import rrule + + def testRRuleAll(self): + from dateutil.rrule import rrule + from dateutil.rrule import rruleset + from dateutil.rrule import rrulestr + from dateutil.rrule import YEARLY, MONTHLY, WEEKLY, DAILY + from dateutil.rrule import HOURLY, MINUTELY, SECONDLY + from dateutil.rrule import MO, TU, WE, TH, FR, SA, SU + + rr_all = (rrule, rruleset, rrulestr, + YEARLY, MONTHLY, WEEKLY, DAILY, + HOURLY, MINUTELY, SECONDLY, + MO, TU, WE, TH, FR, SA, SU) + + for var in rr_all: + self.assertIsNot(var, None) + + # In the public interface but not in all + from dateutil.rrule import weekday + self.assertIsNot(weekday, None) + + +class ImportTZTest(unittest.TestCase): + """ Test that dateutil.tz related imports work properly """ + def 
testTzDirect(self): + import dateutil.tz + + def testTzFrom(self): + from dateutil import tz + + def testTzAll(self): + from dateutil.tz import tzutc + from dateutil.tz import tzoffset + from dateutil.tz import tzlocal + from dateutil.tz import tzfile + from dateutil.tz import tzrange + from dateutil.tz import tzstr + from dateutil.tz import tzical + from dateutil.tz import gettz + from dateutil.tz import tzwin + from dateutil.tz import tzwinlocal + from dateutil.tz import UTC + from dateutil.tz import datetime_ambiguous + from dateutil.tz import datetime_exists + from dateutil.tz import resolve_imaginary + + tz_all = ["tzutc", "tzoffset", "tzlocal", "tzfile", "tzrange", + "tzstr", "tzical", "gettz", "datetime_ambiguous", + "datetime_exists", "resolve_imaginary", "UTC"] + + tz_all += ["tzwin", "tzwinlocal"] if sys.platform.startswith("win") else [] + lvars = locals() + + for var in tz_all: + self.assertIsNot(lvars[var], None) + +@unittest.skipUnless(sys.platform.startswith('win'), "Requires Windows") +class ImportTZWinTest(unittest.TestCase): + """ Test that dateutil.tzwin related imports work properly """ + def testTzwinDirect(self): + import dateutil.tzwin + + def testTzwinFrom(self): + from dateutil import tzwin + + def testTzwinStar(self): + from dateutil.tzwin import tzwin + from dateutil.tzwin import tzwinlocal + + tzwin_all = [tzwin, tzwinlocal] + + for var in tzwin_all: + self.assertIsNot(var, None) + + +class ImportZoneInfoTest(unittest.TestCase): + def testZoneinfoDirect(self): + import dateutil.zoneinfo + + def testZoneinfoFrom(self): + from dateutil import zoneinfo + + def testZoneinfoStar(self): + from dateutil.zoneinfo import gettz + from dateutil.zoneinfo import gettz_db_metadata + from dateutil.zoneinfo import rebuild + + zi_all = (gettz, gettz_db_metadata, rebuild) + + for var in zi_all: + self.assertIsNot(var, None) diff --git a/ext/dateutil/test/test_internals.py b/ext/dateutil/test/test_internals.py new file mode 100644 index 
0000000000..a64c514899 --- /dev/null +++ b/ext/dateutil/test/test_internals.py @@ -0,0 +1,95 @@ +# -*- coding: utf-8 -*- +""" +Tests for implementation details, not necessarily part of the user-facing +API. + +The motivating case for these tests is #483, where we want to smoke-test +code that may be difficult to reach through the standard API calls. +""" + +import unittest +import sys + +import pytest + +from dateutil.parser._parser import _ymd +from dateutil import tz + +IS_PY32 = sys.version_info[0:2] == (3, 2) + + +class TestYMD(unittest.TestCase): + + # @pytest.mark.smoke + def test_could_be_day(self): + ymd = _ymd('foo bar 124 baz') + + ymd.append(2, 'M') + assert ymd.has_month + assert not ymd.has_year + assert ymd.could_be_day(4) + assert not ymd.could_be_day(-6) + assert not ymd.could_be_day(32) + + # Assumes leapyear + assert ymd.could_be_day(29) + + ymd.append(1999) + assert ymd.has_year + assert not ymd.could_be_day(29) + + ymd.append(16, 'D') + assert ymd.has_day + assert not ymd.could_be_day(1) + + ymd = _ymd('foo bar 124 baz') + ymd.append(1999) + assert ymd.could_be_day(31) + + +### +# Test that private interfaces in _parser are deprecated properly +@pytest.mark.skipif(IS_PY32, reason='pytest.warns not supported on Python 3.2') +def test_parser_private_warns(): + from dateutil.parser import _timelex, _tzparser + from dateutil.parser import _parsetz + + with pytest.warns(DeprecationWarning): + _tzparser() + + with pytest.warns(DeprecationWarning): + _timelex('2014-03-03') + + with pytest.warns(DeprecationWarning): + _parsetz('+05:00') + + +@pytest.mark.skipif(IS_PY32, reason='pytest.warns not supported on Python 3.2') +def test_parser_parser_private_not_warns(): + from dateutil.parser._parser import _timelex, _tzparser + from dateutil.parser._parser import _parsetz + + with pytest.warns(None) as recorder: + _tzparser() + assert len(recorder) == 0 + + with pytest.warns(None) as recorder: + _timelex('2014-03-03') + + assert len(recorder) == 0 + + with 
pytest.warns(None) as recorder: + _parsetz('+05:00') + assert len(recorder) == 0 + + +@pytest.mark.tzstr +def test_tzstr_internal_timedeltas(): + with pytest.warns(tz.DeprecatedTzFormatWarning): + tz1 = tz.tzstr("EST5EDT,5,4,0,7200,11,-3,0,7200") + + with pytest.warns(tz.DeprecatedTzFormatWarning): + tz2 = tz.tzstr("EST5EDT,4,1,0,7200,10,-1,0,7200") + + assert tz1._start_delta != tz2._start_delta + assert tz1._end_delta != tz2._end_delta diff --git a/ext/dateutil/test/test_isoparser.py b/ext/dateutil/test/test_isoparser.py new file mode 100644 index 0000000000..28c1bf7657 --- /dev/null +++ b/ext/dateutil/test/test_isoparser.py @@ -0,0 +1,482 @@ +# -*- coding: utf-8 -*- +from __future__ import unicode_literals + +from datetime import datetime, timedelta, date, time +import itertools as it + +from dateutil.tz import tz +from dateutil.parser import isoparser, isoparse + +import pytest +import six + +UTC = tz.tzutc() + +def _generate_tzoffsets(limited): + def _mkoffset(hmtuple, fmt): + h, m = hmtuple + m_td = (-1 if h < 0 else 1) * m + + tzo = tz.tzoffset(None, timedelta(hours=h, minutes=m_td)) + return tzo, fmt.format(h, m) + + out = [] + if not limited: + # The subset that's just hours + hm_out_h = [(h, 0) for h in (-23, -5, 0, 5, 23)] + out.extend([_mkoffset(hm, '{:+03d}') for hm in hm_out_h]) + + # Ones that have hours and minutes + hm_out = [] + hm_out_h + hm_out += [(-12, 15), (11, 30), (10, 2), (5, 15), (-5, 30)] + else: + hm_out = [(-5, -0)] + + fmts = ['{:+03d}:{:02d}', '{:+03d}{:02d}'] + out += [_mkoffset(hm, fmt) for hm in hm_out for fmt in fmts] + + # Also add in UTC and naive + out.append((tz.tzutc(), 'Z')) + out.append((None, '')) + + return out + +FULL_TZOFFSETS = _generate_tzoffsets(False) +FULL_TZOFFSETS_AWARE = [x for x in FULL_TZOFFSETS if x[1]] +TZOFFSETS = _generate_tzoffsets(True) + +DATES = [datetime(1996, 1, 1), datetime(2017, 1, 1)] +@pytest.mark.parametrize('dt', tuple(DATES)) +def test_year_only(dt): + dtstr = dt.strftime('%Y') + + assert 
isoparse(dtstr) == dt + +DATES += [datetime(2000, 2, 1), datetime(2017, 4, 1)] +@pytest.mark.parametrize('dt', tuple(DATES)) +def test_year_month(dt): + fmt = '%Y-%m' + dtstr = dt.strftime(fmt) + + assert isoparse(dtstr) == dt + +DATES += [datetime(2016, 2, 29), datetime(2018, 3, 15)] +YMD_FMTS = ('%Y%m%d', '%Y-%m-%d') +@pytest.mark.parametrize('dt', tuple(DATES)) +@pytest.mark.parametrize('fmt', YMD_FMTS) +def test_year_month_day(dt, fmt): + dtstr = dt.strftime(fmt) + + assert isoparse(dtstr) == dt + +def _isoparse_date_and_time(dt, date_fmt, time_fmt, tzoffset, + microsecond_precision=None): + tzi, offset_str = tzoffset + fmt = date_fmt + 'T' + time_fmt + dt = dt.replace(tzinfo=tzi) + dtstr = dt.strftime(fmt) + + if microsecond_precision is not None: + if not fmt.endswith('%f'): + raise ValueError('Time format has no microseconds!') + + if microsecond_precision != 6: + dtstr = dtstr[:-(6 - microsecond_precision)] + elif microsecond_precision > 6: + raise ValueError('Precision must be 1-6') + + dtstr += offset_str + + assert isoparse(dtstr) == dt + +DATETIMES = [datetime(1998, 4, 16, 12), + datetime(2019, 11, 18, 23), + datetime(2014, 12, 16, 4)] +@pytest.mark.parametrize('dt', tuple(DATETIMES)) +@pytest.mark.parametrize('date_fmt', YMD_FMTS) +@pytest.mark.parametrize('tzoffset', TZOFFSETS) +def test_ymd_h(dt, date_fmt, tzoffset): + _isoparse_date_and_time(dt, date_fmt, '%H', tzoffset) + +DATETIMES = [datetime(2012, 1, 6, 9, 37)] +@pytest.mark.parametrize('dt', tuple(DATETIMES)) +@pytest.mark.parametrize('date_fmt', YMD_FMTS) +@pytest.mark.parametrize('time_fmt', ('%H%M', '%H:%M')) +@pytest.mark.parametrize('tzoffset', TZOFFSETS) +def test_ymd_hm(dt, date_fmt, time_fmt, tzoffset): + _isoparse_date_and_time(dt, date_fmt, time_fmt, tzoffset) + +DATETIMES = [datetime(2003, 9, 2, 22, 14, 2), + datetime(2003, 8, 8, 14, 9, 14), + datetime(2003, 4, 7, 6, 14, 59)] +HMS_FMTS = ('%H%M%S', '%H:%M:%S') +@pytest.mark.parametrize('dt', tuple(DATETIMES)) 
+@pytest.mark.parametrize('date_fmt', YMD_FMTS) +@pytest.mark.parametrize('time_fmt', HMS_FMTS) +@pytest.mark.parametrize('tzoffset', TZOFFSETS) +def test_ymd_hms(dt, date_fmt, time_fmt, tzoffset): + _isoparse_date_and_time(dt, date_fmt, time_fmt, tzoffset) + +DATETIMES = [datetime(2017, 11, 27, 6, 14, 30, 123456)] +@pytest.mark.parametrize('dt', tuple(DATETIMES)) +@pytest.mark.parametrize('date_fmt', YMD_FMTS) +@pytest.mark.parametrize('time_fmt', (x + '.%f' for x in HMS_FMTS)) +@pytest.mark.parametrize('tzoffset', TZOFFSETS) +@pytest.mark.parametrize('precision', list(range(3, 7))) +def test_ymd_hms_micro(dt, date_fmt, time_fmt, tzoffset, precision): + # Truncate the microseconds to the desired precision for the representation + dt = dt.replace(microsecond=int(round(dt.microsecond, precision-6))) + + _isoparse_date_and_time(dt, date_fmt, time_fmt, tzoffset, precision) + +@pytest.mark.parametrize('tzoffset', FULL_TZOFFSETS) +def test_full_tzoffsets(tzoffset): + dt = datetime(2017, 11, 27, 6, 14, 30, 123456) + date_fmt = '%Y-%m-%d' + time_fmt = '%H:%M:%S.%f' + + _isoparse_date_and_time(dt, date_fmt, time_fmt, tzoffset) + +@pytest.mark.parametrize('dt_str', [ + '2014-04-11T00', + '2014-04-11T24', + '2014-04-11T00:00', + '2014-04-11T24:00', + '2014-04-11T00:00:00', + '2014-04-11T24:00:00', + '2014-04-11T00:00:00.000', + '2014-04-11T24:00:00.000', + '2014-04-11T00:00:00.000000', + '2014-04-11T24:00:00.000000'] +) +def test_datetime_midnight(dt_str): + assert isoparse(dt_str) == datetime(2014, 4, 11, 0, 0, 0, 0) + +@pytest.mark.parametrize('datestr', [ + '2014-01-01', + '20140101', +]) +@pytest.mark.parametrize('sep', [' ', 'a', 'T', '_', '-']) +def test_isoparse_sep_none(datestr, sep): + isostr = datestr + sep + '14:33:09' + assert isoparse(isostr) == datetime(2014, 1, 1, 14, 33, 9) + +## +# Uncommon date formats +TIME_ARGS = ('time_args', + ((None, time(0), None), ) + tuple(('%H:%M:%S.%f', _t, _tz) + for _t, _tz in it.product([time(0), time(9, 30), time(14, 47)], + 
TZOFFSETS))) + +@pytest.mark.parametrize('isocal,dt_expected',[ + ((2017, 10), datetime(2017, 3, 6)), + ((2020, 1), datetime(2019, 12, 30)), # ISO year != Cal year + ((2004, 53), datetime(2004, 12, 27)), # Only half the week is in 2014 +]) +def test_isoweek(isocal, dt_expected): + # TODO: Figure out how to parametrize this on formats, too + for fmt in ('{:04d}-W{:02d}', '{:04d}W{:02d}'): + dtstr = fmt.format(*isocal) + assert isoparse(dtstr) == dt_expected + +@pytest.mark.parametrize('isocal,dt_expected',[ + ((2016, 13, 7), datetime(2016, 4, 3)), + ((2004, 53, 7), datetime(2005, 1, 2)), # ISO year != Cal year + ((2009, 1, 2), datetime(2008, 12, 30)), # ISO year < Cal year + ((2009, 53, 6), datetime(2010, 1, 2)) # ISO year > Cal year +]) +def test_isoweek_day(isocal, dt_expected): + # TODO: Figure out how to parametrize this on formats, too + for fmt in ('{:04d}-W{:02d}-{:d}', '{:04d}W{:02d}{:d}'): + dtstr = fmt.format(*isocal) + assert isoparse(dtstr) == dt_expected + +@pytest.mark.parametrize('isoord,dt_expected', [ + ((2004, 1), datetime(2004, 1, 1)), + ((2016, 60), datetime(2016, 2, 29)), + ((2017, 60), datetime(2017, 3, 1)), + ((2016, 366), datetime(2016, 12, 31)), + ((2017, 365), datetime(2017, 12, 31)) +]) +def test_iso_ordinal(isoord, dt_expected): + for fmt in ('{:04d}-{:03d}', '{:04d}{:03d}'): + dtstr = fmt.format(*isoord) + + assert isoparse(dtstr) == dt_expected + + +### +# Acceptance of bytes +@pytest.mark.parametrize('isostr,dt', [ + (b'2014', datetime(2014, 1, 1)), + (b'20140204', datetime(2014, 2, 4)), + (b'2014-02-04', datetime(2014, 2, 4)), + (b'2014-02-04T12', datetime(2014, 2, 4, 12)), + (b'2014-02-04T12:30', datetime(2014, 2, 4, 12, 30)), + (b'2014-02-04T12:30:15', datetime(2014, 2, 4, 12, 30, 15)), + (b'2014-02-04T12:30:15.224', datetime(2014, 2, 4, 12, 30, 15, 224000)), + (b'20140204T123015.224', datetime(2014, 2, 4, 12, 30, 15, 224000)), + (b'2014-02-04T12:30:15.224Z', datetime(2014, 2, 4, 12, 30, 15, 224000, + tz.tzutc())), + 
(b'2014-02-04T12:30:15.224+05:00', + datetime(2014, 2, 4, 12, 30, 15, 224000, + tzinfo=tz.tzoffset(None, timedelta(hours=5))))]) +def test_bytes(isostr, dt): + assert isoparse(isostr) == dt + + +### +# Invalid ISO strings +@pytest.mark.parametrize('isostr,exception', [ + ('201', ValueError), # ISO string too short + ('2012-0425', ValueError), # Inconsistent date separators + ('201204-25', ValueError), # Inconsistent date separators + ('20120425T0120:00', ValueError), # Inconsistent time separators + ('20120425T012500-334', ValueError), # Wrong microsecond separator + ('2001-1', ValueError), # YYYY-M not valid + ('2012-04-9', ValueError), # YYYY-MM-D not valid + ('201204', ValueError), # YYYYMM not valid + ('20120411T03:30+', ValueError), # Time zone too short + ('20120411T03:30+1234567', ValueError), # Time zone too long + ('20120411T03:30-25:40', ValueError), # Time zone invalid + ('2012-1a', ValueError), # Invalid month + ('20120411T03:30+00:60', ValueError), # Time zone invalid minutes + ('20120411T03:30+00:61', ValueError), # Time zone invalid minutes + ('20120411T033030.123456012:00', # No sign in time zone + ValueError), + ('2012-W00', ValueError), # Invalid ISO week + ('2012-W55', ValueError), # Invalid ISO week + ('2012-W01-0', ValueError), # Invalid ISO week day + ('2012-W01-8', ValueError), # Invalid ISO week day + ('2013-000', ValueError), # Invalid ordinal day + ('2013-366', ValueError), # Invalid ordinal day + ('2013366', ValueError), # Invalid ordinal day + ('2014-03-12Т12:30:14', ValueError), # Cyrillic T + ('2014-04-21T24:00:01', ValueError), # Invalid use of 24 for midnight + ('2014_W01-1', ValueError), # Invalid separator + ('2014W01-1', ValueError), # Inconsistent use of dashes + ('2014-W011', ValueError), # Inconsistent use of dashes + +]) +def test_iso_raises(isostr, exception): + with pytest.raises(exception): + isoparse(isostr) + + +@pytest.mark.parametrize('sep_act,valid_sep', [ + ('C', 'T'), + ('T', 'C') +]) +def 
test_iso_raises_sep(sep_act, valid_sep): + isostr = '2012-04-25' + sep_act + '01:25:00' + + +@pytest.mark.xfail() +@pytest.mark.parametrize('isostr,exception', [ + ('20120425T01:2000', ValueError), # Inconsistent time separators +]) +def test_iso_raises_failing(isostr, exception): + # These are test cases where the current implementation is too lenient + # and need to be fixed + with pytest.raises(exception): + isoparse(isostr) + + +### +# Test ISOParser constructor +@pytest.mark.parametrize('sep', [' ', '9', '🍛']) +def test_isoparser_invalid_sep(sep): + with pytest.raises(ValueError): + isoparser(sep=sep) + + +# This only fails on Python 3 +@pytest.mark.xfail(six.PY3, reason="Fails on Python 3 only") +def test_isoparser_byte_sep(): + dt = datetime(2017, 12, 6, 12, 30, 45) + dt_str = dt.isoformat(sep=str('T')) + + dt_rt = isoparser(sep=b'T').isoparse(dt_str) + + assert dt == dt_rt + + +### +# Test parse_tzstr +@pytest.mark.parametrize('tzoffset', FULL_TZOFFSETS) +def test_parse_tzstr(tzoffset): + dt = datetime(2017, 11, 27, 6, 14, 30, 123456) + date_fmt = '%Y-%m-%d' + time_fmt = '%H:%M:%S.%f' + + _isoparse_date_and_time(dt, date_fmt, time_fmt, tzoffset) + + +@pytest.mark.parametrize('tzstr', [ + '-00:00', '+00:00', '+00', '-00', '+0000', '-0000' +]) +@pytest.mark.parametrize('zero_as_utc', [True, False]) +def test_parse_tzstr_zero_as_utc(tzstr, zero_as_utc): + tzi = isoparser().parse_tzstr(tzstr, zero_as_utc=zero_as_utc) + assert tzi == tz.tzutc() + assert (type(tzi) == tz.tzutc) == zero_as_utc + + +@pytest.mark.parametrize('tzstr,exception', [ + ('00:00', ValueError), # No sign + ('05:00', ValueError), # No sign + ('_00:00', ValueError), # Invalid sign + ('+25:00', ValueError), # Offset too large + ('00:0000', ValueError), # String too long +]) +def test_parse_tzstr_fails(tzstr, exception): + with pytest.raises(exception): + isoparser().parse_tzstr(tzstr) + +### +# Test parse_isodate +def __make_date_examples(): + dates_no_day = [ + date(1999, 12, 1), + date(2016, 
2, 1) + ] + + if six.PY3: + # strftime does not support dates before 1900 in Python 2 + dates_no_day.append(date(1000, 11, 1)) + + # Only one supported format for dates with no day + o = zip(dates_no_day, it.repeat('%Y-%m')) + + dates_w_day = [ + date(1969, 12, 31), + date(1900, 1, 1), + date(2016, 2, 29), + date(2017, 11, 14) + ] + + dates_w_day_fmts = ('%Y%m%d', '%Y-%m-%d') + o = it.chain(o, it.product(dates_w_day, dates_w_day_fmts)) + + return list(o) + + +@pytest.mark.parametrize('d,dt_fmt', __make_date_examples()) +@pytest.mark.parametrize('as_bytes', [True, False]) +def test_parse_isodate(d, dt_fmt, as_bytes): + d_str = d.strftime(dt_fmt) + if isinstance(d_str, six.text_type) and as_bytes: + d_str = d_str.encode('ascii') + elif isinstance(d_str, six.binary_type) and not as_bytes: + d_str = d_str.decode('ascii') + + iparser = isoparser() + assert iparser.parse_isodate(d_str) == d + + +@pytest.mark.parametrize('isostr,exception', [ + ('243', ValueError), # ISO string too short + ('2014-0423', ValueError), # Inconsistent date separators + ('201404-23', ValueError), # Inconsistent date separators + ('2014日03月14', ValueError), # Not ASCII + ('2013-02-29', ValueError), # Not a leap year + ('2014/12/03', ValueError), # Wrong separators + ('2014-04-19T', ValueError), # Unknown components +]) +def test_isodate_raises(isostr, exception): + with pytest.raises(exception): + isoparser().parse_isodate(isostr) + + +### +# Test parse_isotime +def __make_time_examples(): + outputs = [] + + # HH + time_h = [time(0), time(8), time(22)] + time_h_fmts = ['%H'] + + outputs.append(it.product(time_h, time_h_fmts)) + + # HHMM / HH:MM + time_hm = [time(0, 0), time(0, 30), time(8, 47), time(16, 1)] + time_hm_fmts = ['%H%M', '%H:%M'] + + outputs.append(it.product(time_hm, time_hm_fmts)) + + # HHMMSS / HH:MM:SS + time_hms = [time(0, 0, 0), time(0, 15, 30), + time(8, 2, 16), time(12, 0), time(16, 2), time(20, 45)] + + time_hms_fmts = ['%H%M%S', '%H:%M:%S'] + + 
outputs.append(it.product(time_hms, time_hms_fmts)) + + # HHMMSS.ffffff / HH:MM:SS.ffffff + time_hmsu = [time(0, 0, 0, 0), time(4, 15, 3, 247993), + time(14, 21, 59, 948730), + time(23, 59, 59, 999999)] + + time_hmsu_fmts = ['%H%M%S.%f', '%H:%M:%S.%f'] + + outputs.append(it.product(time_hmsu, time_hmsu_fmts)) + + outputs = list(map(list, outputs)) + + # Time zones + ex_naive = list(it.chain.from_iterable(x[0:2] for x in outputs)) + o = it.product(ex_naive, TZOFFSETS) # ((time, fmt), (tzinfo, offsetstr)) + o = ((t.replace(tzinfo=tzi), fmt + off_str) + for (t, fmt), (tzi, off_str) in o) + + outputs.append(o) + + return list(it.chain.from_iterable(outputs)) + + +@pytest.mark.parametrize('time_val,time_fmt', __make_time_examples()) +@pytest.mark.parametrize('as_bytes', [True, False]) +def test_isotime(time_val, time_fmt, as_bytes): + tstr = time_val.strftime(time_fmt) + if isinstance(time_val, six.text_type) and as_bytes: + tstr = tstr.encode('ascii') + elif isinstance(time_val, six.binary_type) and not as_bytes: + tstr = tstr.decode('ascii') + + iparser = isoparser() + + assert iparser.parse_isotime(tstr) == time_val + +@pytest.mark.parametrize('isostr,exception', [ + ('3', ValueError), # ISO string too short + ('14時30分15秒', ValueError), # Not ASCII + ('14_30_15', ValueError), # Invalid separators + ('1430:15', ValueError), # Inconsistent separator use + ('14:30:15.3684000309', ValueError), # Too much us precision + ('25', ValueError), # Invalid hours + ('25:15', ValueError), # Invalid hours + ('14:60', ValueError), # Invalid minutes + ('14:59:61', ValueError), # Invalid seconds + ('14:30:15.3446830500', ValueError), # No sign in time zone + ('14:30:15+', ValueError), # Time zone too short + ('14:30:15+1234567', ValueError), # Time zone invalid + ('14:59:59+25:00', ValueError), # Invalid tz hours + ('14:59:59+12:62', ValueError), # Invalid tz minutes + ('14:59:30_344583', ValueError), # Invalid microsecond separator +]) +def test_isotime_raises(isostr, exception): + 
iparser = isoparser() + with pytest.raises(exception): + iparser.parse_isotime(isostr) + + +@pytest.mark.xfail() +@pytest.mark.parametrize('isostr,exception', [ + ('14:3015', ValueError), # Inconsistent separator use + ('201202', ValueError) # Invalid ISO format +]) +def test_isotime_raises_xfail(isostr, exception): + iparser = isoparser() + with pytest.raises(exception): + iparser.parse_isotime(isostr) diff --git a/ext/dateutil/test/test_parser.py b/ext/dateutil/test/test_parser.py new file mode 100644 index 0000000000..f8c2072040 --- /dev/null +++ b/ext/dateutil/test/test_parser.py @@ -0,0 +1,1114 @@ +# -*- coding: utf-8 -*- +from __future__ import unicode_literals + +import itertools +from datetime import datetime, timedelta +import unittest +import sys + +from dateutil import tz +from dateutil.tz import tzoffset +from dateutil.parser import parse, parserinfo +from dateutil.parser import UnknownTimezoneWarning + +from ._common import TZEnvContext + +from six import assertRaisesRegex, PY3 +from six.moves import StringIO + +import pytest + +# Platform info +IS_WIN = sys.platform.startswith('win') + +try: + datetime.now().strftime('%-d') + PLATFORM_HAS_DASH_D = True +except ValueError: + PLATFORM_HAS_DASH_D = False + + +class TestFormat(unittest.TestCase): + + def test_ybd(self): + # If we have a 4-digit year, a non-numeric month (abbreviated or not), + # and a day (1 or 2 digits), then there is no ambiguity as to which + # token is a year/month/day. This holds regardless of what order the + # terms are in and for each of the separators below. 
+ + seps = ['-', ' ', '/', '.'] + + year_tokens = ['%Y'] + month_tokens = ['%b', '%B'] + day_tokens = ['%d'] + if PLATFORM_HAS_DASH_D: + day_tokens.append('%-d') + + prods = itertools.product(year_tokens, month_tokens, day_tokens) + perms = [y for x in prods for y in itertools.permutations(x)] + unambig_fmts = [sep.join(perm) for sep in seps for perm in perms] + + actual = datetime(2003, 9, 25) + + for fmt in unambig_fmts: + dstr = actual.strftime(fmt) + res = parse(dstr) + self.assertEqual(res, actual) + + +class ParserTest(unittest.TestCase): + + def setUp(self): + self.tzinfos = {"BRST": -10800} + self.brsttz = tzoffset("BRST", -10800) + self.default = datetime(2003, 9, 25) + + # Parser should be able to handle bytestring and unicode + self.uni_str = '2014-05-01 08:00:00' + self.str_str = self.uni_str.encode() + + def testEmptyString(self): + with self.assertRaises(ValueError): + parse('') + + def testNone(self): + with self.assertRaises(TypeError): + parse(None) + + def testInvalidType(self): + with self.assertRaises(TypeError): + parse(13) + + def testDuckTyping(self): + # We want to support arbitrary classes that implement the stream + # interface. 
+ + class StringPassThrough(object): + def __init__(self, stream): + self.stream = stream + + def read(self, *args, **kwargs): + return self.stream.read(*args, **kwargs) + + dstr = StringPassThrough(StringIO('2014 January 19')) + + self.assertEqual(parse(dstr), datetime(2014, 1, 19)) + + def testParseStream(self): + dstr = StringIO('2014 January 19') + + self.assertEqual(parse(dstr), datetime(2014, 1, 19)) + + def testParseStr(self): + self.assertEqual(parse(self.str_str), + parse(self.uni_str)) + + def testParseBytes(self): + self.assertEqual(parse(b'2014 January 19'), datetime(2014, 1, 19)) + + def testParseBytearray(self): + # GH #417 + self.assertEqual(parse(bytearray(b'2014 January 19')), + datetime(2014, 1, 19)) + + def testParserParseStr(self): + from dateutil.parser import parser + + self.assertEqual(parser().parse(self.str_str), + parser().parse(self.uni_str)) + + def testParseUnicodeWords(self): + + class rus_parserinfo(parserinfo): + MONTHS = [("янв", "Январь"), + ("фев", "Февраль"), + ("мар", "Март"), + ("апр", "Апрель"), + ("май", "Май"), + ("июн", "Июнь"), + ("июл", "Июль"), + ("авг", "Август"), + ("сен", "Сентябрь"), + ("окт", "Октябрь"), + ("ноя", "Ноябрь"), + ("дек", "Декабрь")] + + self.assertEqual(parse('10 Сентябрь 2015 10:20', + parserinfo=rus_parserinfo()), + datetime(2015, 9, 10, 10, 20)) + + def testParseWithNulls(self): + # This relies on the from __future__ import unicode_literals, because + # explicitly specifying a unicode literal is a syntax error in Py 3.2 + # May want to switch to u'...' if we ever drop Python 3.2 support. 
+ pstring = '\x00\x00August 29, 1924' + + self.assertEqual(parse(pstring), + datetime(1924, 8, 29)) + + def testDateCommandFormat(self): + self.assertEqual(parse("Thu Sep 25 10:36:28 BRST 2003", + tzinfos=self.tzinfos), + datetime(2003, 9, 25, 10, 36, 28, + tzinfo=self.brsttz)) + + def testDateCommandFormatUnicode(self): + self.assertEqual(parse("Thu Sep 25 10:36:28 BRST 2003", + tzinfos=self.tzinfos), + datetime(2003, 9, 25, 10, 36, 28, + tzinfo=self.brsttz)) + + def testDateCommandFormatReversed(self): + self.assertEqual(parse("2003 10:36:28 BRST 25 Sep Thu", + tzinfos=self.tzinfos), + datetime(2003, 9, 25, 10, 36, 28, + tzinfo=self.brsttz)) + + def testDateCommandFormatWithLong(self): + if not PY3: + self.assertEqual(parse("Thu Sep 25 10:36:28 BRST 2003", + tzinfos={"BRST": long(-10800)}), + datetime(2003, 9, 25, 10, 36, 28, + tzinfo=self.brsttz)) + def testDateCommandFormatIgnoreTz(self): + self.assertEqual(parse("Thu Sep 25 10:36:28 BRST 2003", + ignoretz=True), + datetime(2003, 9, 25, 10, 36, 28)) + + def testDateCommandFormatStrip1(self): + self.assertEqual(parse("Thu Sep 25 10:36:28 2003"), + datetime(2003, 9, 25, 10, 36, 28)) + + def testDateCommandFormatStrip2(self): + self.assertEqual(parse("Thu Sep 25 10:36:28", default=self.default), + datetime(2003, 9, 25, 10, 36, 28)) + + def testDateCommandFormatStrip3(self): + self.assertEqual(parse("Thu Sep 10:36:28", default=self.default), + datetime(2003, 9, 25, 10, 36, 28)) + + def testDateCommandFormatStrip4(self): + self.assertEqual(parse("Thu 10:36:28", default=self.default), + datetime(2003, 9, 25, 10, 36, 28)) + + def testDateCommandFormatStrip5(self): + self.assertEqual(parse("Sep 10:36:28", default=self.default), + datetime(2003, 9, 25, 10, 36, 28)) + + def testDateCommandFormatStrip6(self): + self.assertEqual(parse("10:36:28", default=self.default), + datetime(2003, 9, 25, 10, 36, 28)) + + def testDateCommandFormatStrip7(self): + self.assertEqual(parse("10:36", default=self.default), + datetime(2003, 9, 
25, 10, 36)) + + def testDateCommandFormatStrip8(self): + self.assertEqual(parse("Thu Sep 25 2003"), + datetime(2003, 9, 25)) + + def testDateCommandFormatStrip10(self): + self.assertEqual(parse("Sep 2003", default=self.default), + datetime(2003, 9, 25)) + + def testDateCommandFormatStrip11(self): + self.assertEqual(parse("Sep", default=self.default), + datetime(2003, 9, 25)) + + def testDateCommandFormatStrip12(self): + self.assertEqual(parse("2003", default=self.default), + datetime(2003, 9, 25)) + + def testDateRCommandFormat(self): + self.assertEqual(parse("Thu, 25 Sep 2003 10:49:41 -0300"), + datetime(2003, 9, 25, 10, 49, 41, + tzinfo=self.brsttz)) + + def testISOFormat(self): + self.assertEqual(parse("2003-09-25T10:49:41.5-03:00"), + datetime(2003, 9, 25, 10, 49, 41, 500000, + tzinfo=self.brsttz)) + + def testISOFormatStrip1(self): + self.assertEqual(parse("2003-09-25T10:49:41-03:00"), + datetime(2003, 9, 25, 10, 49, 41, + tzinfo=self.brsttz)) + + def testISOFormatStrip2(self): + self.assertEqual(parse("2003-09-25T10:49:41"), + datetime(2003, 9, 25, 10, 49, 41)) + + def testISOFormatStrip3(self): + self.assertEqual(parse("2003-09-25T10:49"), + datetime(2003, 9, 25, 10, 49)) + + def testISOFormatStrip4(self): + self.assertEqual(parse("2003-09-25T10"), + datetime(2003, 9, 25, 10)) + + def testISOFormatStrip5(self): + self.assertEqual(parse("2003-09-25"), + datetime(2003, 9, 25)) + + def testISOStrippedFormat(self): + self.assertEqual(parse("20030925T104941.5-0300"), + datetime(2003, 9, 25, 10, 49, 41, 500000, + tzinfo=self.brsttz)) + + def testISOStrippedFormatStrip1(self): + self.assertEqual(parse("20030925T104941-0300"), + datetime(2003, 9, 25, 10, 49, 41, + tzinfo=self.brsttz)) + + def testISOStrippedFormatStrip2(self): + self.assertEqual(parse("20030925T104941"), + datetime(2003, 9, 25, 10, 49, 41)) + + def testISOStrippedFormatStrip3(self): + self.assertEqual(parse("20030925T1049"), + datetime(2003, 9, 25, 10, 49, 0)) + + def 
testISOStrippedFormatStrip4(self): + self.assertEqual(parse("20030925T10"), + datetime(2003, 9, 25, 10)) + + def testISOStrippedFormatStrip5(self): + self.assertEqual(parse("20030925"), + datetime(2003, 9, 25)) + + def testPythonLoggerFormat(self): + self.assertEqual(parse("2003-09-25 10:49:41,502"), + datetime(2003, 9, 25, 10, 49, 41, 502000)) + + def testNoSeparator1(self): + self.assertEqual(parse("199709020908"), + datetime(1997, 9, 2, 9, 8)) + + def testNoSeparator2(self): + self.assertEqual(parse("19970902090807"), + datetime(1997, 9, 2, 9, 8, 7)) + + def testDateWithDash1(self): + self.assertEqual(parse("2003-09-25"), + datetime(2003, 9, 25)) + + def testDateWithDash6(self): + self.assertEqual(parse("09-25-2003"), + datetime(2003, 9, 25)) + + def testDateWithDash7(self): + self.assertEqual(parse("25-09-2003"), + datetime(2003, 9, 25)) + + def testDateWithDash8(self): + self.assertEqual(parse("10-09-2003", dayfirst=True), + datetime(2003, 9, 10)) + + def testDateWithDash9(self): + self.assertEqual(parse("10-09-2003"), + datetime(2003, 10, 9)) + + def testDateWithDash10(self): + self.assertEqual(parse("10-09-03"), + datetime(2003, 10, 9)) + + def testDateWithDash11(self): + self.assertEqual(parse("10-09-03", yearfirst=True), + datetime(2010, 9, 3)) + + def testDateWithDot1(self): + self.assertEqual(parse("2003.09.25"), + datetime(2003, 9, 25)) + + def testDateWithDot6(self): + self.assertEqual(parse("09.25.2003"), + datetime(2003, 9, 25)) + + def testDateWithDot7(self): + self.assertEqual(parse("25.09.2003"), + datetime(2003, 9, 25)) + + def testDateWithDot8(self): + self.assertEqual(parse("10.09.2003", dayfirst=True), + datetime(2003, 9, 10)) + + def testDateWithDot9(self): + self.assertEqual(parse("10.09.2003"), + datetime(2003, 10, 9)) + + def testDateWithDot10(self): + self.assertEqual(parse("10.09.03"), + datetime(2003, 10, 9)) + + def testDateWithDot11(self): + self.assertEqual(parse("10.09.03", yearfirst=True), + datetime(2010, 9, 3)) + + def 
testDateWithSlash1(self): + self.assertEqual(parse("2003/09/25"), + datetime(2003, 9, 25)) + + def testDateWithSlash6(self): + self.assertEqual(parse("09/25/2003"), + datetime(2003, 9, 25)) + + def testDateWithSlash7(self): + self.assertEqual(parse("25/09/2003"), + datetime(2003, 9, 25)) + + def testDateWithSlash8(self): + self.assertEqual(parse("10/09/2003", dayfirst=True), + datetime(2003, 9, 10)) + + def testDateWithSlash9(self): + self.assertEqual(parse("10/09/2003"), + datetime(2003, 10, 9)) + + def testDateWithSlash10(self): + self.assertEqual(parse("10/09/03"), + datetime(2003, 10, 9)) + + def testDateWithSlash11(self): + self.assertEqual(parse("10/09/03", yearfirst=True), + datetime(2010, 9, 3)) + + def testDateWithSpace1(self): + self.assertEqual(parse("2003 09 25"), + datetime(2003, 9, 25)) + + def testDateWithSpace6(self): + self.assertEqual(parse("09 25 2003"), + datetime(2003, 9, 25)) + + def testDateWithSpace7(self): + self.assertEqual(parse("25 09 2003"), + datetime(2003, 9, 25)) + + def testDateWithSpace8(self): + self.assertEqual(parse("10 09 2003", dayfirst=True), + datetime(2003, 9, 10)) + + def testDateWithSpace9(self): + self.assertEqual(parse("10 09 2003"), + datetime(2003, 10, 9)) + + def testDateWithSpace10(self): + self.assertEqual(parse("10 09 03"), + datetime(2003, 10, 9)) + + def testDateWithSpace11(self): + self.assertEqual(parse("10 09 03", yearfirst=True), + datetime(2010, 9, 3)) + + def testDateWithSpace12(self): + self.assertEqual(parse("25 09 03"), + datetime(2003, 9, 25)) + + def testStrangelyOrderedDate1(self): + self.assertEqual(parse("03 25 Sep"), + datetime(2003, 9, 25)) + + def testStrangelyOrderedDate3(self): + self.assertEqual(parse("25 03 Sep"), + datetime(2025, 9, 3)) + + def testHourWithLetters(self): + self.assertEqual(parse("10h36m28.5s", default=self.default), + datetime(2003, 9, 25, 10, 36, 28, 500000)) + + def testHourWithLettersStrip1(self): + self.assertEqual(parse("10h36m28s", default=self.default), + 
datetime(2003, 9, 25, 10, 36, 28)) + + def testHourWithLettersStrip2(self): + self.assertEqual(parse("10h36m", default=self.default), + datetime(2003, 9, 25, 10, 36)) + + def testHourWithLettersStrip3(self): + self.assertEqual(parse("10h", default=self.default), + datetime(2003, 9, 25, 10)) + + def testHourWithLettersStrip4(self): + self.assertEqual(parse("10 h 36", default=self.default), + datetime(2003, 9, 25, 10, 36)) + + def testHourWithLetterStrip5(self): + self.assertEqual(parse("10 h 36.5", default=self.default), + datetime(2003, 9, 25, 10, 36, 30)) + + def testMinuteWithLettersSpaces1(self): + self.assertEqual(parse("36 m 5", default=self.default), + datetime(2003, 9, 25, 0, 36, 5)) + + def testMinuteWithLettersSpaces2(self): + self.assertEqual(parse("36 m 5 s", default=self.default), + datetime(2003, 9, 25, 0, 36, 5)) + + def testMinuteWithLettersSpaces3(self): + self.assertEqual(parse("36 m 05", default=self.default), + datetime(2003, 9, 25, 0, 36, 5)) + + def testMinuteWithLettersSpaces4(self): + self.assertEqual(parse("36 m 05 s", default=self.default), + datetime(2003, 9, 25, 0, 36, 5)) + + def testAMPMNoHour(self): + with self.assertRaises(ValueError): + parse("AM") + + with self.assertRaises(ValueError): + parse("Jan 20, 2015 PM") + + def testHourAmPm1(self): + self.assertEqual(parse("10h am", default=self.default), + datetime(2003, 9, 25, 10)) + + def testHourAmPm2(self): + self.assertEqual(parse("10h pm", default=self.default), + datetime(2003, 9, 25, 22)) + + def testHourAmPm3(self): + self.assertEqual(parse("10am", default=self.default), + datetime(2003, 9, 25, 10)) + + def testHourAmPm4(self): + self.assertEqual(parse("10pm", default=self.default), + datetime(2003, 9, 25, 22)) + + def testHourAmPm5(self): + self.assertEqual(parse("10:00 am", default=self.default), + datetime(2003, 9, 25, 10)) + + def testHourAmPm6(self): + self.assertEqual(parse("10:00 pm", default=self.default), + datetime(2003, 9, 25, 22)) + + def testHourAmPm7(self): + 
self.assertEqual(parse("10:00am", default=self.default), + datetime(2003, 9, 25, 10)) + + def testHourAmPm8(self): + self.assertEqual(parse("10:00pm", default=self.default), + datetime(2003, 9, 25, 22)) + + def testHourAmPm9(self): + self.assertEqual(parse("10:00a.m", default=self.default), + datetime(2003, 9, 25, 10)) + + def testHourAmPm10(self): + self.assertEqual(parse("10:00p.m", default=self.default), + datetime(2003, 9, 25, 22)) + + def testHourAmPm11(self): + self.assertEqual(parse("10:00a.m.", default=self.default), + datetime(2003, 9, 25, 10)) + + def testHourAmPm12(self): + self.assertEqual(parse("10:00p.m.", default=self.default), + datetime(2003, 9, 25, 22)) + + def testAMPMRange(self): + with self.assertRaises(ValueError): + parse("13:44 AM") + + with self.assertRaises(ValueError): + parse("January 25, 1921 23:13 PM") + + def testPertain(self): + self.assertEqual(parse("Sep 03", default=self.default), + datetime(2003, 9, 3)) + self.assertEqual(parse("Sep of 03", default=self.default), + datetime(2003, 9, 25)) + + def testWeekdayAlone(self): + self.assertEqual(parse("Wed", default=self.default), + datetime(2003, 10, 1)) + + def testLongWeekday(self): + self.assertEqual(parse("Wednesday", default=self.default), + datetime(2003, 10, 1)) + + def testLongMonth(self): + self.assertEqual(parse("October", default=self.default), + datetime(2003, 10, 25)) + + def testZeroYear(self): + self.assertEqual(parse("31-Dec-00", default=self.default), + datetime(2000, 12, 31)) + + def testFuzzy(self): + s = "Today is 25 of September of 2003, exactly " \ + "at 10:49:41 with timezone -03:00." + self.assertEqual(parse(s, fuzzy=True), + datetime(2003, 9, 25, 10, 49, 41, + tzinfo=self.brsttz)) + + def testFuzzyWithTokens(self): + s1 = "Today is 25 of September of 2003, exactly " \ + "at 10:49:41 with timezone -03:00." 
+ self.assertEqual(parse(s1, fuzzy_with_tokens=True), + (datetime(2003, 9, 25, 10, 49, 41, + tzinfo=self.brsttz), + ('Today is ', 'of ', ', exactly at ', + ' with timezone ', '.'))) + + s2 = "http://biz.yahoo.com/ipo/p/600221.html" + self.assertEqual(parse(s2, fuzzy_with_tokens=True), + (datetime(2060, 2, 21, 0, 0, 0), + ('http://biz.yahoo.com/ipo/p/', '.html'))) + + def testFuzzyAMPMProblem(self): + # Sometimes fuzzy parsing results in AM/PM flag being set without + # hours - if it's fuzzy it should ignore that. + s1 = "I have a meeting on March 1, 1974." + s2 = "On June 8th, 2020, I am going to be the first man on Mars" + + # Also don't want any erroneous AM or PMs changing the parsed time + s3 = "Meet me at the AM/PM on Sunset at 3:00 AM on December 3rd, 2003" + s4 = "Meet me at 3:00AM on December 3rd, 2003 at the AM/PM on Sunset" + + self.assertEqual(parse(s1, fuzzy=True), datetime(1974, 3, 1)) + self.assertEqual(parse(s2, fuzzy=True), datetime(2020, 6, 8)) + self.assertEqual(parse(s3, fuzzy=True), datetime(2003, 12, 3, 3)) + self.assertEqual(parse(s4, fuzzy=True), datetime(2003, 12, 3, 3)) + + def testFuzzyIgnoreAMPM(self): + s1 = "Jan 29, 1945 14:45 AM I going to see you there?" 
+ with pytest.warns(UnknownTimezoneWarning): + res = parse(s1, fuzzy=True) + self.assertEqual(res, datetime(1945, 1, 29, 14, 45)) + + def testExtraSpace(self): + self.assertEqual(parse(" July 4 , 1976 12:01:02 am "), + datetime(1976, 7, 4, 0, 1, 2)) + + def testRandomFormat1(self): + self.assertEqual(parse("Wed, July 10, '96"), + datetime(1996, 7, 10, 0, 0)) + + def testRandomFormat2(self): + self.assertEqual(parse("1996.07.10 AD at 15:08:56 PDT", + ignoretz=True), + datetime(1996, 7, 10, 15, 8, 56)) + + def testRandomFormat3(self): + self.assertEqual(parse("1996.July.10 AD 12:08 PM"), + datetime(1996, 7, 10, 12, 8)) + + def testRandomFormat4(self): + self.assertEqual(parse("Tuesday, April 12, 1952 AD 3:30:42pm PST", + ignoretz=True), + datetime(1952, 4, 12, 15, 30, 42)) + + def testRandomFormat5(self): + self.assertEqual(parse("November 5, 1994, 8:15:30 am EST", + ignoretz=True), + datetime(1994, 11, 5, 8, 15, 30)) + + def testRandomFormat6(self): + self.assertEqual(parse("1994-11-05T08:15:30-05:00", + ignoretz=True), + datetime(1994, 11, 5, 8, 15, 30)) + + def testRandomFormat7(self): + self.assertEqual(parse("1994-11-05T08:15:30Z", + ignoretz=True), + datetime(1994, 11, 5, 8, 15, 30)) + + def testRandomFormat8(self): + self.assertEqual(parse("July 4, 1976"), datetime(1976, 7, 4)) + + def testRandomFormat9(self): + self.assertEqual(parse("7 4 1976"), datetime(1976, 7, 4)) + + def testRandomFormat10(self): + self.assertEqual(parse("4 jul 1976"), datetime(1976, 7, 4)) + + def testRandomFormat11(self): + self.assertEqual(parse("7-4-76"), datetime(1976, 7, 4)) + + def testRandomFormat12(self): + self.assertEqual(parse("19760704"), datetime(1976, 7, 4)) + + def testRandomFormat13(self): + self.assertEqual(parse("0:01:02", default=self.default), + datetime(2003, 9, 25, 0, 1, 2)) + + def testRandomFormat14(self): + self.assertEqual(parse("12h 01m02s am", default=self.default), + datetime(2003, 9, 25, 0, 1, 2)) + + def testRandomFormat15(self): + 
self.assertEqual(parse("0:01:02 on July 4, 1976"), + datetime(1976, 7, 4, 0, 1, 2)) + + def testRandomFormat16(self): + self.assertEqual(parse("0:01:02 on July 4, 1976"), + datetime(1976, 7, 4, 0, 1, 2)) + + def testRandomFormat17(self): + self.assertEqual(parse("1976-07-04T00:01:02Z", ignoretz=True), + datetime(1976, 7, 4, 0, 1, 2)) + + def testRandomFormat18(self): + self.assertEqual(parse("July 4, 1976 12:01:02 am"), + datetime(1976, 7, 4, 0, 1, 2)) + + def testRandomFormat19(self): + self.assertEqual(parse("Mon Jan 2 04:24:27 1995"), + datetime(1995, 1, 2, 4, 24, 27)) + + def testRandomFormat20(self): + self.assertEqual(parse("Tue Apr 4 00:22:12 PDT 1995", ignoretz=True), + datetime(1995, 4, 4, 0, 22, 12)) + + def testRandomFormat21(self): + self.assertEqual(parse("04.04.95 00:22"), + datetime(1995, 4, 4, 0, 22)) + + def testRandomFormat22(self): + self.assertEqual(parse("Jan 1 1999 11:23:34.578"), + datetime(1999, 1, 1, 11, 23, 34, 578000)) + + def testRandomFormat23(self): + self.assertEqual(parse("950404 122212"), + datetime(1995, 4, 4, 12, 22, 12)) + + def testRandomFormat24(self): + self.assertEqual(parse("0:00 PM, PST", default=self.default, + ignoretz=True), + datetime(2003, 9, 25, 12, 0)) + + def testRandomFormat25(self): + self.assertEqual(parse("12:08 PM", default=self.default), + datetime(2003, 9, 25, 12, 8)) + + def testRandomFormat26(self): + with pytest.warns(UnknownTimezoneWarning): + res = parse("5:50 A.M. 
on June 13, 1990") + + self.assertEqual(res, datetime(1990, 6, 13, 5, 50)) + + def testRandomFormat27(self): + self.assertEqual(parse("3rd of May 2001"), datetime(2001, 5, 3)) + + def testRandomFormat28(self): + self.assertEqual(parse("5th of March 2001"), datetime(2001, 3, 5)) + + def testRandomFormat29(self): + self.assertEqual(parse("1st of May 2003"), datetime(2003, 5, 1)) + + def testRandomFormat30(self): + self.assertEqual(parse("01h02m03", default=self.default), + datetime(2003, 9, 25, 1, 2, 3)) + + def testRandomFormat31(self): + self.assertEqual(parse("01h02", default=self.default), + datetime(2003, 9, 25, 1, 2)) + + def testRandomFormat32(self): + self.assertEqual(parse("01h02s", default=self.default), + datetime(2003, 9, 25, 1, 0, 2)) + + def testRandomFormat33(self): + self.assertEqual(parse("01m02", default=self.default), + datetime(2003, 9, 25, 0, 1, 2)) + + def testRandomFormat34(self): + self.assertEqual(parse("01m02h", default=self.default), + datetime(2003, 9, 25, 2, 1)) + + def testRandomFormat35(self): + self.assertEqual(parse("2004 10 Apr 11h30m", default=self.default), + datetime(2004, 4, 10, 11, 30)) + + def test_99_ad(self): + self.assertEqual(parse('0099-01-01T00:00:00'), + datetime(99, 1, 1, 0, 0)) + + def test_31_ad(self): + self.assertEqual(parse('0031-01-01T00:00:00'), + datetime(31, 1, 1, 0, 0)) + + def testInvalidDay(self): + with self.assertRaises(ValueError): + parse("Feb 30, 2007") + + def testUnspecifiedDayFallback(self): + # Test that for an unspecified day, the fallback behavior is correct. 
+ self.assertEqual(parse("April 2009", default=datetime(2010, 1, 31)), + datetime(2009, 4, 30)) + + def testUnspecifiedDayFallbackFebNoLeapYear(self): + self.assertEqual(parse("Feb 2007", default=datetime(2010, 1, 31)), + datetime(2007, 2, 28)) + + def testUnspecifiedDayFallbackFebLeapYear(self): + self.assertEqual(parse("Feb 2008", default=datetime(2010, 1, 31)), + datetime(2008, 2, 29)) + + def testTzinfoDictionaryCouldReturnNone(self): + self.assertEqual(parse('2017-02-03 12:40 BRST', tzinfos={"BRST": None}), + datetime(2017, 2, 3, 12, 40)) + + def testTzinfosCallableCouldReturnNone(self): + self.assertEqual(parse('2017-02-03 12:40 BRST', tzinfos=lambda *args: None), + datetime(2017, 2, 3, 12, 40)) + + def testErrorType01(self): + self.assertRaises(ValueError, + parse, 'shouldfail') + + def testCorrectErrorOnFuzzyWithTokens(self): + assertRaisesRegex(self, ValueError, 'Unknown string format', + parse, '04/04/32/423', fuzzy_with_tokens=True) + assertRaisesRegex(self, ValueError, 'Unknown string format', + parse, '04/04/04 +32423', fuzzy_with_tokens=True) + assertRaisesRegex(self, ValueError, 'Unknown string format', + parse, '04/04/0d4', fuzzy_with_tokens=True) + + def testIncreasingCTime(self): + # This test will check 200 different years, every month, every day, + # every hour, every minute, every second, and every weekday, using + # a delta of more or less 1 year, 1 month, 1 day, 1 minute and + # 1 second. + delta = timedelta(days=365+31+1, seconds=1+60+60*60) + dt = datetime(1900, 1, 1, 0, 0, 0, 0) + for i in range(200): + self.assertEqual(parse(dt.ctime()), dt) + dt += delta + + def testIncreasingISOFormat(self): + delta = timedelta(days=365+31+1, seconds=1+60+60*60) + dt = datetime(1900, 1, 1, 0, 0, 0, 0) + for i in range(200): + self.assertEqual(parse(dt.isoformat()), dt) + dt += delta + + def testMicrosecondsPrecisionError(self): + # Skip found out that sad precision problem. 
:-( + dt1 = parse("00:11:25.01") + dt2 = parse("00:12:10.01") + self.assertEqual(dt1.microsecond, 10000) + self.assertEqual(dt2.microsecond, 10000) + + def testMicrosecondPrecisionErrorReturns(self): + # One more precision issue, discovered by Eric Brown. This should + # be the last one, as we're no longer using floating points. + for ms in [100001, 100000, 99999, 99998, + 10001, 10000, 9999, 9998, + 1001, 1000, 999, 998, + 101, 100, 99, 98]: + dt = datetime(2008, 2, 27, 21, 26, 1, ms) + self.assertEqual(parse(dt.isoformat()), dt) + + def testHighPrecisionSeconds(self): + self.assertEqual(parse("20080227T21:26:01.123456789"), + datetime(2008, 2, 27, 21, 26, 1, 123456)) + + def testCustomParserInfo(self): + # Custom parser info wasn't working, as Michael Elsdörfer discovered. + from dateutil.parser import parserinfo, parser + + class myparserinfo(parserinfo): + MONTHS = parserinfo.MONTHS[:] + MONTHS[0] = ("Foo", "Foo") + myparser = parser(myparserinfo()) + dt = myparser.parse("01/Foo/2007") + self.assertEqual(dt, datetime(2007, 1, 1)) + + def testCustomParserShortDaynames(self): + # Horacio Hoyos discovered that day names shorter than 3 characters, + # for example two letter German day name abbreviations, don't work: + # https://github.com/dateutil/dateutil/issues/343 + from dateutil.parser import parserinfo, parser + + class GermanParserInfo(parserinfo): + WEEKDAYS = [("Mo", "Montag"), + ("Di", "Dienstag"), + ("Mi", "Mittwoch"), + ("Do", "Donnerstag"), + ("Fr", "Freitag"), + ("Sa", "Samstag"), + ("So", "Sonntag")] + + myparser = parser(GermanParserInfo()) + dt = myparser.parse("Sa 21. 
Jan 2017") + self.assertEqual(dt, datetime(2017, 1, 21)) + + def testNoYearFirstNoDayFirst(self): + dtstr = '090107' + + # Should be MMDDYY + self.assertEqual(parse(dtstr), + datetime(2007, 9, 1)) + + self.assertEqual(parse(dtstr, yearfirst=False, dayfirst=False), + datetime(2007, 9, 1)) + + def testYearFirst(self): + dtstr = '090107' + + # Should be MMDDYY + self.assertEqual(parse(dtstr, yearfirst=True), + datetime(2009, 1, 7)) + + self.assertEqual(parse(dtstr, yearfirst=True, dayfirst=False), + datetime(2009, 1, 7)) + + def testDayFirst(self): + dtstr = '090107' + + # Should be DDMMYY + self.assertEqual(parse(dtstr, dayfirst=True), + datetime(2007, 1, 9)) + + self.assertEqual(parse(dtstr, yearfirst=False, dayfirst=True), + datetime(2007, 1, 9)) + + def testDayFirstYearFirst(self): + dtstr = '090107' + # Should be YYDDMM + self.assertEqual(parse(dtstr, yearfirst=True, dayfirst=True), + datetime(2009, 7, 1)) + + def testUnambiguousYearFirst(self): + dtstr = '2015 09 25' + self.assertEqual(parse(dtstr, yearfirst=True), + datetime(2015, 9, 25)) + + def testUnambiguousDayFirst(self): + dtstr = '2015 09 25' + self.assertEqual(parse(dtstr, dayfirst=True), + datetime(2015, 9, 25)) + + def testUnambiguousDayFirstYearFirst(self): + dtstr = '2015 09 25' + self.assertEqual(parse(dtstr, dayfirst=True, yearfirst=True), + datetime(2015, 9, 25)) + + def test_mstridx(self): + # See GH408 + dtstr = '2015-15-May' + self.assertEqual(parse(dtstr), + datetime(2015, 5, 15)) + + def test_idx_check(self): + dtstr = '2017-07-17 06:15:' + # Pre-PR, the trailing colon will cause an IndexError at 824-825 + # when checking `i < len_l` and then accessing `l[i+1]` + res = parse(dtstr, fuzzy=True) + self.assertEqual(res, datetime(2017, 7, 17, 6, 15)) + + def test_dBY(self): + # See GH360 + dtstr = '13NOV2017' + res = parse(dtstr) + self.assertEqual(res, datetime(2017, 11, 13)) + + def test_hmBY(self): + # See GH#483 + dtstr = '02:17NOV2017' + res = parse(dtstr, default=self.default) + 
self.assertEqual(res, datetime(2017, 11, self.default.day, 2, 17)) + + def test_validate_hour(self): + # See GH353 + invalid = "201A-01-01T23:58:39.239769+03:00" + with self.assertRaises(ValueError): + parse(invalid) + + def test_era_trailing_year(self): + dstr = 'AD2001' + res = parse(dstr) + assert res.year == 2001, res + + def test_pre_12_year_same_month(self): + # See GH PR #293 + dtstr = '0003-03-04' + assert parse(dtstr) == datetime(3, 3, 4) + + +class TestParseUnimplementedCases(object): + @pytest.mark.xfail + def test_somewhat_ambiguous_string(self): + # Ref: github issue #487 + # The parser is choosing the wrong part for hour + # causing datetime to raise an exception. + dtstr = '1237 PM BRST Mon Oct 30 2017' + res = parse(dtstr, tzinfo=self.tzinfos) + assert res == datetime(2017, 10, 30, 12, 37, tzinfo=self.tzinfos) + + @pytest.mark.xfail + def test_YmdH_M_S(self): + # found in nasdaq's ftp data + dstr = '1991041310:19:24' + expected = datetime(1991, 4, 13, 10, 19, 24) + res = parse(dstr) + assert res == expected, (res, expected) + + @pytest.mark.xfail + def test_first_century(self): + dstr = '0031 Nov 03' + expected = datetime(31, 11, 3) + res = parse(dstr) + assert res == expected, res + + @pytest.mark.xfail + def test_era_trailing_year_with_dots(self): + dstr = 'A.D.2001' + res = parse(dstr) + assert res.year == 2001, res + + @pytest.mark.xfail + def test_ad_nospace(self): + expected = datetime(6, 5, 19) + for dstr in [' 6AD May 19', ' 06AD May 19', + ' 006AD May 19', ' 0006AD May 19']: + res = parse(dstr) + assert res == expected, (dstr, res) + + @pytest.mark.xfail + def test_four_letter_day(self): + dstr = 'Frid Dec 30, 2016' + expected = datetime(2016, 12, 30) + res = parse(dstr) + assert res == expected + + @pytest.mark.xfail + def test_non_date_number(self): + dstr = '1,700' + with pytest.raises(ValueError): + parse(dstr) + + @pytest.mark.xfail + def test_on_era(self): + # This could be classified as an "eras" test, but the relevant part + # about 
this is the ` on ` + dstr = '2:15 PM on January 2nd 1973 A.D.' + expected = datetime(1973, 1, 2, 14, 15) + res = parse(dstr) + assert res == expected + + @pytest.mark.xfail + def test_extraneous_year(self): + # This was found in the wild at insidertrading.org + dstr = "2011 MARTIN CHILDREN'S IRREVOCABLE TRUST u/a/d NOVEMBER 7, 2012" + res = parse(dstr, fuzzy_with_tokens=True) + expected = datetime(2012, 11, 7) + assert res == expected + + @pytest.mark.xfail + def test_extraneous_year_tokens(self): + # This was found in the wild at insidertrading.org + # Unlike in the case above, identifying the first "2012" as the year + # would not be a problem, but infering that the latter 2012 is hhmm + # is a problem. + dstr = "2012 MARTIN CHILDREN'S IRREVOCABLE TRUST u/a/d NOVEMBER 7, 2012" + expected = datetime(2012, 11, 7) + (res, tokens) = parse(dstr, fuzzy_with_tokens=True) + assert res == expected + assert tokens == ("2012 MARTIN CHILDREN'S IRREVOCABLE TRUST u/a/d ",) + + @pytest.mark.xfail + def test_extraneous_year2(self): + # This was found in the wild at insidertrading.org + dstr = ("Berylson Amy Smith 1998 Grantor Retained Annuity Trust " + "u/d/t November 2, 1998 f/b/o Jennifer L Berylson") + res = parse(dstr, fuzzy_with_tokens=True) + expected = datetime(1998, 11, 2) + assert res == expected + + @pytest.mark.xfail + def test_extraneous_year3(self): + # This was found in the wild at insidertrading.org + dstr = "SMITH R & WEISS D 94 CHILD TR FBO M W SMITH UDT 12/1/1994" + res = parse(dstr, fuzzy_with_tokens=True) + expected = datetime(1994, 12, 1) + assert res == expected + + @pytest.mark.xfail + def test_unambiguous_YYYYMM(self): + # 171206 can be parsed as YYMMDD. However, 201712 cannot be parsed + # as instance of YYMMDD and parser could fallback to YYYYMM format. 
+ dstr = "201712" + res = parse(dstr) + expected = datetime(2017, 12, 1) + assert res == expected + +@pytest.mark.skipif(IS_WIN, reason='Windows does not use TZ var') +def test_parse_unambiguous_nonexistent_local(): + # When dates are specified "EST" even when they should be "EDT" in the + # local time zone, we should still assign the local time zone + with TZEnvContext('EST+5EDT,M3.2.0/2,M11.1.0/2'): + dt_exp = datetime(2011, 8, 1, 12, 30, tzinfo=tz.tzlocal()) + dt = parse('2011-08-01T12:30 EST') + + assert dt.tzname() == 'EDT' + assert dt == dt_exp + + +@pytest.mark.skipif(IS_WIN, reason='Windows does not use TZ var') +def test_tzlocal_in_gmt(): + # GH #318 + with TZEnvContext('GMT0BST,M3.5.0,M10.5.0'): + # This is an imaginary datetime in tz.tzlocal() but should still + # parse using the GMT-as-alias-for-UTC rule + dt = parse('2004-05-01T12:00 GMT') + dt_exp = datetime(2004, 5, 1, 12, tzinfo=tz.tzutc()) + + assert dt == dt_exp + + +@pytest.mark.skipif(IS_WIN, reason='Windows does not use TZ var') +def test_tzlocal_parse_fold(): + # One manifestion of GH #318 + with TZEnvContext('EST+5EDT,M3.2.0/2,M11.1.0/2'): + dt_exp = datetime(2011, 11, 6, 1, 30, tzinfo=tz.tzlocal()) + dt_exp = tz.enfold(dt_exp, fold=1) + dt = parse('2011-11-06T01:30 EST') + + # Because this is ambiguous, kuntil `tz.tzlocal() is tz.tzlocal()` + # we'll just check the attributes we care about rather than + # dt == dt_exp + assert dt.tzname() == dt_exp.tzname() + assert dt.replace(tzinfo=None) == dt_exp.replace(tzinfo=None) + assert getattr(dt, 'fold') == getattr(dt_exp, 'fold') + assert dt.astimezone(tz.tzutc()) == dt_exp.astimezone(tz.tzutc()) + + +def test_parse_tzinfos_fold(): + NYC = tz.gettz('America/New_York') + tzinfos = {'EST': NYC, 'EDT': NYC} + + dt_exp = tz.enfold(datetime(2011, 11, 6, 1, 30, tzinfo=NYC), fold=1) + dt = parse('2011-11-06T01:30 EST', tzinfos=tzinfos) + + assert dt == dt_exp + assert dt.tzinfo is dt_exp.tzinfo + assert getattr(dt, 'fold') == getattr(dt_exp, 'fold') + 
assert dt.astimezone(tz.tzutc()) == dt_exp.astimezone(tz.tzutc()) + + +@pytest.mark.parametrize('dtstr,dt', [ + ('5.6h', datetime(2003, 9, 25, 5, 36)), + ('5.6m', datetime(2003, 9, 25, 0, 5, 36)), + # '5.6s' never had a rounding problem, test added for completeness + ('5.6s', datetime(2003, 9, 25, 0, 0, 5, 600000)) +]) +def test_rounding_floatlike_strings(dtstr, dt): + assert parse(dtstr, default=datetime(2003, 9, 25)) == dt + + +@pytest.mark.parametrize('value', ['1: test', 'Nan']) +def test_decimal_error(value): + # GH 632, GH 662 - decimal.Decimal raises some non-ValueError exception when + # constructed with an invalid value + with pytest.raises(ValueError): + parse(value) + + +def test_BYd_corner_case(): + # GH#687 + res = parse('December.0031.30') + assert res == datetime(31, 12, 30) diff --git a/ext/dateutil/test/test_relativedelta.py b/ext/dateutil/test/test_relativedelta.py new file mode 100644 index 0000000000..70cb543a4b --- /dev/null +++ b/ext/dateutil/test/test_relativedelta.py @@ -0,0 +1,678 @@ +# -*- coding: utf-8 -*- +from __future__ import unicode_literals +from ._common import WarningTestMixin, NotAValue + +import calendar +from datetime import datetime, date, timedelta +import unittest + +from dateutil.relativedelta import relativedelta, MO, TU, WE, FR, SU + + +class RelativeDeltaTest(WarningTestMixin, unittest.TestCase): + now = datetime(2003, 9, 17, 20, 54, 47, 282310) + today = date(2003, 9, 17) + + def testInheritance(self): + # Ensure that relativedelta is inheritance-friendly. 
+ class rdChildClass(relativedelta): + pass + + ccRD = rdChildClass(years=1, months=1, days=1, leapdays=1, weeks=1, + hours=1, minutes=1, seconds=1, microseconds=1) + + rd = relativedelta(years=1, months=1, days=1, leapdays=1, weeks=1, + hours=1, minutes=1, seconds=1, microseconds=1) + + self.assertEqual(type(ccRD + rd), type(ccRD), + msg='Addition does not inherit type.') + + self.assertEqual(type(ccRD - rd), type(ccRD), + msg='Subtraction does not inherit type.') + + self.assertEqual(type(-ccRD), type(ccRD), + msg='Negation does not inherit type.') + + self.assertEqual(type(ccRD * 5.0), type(ccRD), + msg='Multiplication does not inherit type.') + + self.assertEqual(type(ccRD / 5.0), type(ccRD), + msg='Division does not inherit type.') + + def testMonthEndMonthBeginning(self): + self.assertEqual(relativedelta(datetime(2003, 1, 31, 23, 59, 59), + datetime(2003, 3, 1, 0, 0, 0)), + relativedelta(months=-1, seconds=-1)) + + self.assertEqual(relativedelta(datetime(2003, 3, 1, 0, 0, 0), + datetime(2003, 1, 31, 23, 59, 59)), + relativedelta(months=1, seconds=1)) + + def testMonthEndMonthBeginningLeapYear(self): + self.assertEqual(relativedelta(datetime(2012, 1, 31, 23, 59, 59), + datetime(2012, 3, 1, 0, 0, 0)), + relativedelta(months=-1, seconds=-1)) + + self.assertEqual(relativedelta(datetime(2003, 3, 1, 0, 0, 0), + datetime(2003, 1, 31, 23, 59, 59)), + relativedelta(months=1, seconds=1)) + + def testNextMonth(self): + self.assertEqual(self.now+relativedelta(months=+1), + datetime(2003, 10, 17, 20, 54, 47, 282310)) + + def testNextMonthPlusOneWeek(self): + self.assertEqual(self.now+relativedelta(months=+1, weeks=+1), + datetime(2003, 10, 24, 20, 54, 47, 282310)) + + def testNextMonthPlusOneWeek10am(self): + self.assertEqual(self.today + + relativedelta(months=+1, weeks=+1, hour=10), + datetime(2003, 10, 24, 10, 0)) + + def testNextMonthPlusOneWeek10amDiff(self): + self.assertEqual(relativedelta(datetime(2003, 10, 24, 10, 0), + self.today), + relativedelta(months=+1, 
days=+7, hours=+10)) + + def testOneMonthBeforeOneYear(self): + self.assertEqual(self.now+relativedelta(years=+1, months=-1), + datetime(2004, 8, 17, 20, 54, 47, 282310)) + + def testMonthsOfDiffNumOfDays(self): + self.assertEqual(date(2003, 1, 27)+relativedelta(months=+1), + date(2003, 2, 27)) + self.assertEqual(date(2003, 1, 31)+relativedelta(months=+1), + date(2003, 2, 28)) + self.assertEqual(date(2003, 1, 31)+relativedelta(months=+2), + date(2003, 3, 31)) + + def testMonthsOfDiffNumOfDaysWithYears(self): + self.assertEqual(date(2000, 2, 28)+relativedelta(years=+1), + date(2001, 2, 28)) + self.assertEqual(date(2000, 2, 29)+relativedelta(years=+1), + date(2001, 2, 28)) + + self.assertEqual(date(1999, 2, 28)+relativedelta(years=+1), + date(2000, 2, 28)) + self.assertEqual(date(1999, 3, 1)+relativedelta(years=+1), + date(2000, 3, 1)) + self.assertEqual(date(1999, 3, 1)+relativedelta(years=+1), + date(2000, 3, 1)) + + self.assertEqual(date(2001, 2, 28)+relativedelta(years=-1), + date(2000, 2, 28)) + self.assertEqual(date(2001, 3, 1)+relativedelta(years=-1), + date(2000, 3, 1)) + + def testNextFriday(self): + self.assertEqual(self.today+relativedelta(weekday=FR), + date(2003, 9, 19)) + + def testNextFridayInt(self): + self.assertEqual(self.today+relativedelta(weekday=calendar.FRIDAY), + date(2003, 9, 19)) + + def testLastFridayInThisMonth(self): + self.assertEqual(self.today+relativedelta(day=31, weekday=FR(-1)), + date(2003, 9, 26)) + + def testNextWednesdayIsToday(self): + self.assertEqual(self.today+relativedelta(weekday=WE), + date(2003, 9, 17)) + + def testNextWenesdayNotToday(self): + self.assertEqual(self.today+relativedelta(days=+1, weekday=WE), + date(2003, 9, 24)) + + def test15thISOYearWeek(self): + self.assertEqual(date(2003, 1, 1) + + relativedelta(day=4, weeks=+14, weekday=MO(-1)), + date(2003, 4, 7)) + + def testMillenniumAge(self): + self.assertEqual(relativedelta(self.now, date(2001, 1, 1)), + relativedelta(years=+2, months=+8, days=+16, + hours=+20, 
minutes=+54, seconds=+47, + microseconds=+282310)) + + def testJohnAge(self): + self.assertEqual(relativedelta(self.now, + datetime(1978, 4, 5, 12, 0)), + relativedelta(years=+25, months=+5, days=+12, + hours=+8, minutes=+54, seconds=+47, + microseconds=+282310)) + + def testJohnAgeWithDate(self): + self.assertEqual(relativedelta(self.today, + datetime(1978, 4, 5, 12, 0)), + relativedelta(years=+25, months=+5, days=+11, + hours=+12)) + + def testYearDay(self): + self.assertEqual(date(2003, 1, 1)+relativedelta(yearday=260), + date(2003, 9, 17)) + self.assertEqual(date(2002, 1, 1)+relativedelta(yearday=260), + date(2002, 9, 17)) + self.assertEqual(date(2000, 1, 1)+relativedelta(yearday=260), + date(2000, 9, 16)) + self.assertEqual(self.today+relativedelta(yearday=261), + date(2003, 9, 18)) + + def testYearDayBug(self): + # Tests a problem reported by Adam Ryan. + self.assertEqual(date(2010, 1, 1)+relativedelta(yearday=15), + date(2010, 1, 15)) + + def testNonLeapYearDay(self): + self.assertEqual(date(2003, 1, 1)+relativedelta(nlyearday=260), + date(2003, 9, 17)) + self.assertEqual(date(2002, 1, 1)+relativedelta(nlyearday=260), + date(2002, 9, 17)) + self.assertEqual(date(2000, 1, 1)+relativedelta(nlyearday=260), + date(2000, 9, 17)) + self.assertEqual(self.today+relativedelta(yearday=261), + date(2003, 9, 18)) + + def testAddition(self): + self.assertEqual(relativedelta(days=10) + + relativedelta(years=1, months=2, days=3, hours=4, + minutes=5, microseconds=6), + relativedelta(years=1, months=2, days=13, hours=4, + minutes=5, microseconds=6)) + + def testAbsoluteAddition(self): + self.assertEqual(relativedelta() + relativedelta(day=0, hour=0), + relativedelta(day=0, hour=0)) + self.assertEqual(relativedelta(day=0, hour=0) + relativedelta(), + relativedelta(day=0, hour=0)) + + def testAdditionToDatetime(self): + self.assertEqual(datetime(2000, 1, 1) + relativedelta(days=1), + datetime(2000, 1, 2)) + + def testRightAdditionToDatetime(self): + 
self.assertEqual(relativedelta(days=1) + datetime(2000, 1, 1), + datetime(2000, 1, 2)) + + def testAdditionInvalidType(self): + with self.assertRaises(TypeError): + relativedelta(days=3) + 9 + + def testAdditionUnsupportedType(self): + # For unsupported types that define their own comparators, etc. + self.assertIs(relativedelta(days=1) + NotAValue, NotAValue) + + def testAdditionFloatValue(self): + self.assertEqual(datetime(2000, 1, 1) + relativedelta(days=float(1)), + datetime(2000, 1, 2)) + self.assertEqual(datetime(2000, 1, 1) + relativedelta(months=float(1)), + datetime(2000, 2, 1)) + self.assertEqual(datetime(2000, 1, 1) + relativedelta(years=float(1)), + datetime(2001, 1, 1)) + + def testAdditionFloatFractionals(self): + self.assertEqual(datetime(2000, 1, 1, 0) + + relativedelta(days=float(0.5)), + datetime(2000, 1, 1, 12)) + self.assertEqual(datetime(2000, 1, 1, 0, 0) + + relativedelta(hours=float(0.5)), + datetime(2000, 1, 1, 0, 30)) + self.assertEqual(datetime(2000, 1, 1, 0, 0, 0) + + relativedelta(minutes=float(0.5)), + datetime(2000, 1, 1, 0, 0, 30)) + self.assertEqual(datetime(2000, 1, 1, 0, 0, 0, 0) + + relativedelta(seconds=float(0.5)), + datetime(2000, 1, 1, 0, 0, 0, 500000)) + self.assertEqual(datetime(2000, 1, 1, 0, 0, 0, 0) + + relativedelta(microseconds=float(500000.25)), + datetime(2000, 1, 1, 0, 0, 0, 500000)) + + def testSubtraction(self): + self.assertEqual(relativedelta(days=10) - + relativedelta(years=1, months=2, days=3, hours=4, + minutes=5, microseconds=6), + relativedelta(years=-1, months=-2, days=7, hours=-4, + minutes=-5, microseconds=-6)) + + def testRightSubtractionFromDatetime(self): + self.assertEqual(datetime(2000, 1, 2) - relativedelta(days=1), + datetime(2000, 1, 1)) + + def testSubractionWithDatetime(self): + self.assertRaises(TypeError, lambda x, y: x - y, + (relativedelta(days=1), datetime(2000, 1, 1))) + + def testSubtractionInvalidType(self): + with self.assertRaises(TypeError): + relativedelta(hours=12) - 14 + + def 
testSubtractionUnsupportedType(self): + self.assertIs(relativedelta(days=1) + NotAValue, NotAValue) + + def testMultiplication(self): + self.assertEqual(datetime(2000, 1, 1) + relativedelta(days=1) * 28, + datetime(2000, 1, 29)) + self.assertEqual(datetime(2000, 1, 1) + 28 * relativedelta(days=1), + datetime(2000, 1, 29)) + + def testMultiplicationUnsupportedType(self): + self.assertIs(relativedelta(days=1) * NotAValue, NotAValue) + + def testDivision(self): + self.assertEqual(datetime(2000, 1, 1) + relativedelta(days=28) / 28, + datetime(2000, 1, 2)) + + def testDivisionUnsupportedType(self): + self.assertIs(relativedelta(days=1) / NotAValue, NotAValue) + + def testBoolean(self): + self.assertFalse(relativedelta(days=0)) + self.assertTrue(relativedelta(days=1)) + + def testAbsoluteValueNegative(self): + rd_base = relativedelta(years=-1, months=-5, days=-2, hours=-3, + minutes=-5, seconds=-2, microseconds=-12) + rd_expected = relativedelta(years=1, months=5, days=2, hours=3, + minutes=5, seconds=2, microseconds=12) + self.assertEqual(abs(rd_base), rd_expected) + + def testAbsoluteValuePositive(self): + rd_base = relativedelta(years=1, months=5, days=2, hours=3, + minutes=5, seconds=2, microseconds=12) + rd_expected = rd_base + + self.assertEqual(abs(rd_base), rd_expected) + + def testComparison(self): + d1 = relativedelta(years=1, months=1, days=1, leapdays=0, hours=1, + minutes=1, seconds=1, microseconds=1) + d2 = relativedelta(years=1, months=1, days=1, leapdays=0, hours=1, + minutes=1, seconds=1, microseconds=1) + d3 = relativedelta(years=1, months=1, days=1, leapdays=0, hours=1, + minutes=1, seconds=1, microseconds=2) + + self.assertEqual(d1, d2) + self.assertNotEqual(d1, d3) + + def testInequalityTypeMismatch(self): + # Different type + self.assertFalse(relativedelta(year=1) == 19) + + def testInequalityUnsupportedType(self): + self.assertIs(relativedelta(hours=3) == NotAValue, NotAValue) + + def testInequalityWeekdays(self): + # Different weekdays + no_wday = 
relativedelta(year=1997, month=4) + wday_mo_1 = relativedelta(year=1997, month=4, weekday=MO(+1)) + wday_mo_2 = relativedelta(year=1997, month=4, weekday=MO(+2)) + wday_tu = relativedelta(year=1997, month=4, weekday=TU) + + self.assertTrue(wday_mo_1 == wday_mo_1) + + self.assertFalse(no_wday == wday_mo_1) + self.assertFalse(wday_mo_1 == no_wday) + + self.assertFalse(wday_mo_1 == wday_mo_2) + self.assertFalse(wday_mo_2 == wday_mo_1) + + self.assertFalse(wday_mo_1 == wday_tu) + self.assertFalse(wday_tu == wday_mo_1) + + def testMonthOverflow(self): + self.assertEqual(relativedelta(months=273), + relativedelta(years=22, months=9)) + + def testWeeks(self): + # Test that the weeks property is working properly. + rd = relativedelta(years=4, months=2, weeks=8, days=6) + self.assertEqual((rd.weeks, rd.days), (8, 8 * 7 + 6)) + + rd.weeks = 3 + self.assertEqual((rd.weeks, rd.days), (3, 3 * 7 + 6)) + + def testRelativeDeltaRepr(self): + self.assertEqual(repr(relativedelta(years=1, months=-1, days=15)), + 'relativedelta(years=+1, months=-1, days=+15)') + + self.assertEqual(repr(relativedelta(months=14, seconds=-25)), + 'relativedelta(years=+1, months=+2, seconds=-25)') + + self.assertEqual(repr(relativedelta(month=3, hour=3, weekday=SU(3))), + 'relativedelta(month=3, weekday=SU(+3), hour=3)') + + def testRelativeDeltaFractionalYear(self): + with self.assertRaises(ValueError): + relativedelta(years=1.5) + + def testRelativeDeltaFractionalMonth(self): + with self.assertRaises(ValueError): + relativedelta(months=1.5) + + def testRelativeDeltaFractionalAbsolutes(self): + # Fractional absolute values will soon be unsupported, + # check for the deprecation warning. 
+ with self.assertWarns(DeprecationWarning): + relativedelta(year=2.86) + + with self.assertWarns(DeprecationWarning): + relativedelta(month=1.29) + + with self.assertWarns(DeprecationWarning): + relativedelta(day=0.44) + + with self.assertWarns(DeprecationWarning): + relativedelta(hour=23.98) + + with self.assertWarns(DeprecationWarning): + relativedelta(minute=45.21) + + with self.assertWarns(DeprecationWarning): + relativedelta(second=13.2) + + with self.assertWarns(DeprecationWarning): + relativedelta(microsecond=157221.93) + + def testRelativeDeltaFractionalRepr(self): + rd = relativedelta(years=3, months=-2, days=1.25) + + self.assertEqual(repr(rd), + 'relativedelta(years=+3, months=-2, days=+1.25)') + + rd = relativedelta(hours=0.5, seconds=9.22) + self.assertEqual(repr(rd), + 'relativedelta(hours=+0.5, seconds=+9.22)') + + def testRelativeDeltaFractionalWeeks(self): + # Equivalent to days=8, hours=18 + rd = relativedelta(weeks=1.25) + d1 = datetime(2009, 9, 3, 0, 0) + self.assertEqual(d1 + rd, + datetime(2009, 9, 11, 18)) + + def testRelativeDeltaFractionalDays(self): + rd1 = relativedelta(days=1.48) + + d1 = datetime(2009, 9, 3, 0, 0) + self.assertEqual(d1 + rd1, + datetime(2009, 9, 4, 11, 31, 12)) + + rd2 = relativedelta(days=1.5) + self.assertEqual(d1 + rd2, + datetime(2009, 9, 4, 12, 0, 0)) + + def testRelativeDeltaFractionalHours(self): + rd = relativedelta(days=1, hours=12.5) + d1 = datetime(2009, 9, 3, 0, 0) + self.assertEqual(d1 + rd, + datetime(2009, 9, 4, 12, 30, 0)) + + def testRelativeDeltaFractionalMinutes(self): + rd = relativedelta(hours=1, minutes=30.5) + d1 = datetime(2009, 9, 3, 0, 0) + self.assertEqual(d1 + rd, + datetime(2009, 9, 3, 1, 30, 30)) + + def testRelativeDeltaFractionalSeconds(self): + rd = relativedelta(hours=5, minutes=30, seconds=30.5) + d1 = datetime(2009, 9, 3, 0, 0) + self.assertEqual(d1 + rd, + datetime(2009, 9, 3, 5, 30, 30, 500000)) + + def testRelativeDeltaFractionalPositiveOverflow(self): + # Equivalent to (days=1, 
hours=14) + rd1 = relativedelta(days=1.5, hours=2) + d1 = datetime(2009, 9, 3, 0, 0) + self.assertEqual(d1 + rd1, + datetime(2009, 9, 4, 14, 0, 0)) + + # Equivalent to (days=1, hours=14, minutes=45) + rd2 = relativedelta(days=1.5, hours=2.5, minutes=15) + d1 = datetime(2009, 9, 3, 0, 0) + self.assertEqual(d1 + rd2, + datetime(2009, 9, 4, 14, 45)) + + # Carry back up - equivalent to (days=2, hours=2, minutes=0, seconds=1) + rd3 = relativedelta(days=1.5, hours=13, minutes=59.5, seconds=31) + self.assertEqual(d1 + rd3, + datetime(2009, 9, 5, 2, 0, 1)) + + def testRelativeDeltaFractionalNegativeDays(self): + # Equivalent to (days=-1, hours=-1) + rd1 = relativedelta(days=-1.5, hours=11) + d1 = datetime(2009, 9, 3, 12, 0) + self.assertEqual(d1 + rd1, + datetime(2009, 9, 2, 11, 0, 0)) + + # Equivalent to (days=-1, hours=-9) + rd2 = relativedelta(days=-1.25, hours=-3) + self.assertEqual(d1 + rd2, + datetime(2009, 9, 2, 3)) + + def testRelativeDeltaNormalizeFractionalDays(self): + # Equivalent to (days=2, hours=18) + rd1 = relativedelta(days=2.75) + + self.assertEqual(rd1.normalized(), relativedelta(days=2, hours=18)) + + # Equvalent to (days=1, hours=11, minutes=31, seconds=12) + rd2 = relativedelta(days=1.48) + + self.assertEqual(rd2.normalized(), + relativedelta(days=1, hours=11, minutes=31, seconds=12)) + + def testRelativeDeltaNormalizeFractionalDays2(self): + # Equivalent to (hours=1, minutes=30) + rd1 = relativedelta(hours=1.5) + + self.assertEqual(rd1.normalized(), relativedelta(hours=1, minutes=30)) + + # Equivalent to (hours=3, minutes=17, seconds=5, microseconds=100) + rd2 = relativedelta(hours=3.28472225) + + self.assertEqual(rd2.normalized(), + relativedelta(hours=3, minutes=17, seconds=5, microseconds=100)) + + def testRelativeDeltaNormalizeFractionalMinutes(self): + # Equivalent to (minutes=15, seconds=36) + rd1 = relativedelta(minutes=15.6) + + self.assertEqual(rd1.normalized(), + relativedelta(minutes=15, seconds=36)) + + # Equivalent to (minutes=25, 
seconds=20, microseconds=25000) + rd2 = relativedelta(minutes=25.33375) + + self.assertEqual(rd2.normalized(), + relativedelta(minutes=25, seconds=20, microseconds=25000)) + + def testRelativeDeltaNormalizeFractionalSeconds(self): + # Equivalent to (seconds=45, microseconds=25000) + rd1 = relativedelta(seconds=45.025) + self.assertEqual(rd1.normalized(), + relativedelta(seconds=45, microseconds=25000)) + + def testRelativeDeltaFractionalPositiveOverflow2(self): + # Equivalent to (days=1, hours=14) + rd1 = relativedelta(days=1.5, hours=2) + self.assertEqual(rd1.normalized(), + relativedelta(days=1, hours=14)) + + # Equivalent to (days=1, hours=14, minutes=45) + rd2 = relativedelta(days=1.5, hours=2.5, minutes=15) + self.assertEqual(rd2.normalized(), + relativedelta(days=1, hours=14, minutes=45)) + + # Carry back up - equivalent to: + # (days=2, hours=2, minutes=0, seconds=2, microseconds=3) + rd3 = relativedelta(days=1.5, hours=13, minutes=59.50045, + seconds=31.473, microseconds=500003) + self.assertEqual(rd3.normalized(), + relativedelta(days=2, hours=2, minutes=0, + seconds=2, microseconds=3)) + + def testRelativeDeltaFractionalNegativeOverflow(self): + # Equivalent to (days=-1) + rd1 = relativedelta(days=-0.5, hours=-12) + self.assertEqual(rd1.normalized(), + relativedelta(days=-1)) + + # Equivalent to (days=-1) + rd2 = relativedelta(days=-1.5, hours=12) + self.assertEqual(rd2.normalized(), + relativedelta(days=-1)) + + # Equivalent to (days=-1, hours=-14, minutes=-45) + rd3 = relativedelta(days=-1.5, hours=-2.5, minutes=-15) + self.assertEqual(rd3.normalized(), + relativedelta(days=-1, hours=-14, minutes=-45)) + + # Equivalent to (days=-1, hours=-14, minutes=+15) + rd4 = relativedelta(days=-1.5, hours=-2.5, minutes=45) + self.assertEqual(rd4.normalized(), + relativedelta(days=-1, hours=-14, minutes=+15)) + + # Carry back up - equivalent to: + # (days=-2, hours=-2, minutes=0, seconds=-2, microseconds=-3) + rd3 = relativedelta(days=-1.5, hours=-13, 
minutes=-59.50045, + seconds=-31.473, microseconds=-500003) + self.assertEqual(rd3.normalized(), + relativedelta(days=-2, hours=-2, minutes=0, + seconds=-2, microseconds=-3)) + + def testInvalidYearDay(self): + with self.assertRaises(ValueError): + relativedelta(yearday=367) + + def testAddTimedeltaToUnpopulatedRelativedelta(self): + td = timedelta( + days=1, + seconds=1, + microseconds=1, + milliseconds=1, + minutes=1, + hours=1, + weeks=1 + ) + + expected = relativedelta( + weeks=1, + days=1, + hours=1, + minutes=1, + seconds=1, + microseconds=1001 + ) + + self.assertEqual(expected, relativedelta() + td) + + def testAddTimedeltaToPopulatedRelativeDelta(self): + td = timedelta( + days=1, + seconds=1, + microseconds=1, + milliseconds=1, + minutes=1, + hours=1, + weeks=1 + ) + + rd = relativedelta( + year=1, + month=1, + day=1, + hour=1, + minute=1, + second=1, + microsecond=1, + years=1, + months=1, + days=1, + weeks=1, + hours=1, + minutes=1, + seconds=1, + microseconds=1 + ) + + expected = relativedelta( + year=1, + month=1, + day=1, + hour=1, + minute=1, + second=1, + microsecond=1, + years=1, + months=1, + weeks=2, + days=2, + hours=2, + minutes=2, + seconds=2, + microseconds=1002, + ) + + self.assertEqual(expected, rd + td) + + def testHashable(self): + try: + {relativedelta(minute=1): 'test'} + except: + self.fail("relativedelta() failed to hash!") + + +class RelativeDeltaWeeksPropertyGetterTest(unittest.TestCase): + """Test the weeks property getter""" + + def test_one_day(self): + rd = relativedelta(days=1) + self.assertEqual(rd.days, 1) + self.assertEqual(rd.weeks, 0) + + def test_minus_one_day(self): + rd = relativedelta(days=-1) + self.assertEqual(rd.days, -1) + self.assertEqual(rd.weeks, 0) + + def test_height_days(self): + rd = relativedelta(days=8) + self.assertEqual(rd.days, 8) + self.assertEqual(rd.weeks, 1) + + def test_minus_height_days(self): + rd = relativedelta(days=-8) + self.assertEqual(rd.days, -8) + self.assertEqual(rd.weeks, -1) + + +class 
RelativeDeltaWeeksPropertySetterTest(unittest.TestCase): + """Test the weeks setter which makes a "smart" update of the days attribute""" + + def test_one_day_set_one_week(self): + rd = relativedelta(days=1) + rd.weeks = 1 # add 7 days + self.assertEqual(rd.days, 8) + self.assertEqual(rd.weeks, 1) + + def test_minus_one_day_set_one_week(self): + rd = relativedelta(days=-1) + rd.weeks = 1 # add 7 days + self.assertEqual(rd.days, 6) + self.assertEqual(rd.weeks, 0) + + def test_height_days_set_minus_one_week(self): + rd = relativedelta(days=8) + rd.weeks = -1 # change from 1 week, 1 day to -1 week, 1 day + self.assertEqual(rd.days, -6) + self.assertEqual(rd.weeks, 0) + + def test_minus_height_days_set_minus_one_week(self): + rd = relativedelta(days=-8) + rd.weeks = -1 # does not change anything + self.assertEqual(rd.days, -8) + self.assertEqual(rd.weeks, -1) + + +# vim:ts=4:sw=4:et diff --git a/ext/dateutil/test/test_rrule.py b/ext/dateutil/test/test_rrule.py new file mode 100644 index 0000000000..cd08ce29dd --- /dev/null +++ b/ext/dateutil/test/test_rrule.py @@ -0,0 +1,4842 @@ +# -*- coding: utf-8 -*- +from __future__ import unicode_literals +from ._common import WarningTestMixin + +from datetime import datetime, date +import unittest +from six import PY3 + +from dateutil import tz +from dateutil.rrule import ( + rrule, rruleset, rrulestr, + YEARLY, MONTHLY, WEEKLY, DAILY, + HOURLY, MINUTELY, SECONDLY, + MO, TU, WE, TH, FR, SA, SU +) + +from freezegun import freeze_time + +import pytest + + +@pytest.mark.rrule +class RRuleTest(WarningTestMixin, unittest.TestCase): + def _rrulestr_reverse_test(self, rule): + """ + Call with an `rrule` and it will test that `str(rrule)` generates a + string which generates the same `rrule` as the input when passed to + `rrulestr()` + """ + rr_str = str(rule) + rrulestr_rrule = rrulestr(rr_str) + + self.assertEqual(list(rule), list(rrulestr_rrule)) + + def testStrAppendRRULEToken(self): + # `_rrulestr_reverse_test` does not check if the 
"RRULE:" prefix + # property is appended properly, so give it a dedicated test + self.assertEqual(str(rrule(YEARLY, + count=5, + dtstart=datetime(1997, 9, 2, 9, 0))), + "DTSTART:19970902T090000\n" + "RRULE:FREQ=YEARLY;COUNT=5") + + rr_str = ( + 'DTSTART:19970105T083000\nRRULE:FREQ=YEARLY;INTERVAL=2' + ) + self.assertEqual(str(rrulestr(rr_str)), rr_str) + + def testYearly(self): + self.assertEqual(list(rrule(YEARLY, + count=3, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 9, 0), + datetime(1998, 9, 2, 9, 0), + datetime(1999, 9, 2, 9, 0)]) + + def testYearlyInterval(self): + self.assertEqual(list(rrule(YEARLY, + count=3, + interval=2, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 9, 0), + datetime(1999, 9, 2, 9, 0), + datetime(2001, 9, 2, 9, 0)]) + + def testYearlyIntervalLarge(self): + self.assertEqual(list(rrule(YEARLY, + count=3, + interval=100, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 9, 0), + datetime(2097, 9, 2, 9, 0), + datetime(2197, 9, 2, 9, 0)]) + + def testYearlyByMonth(self): + self.assertEqual(list(rrule(YEARLY, + count=3, + bymonth=(1, 3), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 1, 2, 9, 0), + datetime(1998, 3, 2, 9, 0), + datetime(1999, 1, 2, 9, 0)]) + + def testYearlyByMonthDay(self): + self.assertEqual(list(rrule(YEARLY, + count=3, + bymonthday=(1, 3), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 3, 9, 0), + datetime(1997, 10, 1, 9, 0), + datetime(1997, 10, 3, 9, 0)]) + + def testYearlyByMonthAndMonthDay(self): + self.assertEqual(list(rrule(YEARLY, + count=3, + bymonth=(1, 3), + bymonthday=(5, 7), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 1, 5, 9, 0), + datetime(1998, 1, 7, 9, 0), + datetime(1998, 3, 5, 9, 0)]) + + def testYearlyByWeekDay(self): + self.assertEqual(list(rrule(YEARLY, + count=3, + byweekday=(TU, TH), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 9, 0), + datetime(1997, 9, 4, 9, 0), + datetime(1997, 9, 9, 9, 
0)]) + + def testYearlyByNWeekDay(self): + self.assertEqual(list(rrule(YEARLY, + count=3, + byweekday=(TU(1), TH(-1)), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 12, 25, 9, 0), + datetime(1998, 1, 6, 9, 0), + datetime(1998, 12, 31, 9, 0)]) + + def testYearlyByNWeekDayLarge(self): + self.assertEqual(list(rrule(YEARLY, + count=3, + byweekday=(TU(3), TH(-3)), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 12, 11, 9, 0), + datetime(1998, 1, 20, 9, 0), + datetime(1998, 12, 17, 9, 0)]) + + def testYearlyByMonthAndWeekDay(self): + self.assertEqual(list(rrule(YEARLY, + count=3, + bymonth=(1, 3), + byweekday=(TU, TH), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 1, 1, 9, 0), + datetime(1998, 1, 6, 9, 0), + datetime(1998, 1, 8, 9, 0)]) + + def testYearlyByMonthAndNWeekDay(self): + self.assertEqual(list(rrule(YEARLY, + count=3, + bymonth=(1, 3), + byweekday=(TU(1), TH(-1)), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 1, 6, 9, 0), + datetime(1998, 1, 29, 9, 0), + datetime(1998, 3, 3, 9, 0)]) + + def testYearlyByMonthAndNWeekDayLarge(self): + # This is interesting because the TH(-3) ends up before + # the TU(3). 
+ self.assertEqual(list(rrule(YEARLY, + count=3, + bymonth=(1, 3), + byweekday=(TU(3), TH(-3)), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 1, 15, 9, 0), + datetime(1998, 1, 20, 9, 0), + datetime(1998, 3, 12, 9, 0)]) + + def testYearlyByMonthDayAndWeekDay(self): + self.assertEqual(list(rrule(YEARLY, + count=3, + bymonthday=(1, 3), + byweekday=(TU, TH), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 1, 1, 9, 0), + datetime(1998, 2, 3, 9, 0), + datetime(1998, 3, 3, 9, 0)]) + + def testYearlyByMonthAndMonthDayAndWeekDay(self): + self.assertEqual(list(rrule(YEARLY, + count=3, + bymonth=(1, 3), + bymonthday=(1, 3), + byweekday=(TU, TH), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 1, 1, 9, 0), + datetime(1998, 3, 3, 9, 0), + datetime(2001, 3, 1, 9, 0)]) + + def testYearlyByYearDay(self): + self.assertEqual(list(rrule(YEARLY, + count=4, + byyearday=(1, 100, 200, 365), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 12, 31, 9, 0), + datetime(1998, 1, 1, 9, 0), + datetime(1998, 4, 10, 9, 0), + datetime(1998, 7, 19, 9, 0)]) + + def testYearlyByYearDayNeg(self): + self.assertEqual(list(rrule(YEARLY, + count=4, + byyearday=(-365, -266, -166, -1), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 12, 31, 9, 0), + datetime(1998, 1, 1, 9, 0), + datetime(1998, 4, 10, 9, 0), + datetime(1998, 7, 19, 9, 0)]) + + def testYearlyByMonthAndYearDay(self): + self.assertEqual(list(rrule(YEARLY, + count=4, + bymonth=(4, 7), + byyearday=(1, 100, 200, 365), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 4, 10, 9, 0), + datetime(1998, 7, 19, 9, 0), + datetime(1999, 4, 10, 9, 0), + datetime(1999, 7, 19, 9, 0)]) + + def testYearlyByMonthAndYearDayNeg(self): + self.assertEqual(list(rrule(YEARLY, + count=4, + bymonth=(4, 7), + byyearday=(-365, -266, -166, -1), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 4, 10, 9, 0), + datetime(1998, 7, 19, 9, 0), + datetime(1999, 4, 10, 9, 0), + datetime(1999, 7, 19, 9, 0)]) + + def 
testYearlyByWeekNo(self): + self.assertEqual(list(rrule(YEARLY, + count=3, + byweekno=20, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 5, 11, 9, 0), + datetime(1998, 5, 12, 9, 0), + datetime(1998, 5, 13, 9, 0)]) + + def testYearlyByWeekNoAndWeekDay(self): + # That's a nice one. The first days of week number one + # may be in the last year. + self.assertEqual(list(rrule(YEARLY, + count=3, + byweekno=1, + byweekday=MO, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 12, 29, 9, 0), + datetime(1999, 1, 4, 9, 0), + datetime(2000, 1, 3, 9, 0)]) + + def testYearlyByWeekNoAndWeekDayLarge(self): + # Another nice test. The last days of week number 52/53 + # may be in the next year. + self.assertEqual(list(rrule(YEARLY, + count=3, + byweekno=52, + byweekday=SU, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 12, 28, 9, 0), + datetime(1998, 12, 27, 9, 0), + datetime(2000, 1, 2, 9, 0)]) + + def testYearlyByWeekNoAndWeekDayLast(self): + self.assertEqual(list(rrule(YEARLY, + count=3, + byweekno=-1, + byweekday=SU, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 12, 28, 9, 0), + datetime(1999, 1, 3, 9, 0), + datetime(2000, 1, 2, 9, 0)]) + + def testYearlyByEaster(self): + self.assertEqual(list(rrule(YEARLY, + count=3, + byeaster=0, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 4, 12, 9, 0), + datetime(1999, 4, 4, 9, 0), + datetime(2000, 4, 23, 9, 0)]) + + def testYearlyByEasterPos(self): + self.assertEqual(list(rrule(YEARLY, + count=3, + byeaster=1, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 4, 13, 9, 0), + datetime(1999, 4, 5, 9, 0), + datetime(2000, 4, 24, 9, 0)]) + + def testYearlyByEasterNeg(self): + self.assertEqual(list(rrule(YEARLY, + count=3, + byeaster=-1, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 4, 11, 9, 0), + datetime(1999, 4, 3, 9, 0), + datetime(2000, 4, 22, 9, 0)]) + + def testYearlyByWeekNoAndWeekDay53(self): + self.assertEqual(list(rrule(YEARLY, + count=3, + byweekno=53, + 
byweekday=MO, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 12, 28, 9, 0), + datetime(2004, 12, 27, 9, 0), + datetime(2009, 12, 28, 9, 0)]) + + def testYearlyByHour(self): + self.assertEqual(list(rrule(YEARLY, + count=3, + byhour=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 18, 0), + datetime(1998, 9, 2, 6, 0), + datetime(1998, 9, 2, 18, 0)]) + + def testYearlyByMinute(self): + self.assertEqual(list(rrule(YEARLY, + count=3, + byminute=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 9, 6), + datetime(1997, 9, 2, 9, 18), + datetime(1998, 9, 2, 9, 6)]) + + def testYearlyBySecond(self): + self.assertEqual(list(rrule(YEARLY, + count=3, + bysecond=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 9, 0, 6), + datetime(1997, 9, 2, 9, 0, 18), + datetime(1998, 9, 2, 9, 0, 6)]) + + def testYearlyByHourAndMinute(self): + self.assertEqual(list(rrule(YEARLY, + count=3, + byhour=(6, 18), + byminute=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 18, 6), + datetime(1997, 9, 2, 18, 18), + datetime(1998, 9, 2, 6, 6)]) + + def testYearlyByHourAndSecond(self): + self.assertEqual(list(rrule(YEARLY, + count=3, + byhour=(6, 18), + bysecond=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 18, 0, 6), + datetime(1997, 9, 2, 18, 0, 18), + datetime(1998, 9, 2, 6, 0, 6)]) + + def testYearlyByMinuteAndSecond(self): + self.assertEqual(list(rrule(YEARLY, + count=3, + byminute=(6, 18), + bysecond=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 9, 6, 6), + datetime(1997, 9, 2, 9, 6, 18), + datetime(1997, 9, 2, 9, 18, 6)]) + + def testYearlyByHourAndMinuteAndSecond(self): + self.assertEqual(list(rrule(YEARLY, + count=3, + byhour=(6, 18), + byminute=(6, 18), + bysecond=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 18, 6, 6), + datetime(1997, 9, 2, 18, 6, 18), + datetime(1997, 9, 2, 18, 18, 6)]) + + def 
testYearlyBySetPos(self): + self.assertEqual(list(rrule(YEARLY, + count=3, + bymonthday=15, + byhour=(6, 18), + bysetpos=(3, -3), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 11, 15, 18, 0), + datetime(1998, 2, 15, 6, 0), + datetime(1998, 11, 15, 18, 0)]) + + def testMonthly(self): + self.assertEqual(list(rrule(MONTHLY, + count=3, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 9, 0), + datetime(1997, 10, 2, 9, 0), + datetime(1997, 11, 2, 9, 0)]) + + def testMonthlyInterval(self): + self.assertEqual(list(rrule(MONTHLY, + count=3, + interval=2, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 9, 0), + datetime(1997, 11, 2, 9, 0), + datetime(1998, 1, 2, 9, 0)]) + + def testMonthlyIntervalLarge(self): + self.assertEqual(list(rrule(MONTHLY, + count=3, + interval=18, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 9, 0), + datetime(1999, 3, 2, 9, 0), + datetime(2000, 9, 2, 9, 0)]) + + def testMonthlyByMonth(self): + self.assertEqual(list(rrule(MONTHLY, + count=3, + bymonth=(1, 3), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 1, 2, 9, 0), + datetime(1998, 3, 2, 9, 0), + datetime(1999, 1, 2, 9, 0)]) + + def testMonthlyByMonthDay(self): + self.assertEqual(list(rrule(MONTHLY, + count=3, + bymonthday=(1, 3), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 3, 9, 0), + datetime(1997, 10, 1, 9, 0), + datetime(1997, 10, 3, 9, 0)]) + + def testMonthlyByMonthAndMonthDay(self): + self.assertEqual(list(rrule(MONTHLY, + count=3, + bymonth=(1, 3), + bymonthday=(5, 7), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 1, 5, 9, 0), + datetime(1998, 1, 7, 9, 0), + datetime(1998, 3, 5, 9, 0)]) + + def testMonthlyByWeekDay(self): + self.assertEqual(list(rrule(MONTHLY, + count=3, + byweekday=(TU, TH), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 9, 0), + datetime(1997, 9, 4, 9, 0), + datetime(1997, 9, 9, 9, 0)]) + + # Third Monday of the month + self.assertEqual(rrule(MONTHLY, + 
byweekday=(MO(+3)), + dtstart=datetime(1997, 9, 1)).between(datetime(1997, 9, 1), + datetime(1997, 12, 1)), + [datetime(1997, 9, 15, 0, 0), + datetime(1997, 10, 20, 0, 0), + datetime(1997, 11, 17, 0, 0)]) + + def testMonthlyByNWeekDay(self): + self.assertEqual(list(rrule(MONTHLY, + count=3, + byweekday=(TU(1), TH(-1)), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 9, 0), + datetime(1997, 9, 25, 9, 0), + datetime(1997, 10, 7, 9, 0)]) + + def testMonthlyByNWeekDayLarge(self): + self.assertEqual(list(rrule(MONTHLY, + count=3, + byweekday=(TU(3), TH(-3)), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 11, 9, 0), + datetime(1997, 9, 16, 9, 0), + datetime(1997, 10, 16, 9, 0)]) + + def testMonthlyByMonthAndWeekDay(self): + self.assertEqual(list(rrule(MONTHLY, + count=3, + bymonth=(1, 3), + byweekday=(TU, TH), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 1, 1, 9, 0), + datetime(1998, 1, 6, 9, 0), + datetime(1998, 1, 8, 9, 0)]) + + def testMonthlyByMonthAndNWeekDay(self): + self.assertEqual(list(rrule(MONTHLY, + count=3, + bymonth=(1, 3), + byweekday=(TU(1), TH(-1)), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 1, 6, 9, 0), + datetime(1998, 1, 29, 9, 0), + datetime(1998, 3, 3, 9, 0)]) + + def testMonthlyByMonthAndNWeekDayLarge(self): + self.assertEqual(list(rrule(MONTHLY, + count=3, + bymonth=(1, 3), + byweekday=(TU(3), TH(-3)), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 1, 15, 9, 0), + datetime(1998, 1, 20, 9, 0), + datetime(1998, 3, 12, 9, 0)]) + + def testMonthlyByMonthDayAndWeekDay(self): + self.assertEqual(list(rrule(MONTHLY, + count=3, + bymonthday=(1, 3), + byweekday=(TU, TH), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 1, 1, 9, 0), + datetime(1998, 2, 3, 9, 0), + datetime(1998, 3, 3, 9, 0)]) + + def testMonthlyByMonthAndMonthDayAndWeekDay(self): + self.assertEqual(list(rrule(MONTHLY, + count=3, + bymonth=(1, 3), + bymonthday=(1, 3), + byweekday=(TU, TH), + dtstart=datetime(1997, 
9, 2, 9, 0))), + [datetime(1998, 1, 1, 9, 0), + datetime(1998, 3, 3, 9, 0), + datetime(2001, 3, 1, 9, 0)]) + + def testMonthlyByYearDay(self): + self.assertEqual(list(rrule(MONTHLY, + count=4, + byyearday=(1, 100, 200, 365), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 12, 31, 9, 0), + datetime(1998, 1, 1, 9, 0), + datetime(1998, 4, 10, 9, 0), + datetime(1998, 7, 19, 9, 0)]) + + def testMonthlyByYearDayNeg(self): + self.assertEqual(list(rrule(MONTHLY, + count=4, + byyearday=(-365, -266, -166, -1), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 12, 31, 9, 0), + datetime(1998, 1, 1, 9, 0), + datetime(1998, 4, 10, 9, 0), + datetime(1998, 7, 19, 9, 0)]) + + def testMonthlyByMonthAndYearDay(self): + self.assertEqual(list(rrule(MONTHLY, + count=4, + bymonth=(4, 7), + byyearday=(1, 100, 200, 365), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 4, 10, 9, 0), + datetime(1998, 7, 19, 9, 0), + datetime(1999, 4, 10, 9, 0), + datetime(1999, 7, 19, 9, 0)]) + + def testMonthlyByMonthAndYearDayNeg(self): + self.assertEqual(list(rrule(MONTHLY, + count=4, + bymonth=(4, 7), + byyearday=(-365, -266, -166, -1), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 4, 10, 9, 0), + datetime(1998, 7, 19, 9, 0), + datetime(1999, 4, 10, 9, 0), + datetime(1999, 7, 19, 9, 0)]) + + def testMonthlyByWeekNo(self): + self.assertEqual(list(rrule(MONTHLY, + count=3, + byweekno=20, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 5, 11, 9, 0), + datetime(1998, 5, 12, 9, 0), + datetime(1998, 5, 13, 9, 0)]) + + def testMonthlyByWeekNoAndWeekDay(self): + # That's a nice one. The first days of week number one + # may be in the last year. + self.assertEqual(list(rrule(MONTHLY, + count=3, + byweekno=1, + byweekday=MO, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 12, 29, 9, 0), + datetime(1999, 1, 4, 9, 0), + datetime(2000, 1, 3, 9, 0)]) + + def testMonthlyByWeekNoAndWeekDayLarge(self): + # Another nice test. 
The last days of week number 52/53 + # may be in the next year. + self.assertEqual(list(rrule(MONTHLY, + count=3, + byweekno=52, + byweekday=SU, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 12, 28, 9, 0), + datetime(1998, 12, 27, 9, 0), + datetime(2000, 1, 2, 9, 0)]) + + def testMonthlyByWeekNoAndWeekDayLast(self): + self.assertEqual(list(rrule(MONTHLY, + count=3, + byweekno=-1, + byweekday=SU, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 12, 28, 9, 0), + datetime(1999, 1, 3, 9, 0), + datetime(2000, 1, 2, 9, 0)]) + + def testMonthlyByWeekNoAndWeekDay53(self): + self.assertEqual(list(rrule(MONTHLY, + count=3, + byweekno=53, + byweekday=MO, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 12, 28, 9, 0), + datetime(2004, 12, 27, 9, 0), + datetime(2009, 12, 28, 9, 0)]) + + def testMonthlyByEaster(self): + self.assertEqual(list(rrule(MONTHLY, + count=3, + byeaster=0, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 4, 12, 9, 0), + datetime(1999, 4, 4, 9, 0), + datetime(2000, 4, 23, 9, 0)]) + + def testMonthlyByEasterPos(self): + self.assertEqual(list(rrule(MONTHLY, + count=3, + byeaster=1, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 4, 13, 9, 0), + datetime(1999, 4, 5, 9, 0), + datetime(2000, 4, 24, 9, 0)]) + + def testMonthlyByEasterNeg(self): + self.assertEqual(list(rrule(MONTHLY, + count=3, + byeaster=-1, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 4, 11, 9, 0), + datetime(1999, 4, 3, 9, 0), + datetime(2000, 4, 22, 9, 0)]) + + def testMonthlyByHour(self): + self.assertEqual(list(rrule(MONTHLY, + count=3, + byhour=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 18, 0), + datetime(1997, 10, 2, 6, 0), + datetime(1997, 10, 2, 18, 0)]) + + def testMonthlyByMinute(self): + self.assertEqual(list(rrule(MONTHLY, + count=3, + byminute=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 9, 6), + datetime(1997, 9, 2, 9, 18), + datetime(1997, 10, 2, 9, 6)]) + + 
def testMonthlyBySecond(self): + self.assertEqual(list(rrule(MONTHLY, + count=3, + bysecond=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 9, 0, 6), + datetime(1997, 9, 2, 9, 0, 18), + datetime(1997, 10, 2, 9, 0, 6)]) + + def testMonthlyByHourAndMinute(self): + self.assertEqual(list(rrule(MONTHLY, + count=3, + byhour=(6, 18), + byminute=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 18, 6), + datetime(1997, 9, 2, 18, 18), + datetime(1997, 10, 2, 6, 6)]) + + def testMonthlyByHourAndSecond(self): + self.assertEqual(list(rrule(MONTHLY, + count=3, + byhour=(6, 18), + bysecond=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 18, 0, 6), + datetime(1997, 9, 2, 18, 0, 18), + datetime(1997, 10, 2, 6, 0, 6)]) + + def testMonthlyByMinuteAndSecond(self): + self.assertEqual(list(rrule(MONTHLY, + count=3, + byminute=(6, 18), + bysecond=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 9, 6, 6), + datetime(1997, 9, 2, 9, 6, 18), + datetime(1997, 9, 2, 9, 18, 6)]) + + def testMonthlyByHourAndMinuteAndSecond(self): + self.assertEqual(list(rrule(MONTHLY, + count=3, + byhour=(6, 18), + byminute=(6, 18), + bysecond=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 18, 6, 6), + datetime(1997, 9, 2, 18, 6, 18), + datetime(1997, 9, 2, 18, 18, 6)]) + + def testMonthlyBySetPos(self): + self.assertEqual(list(rrule(MONTHLY, + count=3, + bymonthday=(13, 17), + byhour=(6, 18), + bysetpos=(3, -3), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 13, 18, 0), + datetime(1997, 9, 17, 6, 0), + datetime(1997, 10, 13, 18, 0)]) + + def testWeekly(self): + self.assertEqual(list(rrule(WEEKLY, + count=3, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 9, 0), + datetime(1997, 9, 9, 9, 0), + datetime(1997, 9, 16, 9, 0)]) + + def testWeeklyInterval(self): + self.assertEqual(list(rrule(WEEKLY, + count=3, + interval=2, + dtstart=datetime(1997, 9, 2, 9, 0))), + 
[datetime(1997, 9, 2, 9, 0), + datetime(1997, 9, 16, 9, 0), + datetime(1997, 9, 30, 9, 0)]) + + def testWeeklyIntervalLarge(self): + self.assertEqual(list(rrule(WEEKLY, + count=3, + interval=20, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 9, 0), + datetime(1998, 1, 20, 9, 0), + datetime(1998, 6, 9, 9, 0)]) + + def testWeeklyByMonth(self): + self.assertEqual(list(rrule(WEEKLY, + count=3, + bymonth=(1, 3), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 1, 6, 9, 0), + datetime(1998, 1, 13, 9, 0), + datetime(1998, 1, 20, 9, 0)]) + + def testWeeklyByMonthDay(self): + self.assertEqual(list(rrule(WEEKLY, + count=3, + bymonthday=(1, 3), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 3, 9, 0), + datetime(1997, 10, 1, 9, 0), + datetime(1997, 10, 3, 9, 0)]) + + def testWeeklyByMonthAndMonthDay(self): + self.assertEqual(list(rrule(WEEKLY, + count=3, + bymonth=(1, 3), + bymonthday=(5, 7), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 1, 5, 9, 0), + datetime(1998, 1, 7, 9, 0), + datetime(1998, 3, 5, 9, 0)]) + + def testWeeklyByWeekDay(self): + self.assertEqual(list(rrule(WEEKLY, + count=3, + byweekday=(TU, TH), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 9, 0), + datetime(1997, 9, 4, 9, 0), + datetime(1997, 9, 9, 9, 0)]) + + def testWeeklyByNWeekDay(self): + self.assertEqual(list(rrule(WEEKLY, + count=3, + byweekday=(TU(1), TH(-1)), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 9, 0), + datetime(1997, 9, 4, 9, 0), + datetime(1997, 9, 9, 9, 0)]) + + def testWeeklyByMonthAndWeekDay(self): + # This test is interesting, because it crosses the year + # boundary in a weekly period to find day '1' as a + # valid recurrence. 
+ self.assertEqual(list(rrule(WEEKLY, + count=3, + bymonth=(1, 3), + byweekday=(TU, TH), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 1, 1, 9, 0), + datetime(1998, 1, 6, 9, 0), + datetime(1998, 1, 8, 9, 0)]) + + def testWeeklyByMonthAndNWeekDay(self): + self.assertEqual(list(rrule(WEEKLY, + count=3, + bymonth=(1, 3), + byweekday=(TU(1), TH(-1)), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 1, 1, 9, 0), + datetime(1998, 1, 6, 9, 0), + datetime(1998, 1, 8, 9, 0)]) + + def testWeeklyByMonthDayAndWeekDay(self): + self.assertEqual(list(rrule(WEEKLY, + count=3, + bymonthday=(1, 3), + byweekday=(TU, TH), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 1, 1, 9, 0), + datetime(1998, 2, 3, 9, 0), + datetime(1998, 3, 3, 9, 0)]) + + def testWeeklyByMonthAndMonthDayAndWeekDay(self): + self.assertEqual(list(rrule(WEEKLY, + count=3, + bymonth=(1, 3), + bymonthday=(1, 3), + byweekday=(TU, TH), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 1, 1, 9, 0), + datetime(1998, 3, 3, 9, 0), + datetime(2001, 3, 1, 9, 0)]) + + def testWeeklyByYearDay(self): + self.assertEqual(list(rrule(WEEKLY, + count=4, + byyearday=(1, 100, 200, 365), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 12, 31, 9, 0), + datetime(1998, 1, 1, 9, 0), + datetime(1998, 4, 10, 9, 0), + datetime(1998, 7, 19, 9, 0)]) + + def testWeeklyByYearDayNeg(self): + self.assertEqual(list(rrule(WEEKLY, + count=4, + byyearday=(-365, -266, -166, -1), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 12, 31, 9, 0), + datetime(1998, 1, 1, 9, 0), + datetime(1998, 4, 10, 9, 0), + datetime(1998, 7, 19, 9, 0)]) + + def testWeeklyByMonthAndYearDay(self): + self.assertEqual(list(rrule(WEEKLY, + count=4, + bymonth=(1, 7), + byyearday=(1, 100, 200, 365), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 1, 1, 9, 0), + datetime(1998, 7, 19, 9, 0), + datetime(1999, 1, 1, 9, 0), + datetime(1999, 7, 19, 9, 0)]) + + def testWeeklyByMonthAndYearDayNeg(self): + 
self.assertEqual(list(rrule(WEEKLY, + count=4, + bymonth=(1, 7), + byyearday=(-365, -266, -166, -1), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 1, 1, 9, 0), + datetime(1998, 7, 19, 9, 0), + datetime(1999, 1, 1, 9, 0), + datetime(1999, 7, 19, 9, 0)]) + + def testWeeklyByWeekNo(self): + self.assertEqual(list(rrule(WEEKLY, + count=3, + byweekno=20, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 5, 11, 9, 0), + datetime(1998, 5, 12, 9, 0), + datetime(1998, 5, 13, 9, 0)]) + + def testWeeklyByWeekNoAndWeekDay(self): + # That's a nice one. The first days of week number one + # may be in the last year. + self.assertEqual(list(rrule(WEEKLY, + count=3, + byweekno=1, + byweekday=MO, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 12, 29, 9, 0), + datetime(1999, 1, 4, 9, 0), + datetime(2000, 1, 3, 9, 0)]) + + def testWeeklyByWeekNoAndWeekDayLarge(self): + # Another nice test. The last days of week number 52/53 + # may be in the next year. + self.assertEqual(list(rrule(WEEKLY, + count=3, + byweekno=52, + byweekday=SU, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 12, 28, 9, 0), + datetime(1998, 12, 27, 9, 0), + datetime(2000, 1, 2, 9, 0)]) + + def testWeeklyByWeekNoAndWeekDayLast(self): + self.assertEqual(list(rrule(WEEKLY, + count=3, + byweekno=-1, + byweekday=SU, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 12, 28, 9, 0), + datetime(1999, 1, 3, 9, 0), + datetime(2000, 1, 2, 9, 0)]) + + def testWeeklyByWeekNoAndWeekDay53(self): + self.assertEqual(list(rrule(WEEKLY, + count=3, + byweekno=53, + byweekday=MO, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 12, 28, 9, 0), + datetime(2004, 12, 27, 9, 0), + datetime(2009, 12, 28, 9, 0)]) + + def testWeeklyByEaster(self): + self.assertEqual(list(rrule(WEEKLY, + count=3, + byeaster=0, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 4, 12, 9, 0), + datetime(1999, 4, 4, 9, 0), + datetime(2000, 4, 23, 9, 0)]) + + def testWeeklyByEasterPos(self): + 
self.assertEqual(list(rrule(WEEKLY, + count=3, + byeaster=1, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 4, 13, 9, 0), + datetime(1999, 4, 5, 9, 0), + datetime(2000, 4, 24, 9, 0)]) + + def testWeeklyByEasterNeg(self): + self.assertEqual(list(rrule(WEEKLY, + count=3, + byeaster=-1, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 4, 11, 9, 0), + datetime(1999, 4, 3, 9, 0), + datetime(2000, 4, 22, 9, 0)]) + + def testWeeklyByHour(self): + self.assertEqual(list(rrule(WEEKLY, + count=3, + byhour=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 18, 0), + datetime(1997, 9, 9, 6, 0), + datetime(1997, 9, 9, 18, 0)]) + + def testWeeklyByMinute(self): + self.assertEqual(list(rrule(WEEKLY, + count=3, + byminute=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 9, 6), + datetime(1997, 9, 2, 9, 18), + datetime(1997, 9, 9, 9, 6)]) + + def testWeeklyBySecond(self): + self.assertEqual(list(rrule(WEEKLY, + count=3, + bysecond=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 9, 0, 6), + datetime(1997, 9, 2, 9, 0, 18), + datetime(1997, 9, 9, 9, 0, 6)]) + + def testWeeklyByHourAndMinute(self): + self.assertEqual(list(rrule(WEEKLY, + count=3, + byhour=(6, 18), + byminute=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 18, 6), + datetime(1997, 9, 2, 18, 18), + datetime(1997, 9, 9, 6, 6)]) + + def testWeeklyByHourAndSecond(self): + self.assertEqual(list(rrule(WEEKLY, + count=3, + byhour=(6, 18), + bysecond=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 18, 0, 6), + datetime(1997, 9, 2, 18, 0, 18), + datetime(1997, 9, 9, 6, 0, 6)]) + + def testWeeklyByMinuteAndSecond(self): + self.assertEqual(list(rrule(WEEKLY, + count=3, + byminute=(6, 18), + bysecond=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 9, 6, 6), + datetime(1997, 9, 2, 9, 6, 18), + datetime(1997, 9, 2, 9, 18, 6)]) + + def 
testWeeklyByHourAndMinuteAndSecond(self): + self.assertEqual(list(rrule(WEEKLY, + count=3, + byhour=(6, 18), + byminute=(6, 18), + bysecond=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 18, 6, 6), + datetime(1997, 9, 2, 18, 6, 18), + datetime(1997, 9, 2, 18, 18, 6)]) + + def testWeeklyBySetPos(self): + self.assertEqual(list(rrule(WEEKLY, + count=3, + byweekday=(TU, TH), + byhour=(6, 18), + bysetpos=(3, -3), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 18, 0), + datetime(1997, 9, 4, 6, 0), + datetime(1997, 9, 9, 18, 0)]) + + def testDaily(self): + self.assertEqual(list(rrule(DAILY, + count=3, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 9, 0), + datetime(1997, 9, 3, 9, 0), + datetime(1997, 9, 4, 9, 0)]) + + def testDailyInterval(self): + self.assertEqual(list(rrule(DAILY, + count=3, + interval=2, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 9, 0), + datetime(1997, 9, 4, 9, 0), + datetime(1997, 9, 6, 9, 0)]) + + def testDailyIntervalLarge(self): + self.assertEqual(list(rrule(DAILY, + count=3, + interval=92, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 9, 0), + datetime(1997, 12, 3, 9, 0), + datetime(1998, 3, 5, 9, 0)]) + + def testDailyByMonth(self): + self.assertEqual(list(rrule(DAILY, + count=3, + bymonth=(1, 3), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 1, 1, 9, 0), + datetime(1998, 1, 2, 9, 0), + datetime(1998, 1, 3, 9, 0)]) + + def testDailyByMonthDay(self): + self.assertEqual(list(rrule(DAILY, + count=3, + bymonthday=(1, 3), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 3, 9, 0), + datetime(1997, 10, 1, 9, 0), + datetime(1997, 10, 3, 9, 0)]) + + def testDailyByMonthAndMonthDay(self): + self.assertEqual(list(rrule(DAILY, + count=3, + bymonth=(1, 3), + bymonthday=(5, 7), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 1, 5, 9, 0), + datetime(1998, 1, 7, 9, 0), + datetime(1998, 3, 5, 9, 0)]) + + def 
testDailyByWeekDay(self): + self.assertEqual(list(rrule(DAILY, + count=3, + byweekday=(TU, TH), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 9, 0), + datetime(1997, 9, 4, 9, 0), + datetime(1997, 9, 9, 9, 0)]) + + def testDailyByNWeekDay(self): + self.assertEqual(list(rrule(DAILY, + count=3, + byweekday=(TU(1), TH(-1)), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 9, 0), + datetime(1997, 9, 4, 9, 0), + datetime(1997, 9, 9, 9, 0)]) + + def testDailyByMonthAndWeekDay(self): + self.assertEqual(list(rrule(DAILY, + count=3, + bymonth=(1, 3), + byweekday=(TU, TH), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 1, 1, 9, 0), + datetime(1998, 1, 6, 9, 0), + datetime(1998, 1, 8, 9, 0)]) + + def testDailyByMonthAndNWeekDay(self): + self.assertEqual(list(rrule(DAILY, + count=3, + bymonth=(1, 3), + byweekday=(TU(1), TH(-1)), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 1, 1, 9, 0), + datetime(1998, 1, 6, 9, 0), + datetime(1998, 1, 8, 9, 0)]) + + def testDailyByMonthDayAndWeekDay(self): + self.assertEqual(list(rrule(DAILY, + count=3, + bymonthday=(1, 3), + byweekday=(TU, TH), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 1, 1, 9, 0), + datetime(1998, 2, 3, 9, 0), + datetime(1998, 3, 3, 9, 0)]) + + def testDailyByMonthAndMonthDayAndWeekDay(self): + self.assertEqual(list(rrule(DAILY, + count=3, + bymonth=(1, 3), + bymonthday=(1, 3), + byweekday=(TU, TH), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 1, 1, 9, 0), + datetime(1998, 3, 3, 9, 0), + datetime(2001, 3, 1, 9, 0)]) + + def testDailyByYearDay(self): + self.assertEqual(list(rrule(DAILY, + count=4, + byyearday=(1, 100, 200, 365), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 12, 31, 9, 0), + datetime(1998, 1, 1, 9, 0), + datetime(1998, 4, 10, 9, 0), + datetime(1998, 7, 19, 9, 0)]) + + def testDailyByYearDayNeg(self): + self.assertEqual(list(rrule(DAILY, + count=4, + byyearday=(-365, -266, -166, -1), + dtstart=datetime(1997, 9, 2, 
9, 0))), + [datetime(1997, 12, 31, 9, 0), + datetime(1998, 1, 1, 9, 0), + datetime(1998, 4, 10, 9, 0), + datetime(1998, 7, 19, 9, 0)]) + + def testDailyByMonthAndYearDay(self): + self.assertEqual(list(rrule(DAILY, + count=4, + bymonth=(1, 7), + byyearday=(1, 100, 200, 365), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 1, 1, 9, 0), + datetime(1998, 7, 19, 9, 0), + datetime(1999, 1, 1, 9, 0), + datetime(1999, 7, 19, 9, 0)]) + + def testDailyByMonthAndYearDayNeg(self): + self.assertEqual(list(rrule(DAILY, + count=4, + bymonth=(1, 7), + byyearday=(-365, -266, -166, -1), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 1, 1, 9, 0), + datetime(1998, 7, 19, 9, 0), + datetime(1999, 1, 1, 9, 0), + datetime(1999, 7, 19, 9, 0)]) + + def testDailyByWeekNo(self): + self.assertEqual(list(rrule(DAILY, + count=3, + byweekno=20, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 5, 11, 9, 0), + datetime(1998, 5, 12, 9, 0), + datetime(1998, 5, 13, 9, 0)]) + + def testDailyByWeekNoAndWeekDay(self): + # That's a nice one. The first days of week number one + # may be in the last year. + self.assertEqual(list(rrule(DAILY, + count=3, + byweekno=1, + byweekday=MO, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 12, 29, 9, 0), + datetime(1999, 1, 4, 9, 0), + datetime(2000, 1, 3, 9, 0)]) + + def testDailyByWeekNoAndWeekDayLarge(self): + # Another nice test. The last days of week number 52/53 + # may be in the next year. 
+ self.assertEqual(list(rrule(DAILY, + count=3, + byweekno=52, + byweekday=SU, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 12, 28, 9, 0), + datetime(1998, 12, 27, 9, 0), + datetime(2000, 1, 2, 9, 0)]) + + def testDailyByWeekNoAndWeekDayLast(self): + self.assertEqual(list(rrule(DAILY, + count=3, + byweekno=-1, + byweekday=SU, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 12, 28, 9, 0), + datetime(1999, 1, 3, 9, 0), + datetime(2000, 1, 2, 9, 0)]) + + def testDailyByWeekNoAndWeekDay53(self): + self.assertEqual(list(rrule(DAILY, + count=3, + byweekno=53, + byweekday=MO, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 12, 28, 9, 0), + datetime(2004, 12, 27, 9, 0), + datetime(2009, 12, 28, 9, 0)]) + + def testDailyByEaster(self): + self.assertEqual(list(rrule(DAILY, + count=3, + byeaster=0, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 4, 12, 9, 0), + datetime(1999, 4, 4, 9, 0), + datetime(2000, 4, 23, 9, 0)]) + + def testDailyByEasterPos(self): + self.assertEqual(list(rrule(DAILY, + count=3, + byeaster=1, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 4, 13, 9, 0), + datetime(1999, 4, 5, 9, 0), + datetime(2000, 4, 24, 9, 0)]) + + def testDailyByEasterNeg(self): + self.assertEqual(list(rrule(DAILY, + count=3, + byeaster=-1, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 4, 11, 9, 0), + datetime(1999, 4, 3, 9, 0), + datetime(2000, 4, 22, 9, 0)]) + + def testDailyByHour(self): + self.assertEqual(list(rrule(DAILY, + count=3, + byhour=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 18, 0), + datetime(1997, 9, 3, 6, 0), + datetime(1997, 9, 3, 18, 0)]) + + def testDailyByMinute(self): + self.assertEqual(list(rrule(DAILY, + count=3, + byminute=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 9, 6), + datetime(1997, 9, 2, 9, 18), + datetime(1997, 9, 3, 9, 6)]) + + def testDailyBySecond(self): + self.assertEqual(list(rrule(DAILY, + count=3, + bysecond=(6, 18), + 
dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 9, 0, 6), + datetime(1997, 9, 2, 9, 0, 18), + datetime(1997, 9, 3, 9, 0, 6)]) + + def testDailyByHourAndMinute(self): + self.assertEqual(list(rrule(DAILY, + count=3, + byhour=(6, 18), + byminute=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 18, 6), + datetime(1997, 9, 2, 18, 18), + datetime(1997, 9, 3, 6, 6)]) + + def testDailyByHourAndSecond(self): + self.assertEqual(list(rrule(DAILY, + count=3, + byhour=(6, 18), + bysecond=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 18, 0, 6), + datetime(1997, 9, 2, 18, 0, 18), + datetime(1997, 9, 3, 6, 0, 6)]) + + def testDailyByMinuteAndSecond(self): + self.assertEqual(list(rrule(DAILY, + count=3, + byminute=(6, 18), + bysecond=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 9, 6, 6), + datetime(1997, 9, 2, 9, 6, 18), + datetime(1997, 9, 2, 9, 18, 6)]) + + def testDailyByHourAndMinuteAndSecond(self): + self.assertEqual(list(rrule(DAILY, + count=3, + byhour=(6, 18), + byminute=(6, 18), + bysecond=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 18, 6, 6), + datetime(1997, 9, 2, 18, 6, 18), + datetime(1997, 9, 2, 18, 18, 6)]) + + def testDailyBySetPos(self): + self.assertEqual(list(rrule(DAILY, + count=3, + byhour=(6, 18), + byminute=(15, 45), + bysetpos=(3, -3), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 18, 15), + datetime(1997, 9, 3, 6, 45), + datetime(1997, 9, 3, 18, 15)]) + + def testHourly(self): + self.assertEqual(list(rrule(HOURLY, + count=3, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 9, 0), + datetime(1997, 9, 2, 10, 0), + datetime(1997, 9, 2, 11, 0)]) + + def testHourlyInterval(self): + self.assertEqual(list(rrule(HOURLY, + count=3, + interval=2, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 9, 0), + datetime(1997, 9, 2, 11, 0), + datetime(1997, 9, 2, 13, 0)]) + + def 
testHourlyIntervalLarge(self): + self.assertEqual(list(rrule(HOURLY, + count=3, + interval=769, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 9, 0), + datetime(1997, 10, 4, 10, 0), + datetime(1997, 11, 5, 11, 0)]) + + def testHourlyByMonth(self): + self.assertEqual(list(rrule(HOURLY, + count=3, + bymonth=(1, 3), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 1, 1, 0, 0), + datetime(1998, 1, 1, 1, 0), + datetime(1998, 1, 1, 2, 0)]) + + def testHourlyByMonthDay(self): + self.assertEqual(list(rrule(HOURLY, + count=3, + bymonthday=(1, 3), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 3, 0, 0), + datetime(1997, 9, 3, 1, 0), + datetime(1997, 9, 3, 2, 0)]) + + def testHourlyByMonthAndMonthDay(self): + self.assertEqual(list(rrule(HOURLY, + count=3, + bymonth=(1, 3), + bymonthday=(5, 7), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 1, 5, 0, 0), + datetime(1998, 1, 5, 1, 0), + datetime(1998, 1, 5, 2, 0)]) + + def testHourlyByWeekDay(self): + self.assertEqual(list(rrule(HOURLY, + count=3, + byweekday=(TU, TH), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 9, 0), + datetime(1997, 9, 2, 10, 0), + datetime(1997, 9, 2, 11, 0)]) + + def testHourlyByNWeekDay(self): + self.assertEqual(list(rrule(HOURLY, + count=3, + byweekday=(TU(1), TH(-1)), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 9, 0), + datetime(1997, 9, 2, 10, 0), + datetime(1997, 9, 2, 11, 0)]) + + def testHourlyByMonthAndWeekDay(self): + self.assertEqual(list(rrule(HOURLY, + count=3, + bymonth=(1, 3), + byweekday=(TU, TH), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 1, 1, 0, 0), + datetime(1998, 1, 1, 1, 0), + datetime(1998, 1, 1, 2, 0)]) + + def testHourlyByMonthAndNWeekDay(self): + self.assertEqual(list(rrule(HOURLY, + count=3, + bymonth=(1, 3), + byweekday=(TU(1), TH(-1)), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 1, 1, 0, 0), + datetime(1998, 1, 1, 1, 0), + datetime(1998, 1, 1, 2, 0)]) + + def 
testHourlyByMonthDayAndWeekDay(self): + self.assertEqual(list(rrule(HOURLY, + count=3, + bymonthday=(1, 3), + byweekday=(TU, TH), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 1, 1, 0, 0), + datetime(1998, 1, 1, 1, 0), + datetime(1998, 1, 1, 2, 0)]) + + def testHourlyByMonthAndMonthDayAndWeekDay(self): + self.assertEqual(list(rrule(HOURLY, + count=3, + bymonth=(1, 3), + bymonthday=(1, 3), + byweekday=(TU, TH), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 1, 1, 0, 0), + datetime(1998, 1, 1, 1, 0), + datetime(1998, 1, 1, 2, 0)]) + + def testHourlyByYearDay(self): + self.assertEqual(list(rrule(HOURLY, + count=4, + byyearday=(1, 100, 200, 365), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 12, 31, 0, 0), + datetime(1997, 12, 31, 1, 0), + datetime(1997, 12, 31, 2, 0), + datetime(1997, 12, 31, 3, 0)]) + + def testHourlyByYearDayNeg(self): + self.assertEqual(list(rrule(HOURLY, + count=4, + byyearday=(-365, -266, -166, -1), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 12, 31, 0, 0), + datetime(1997, 12, 31, 1, 0), + datetime(1997, 12, 31, 2, 0), + datetime(1997, 12, 31, 3, 0)]) + + def testHourlyByMonthAndYearDay(self): + self.assertEqual(list(rrule(HOURLY, + count=4, + bymonth=(4, 7), + byyearday=(1, 100, 200, 365), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 4, 10, 0, 0), + datetime(1998, 4, 10, 1, 0), + datetime(1998, 4, 10, 2, 0), + datetime(1998, 4, 10, 3, 0)]) + + def testHourlyByMonthAndYearDayNeg(self): + self.assertEqual(list(rrule(HOURLY, + count=4, + bymonth=(4, 7), + byyearday=(-365, -266, -166, -1), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 4, 10, 0, 0), + datetime(1998, 4, 10, 1, 0), + datetime(1998, 4, 10, 2, 0), + datetime(1998, 4, 10, 3, 0)]) + + def testHourlyByWeekNo(self): + self.assertEqual(list(rrule(HOURLY, + count=3, + byweekno=20, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 5, 11, 0, 0), + datetime(1998, 5, 11, 1, 0), + datetime(1998, 5, 11, 2, 0)]) + + 
def testHourlyByWeekNoAndWeekDay(self): + self.assertEqual(list(rrule(HOURLY, + count=3, + byweekno=1, + byweekday=MO, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 12, 29, 0, 0), + datetime(1997, 12, 29, 1, 0), + datetime(1997, 12, 29, 2, 0)]) + + def testHourlyByWeekNoAndWeekDayLarge(self): + self.assertEqual(list(rrule(HOURLY, + count=3, + byweekno=52, + byweekday=SU, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 12, 28, 0, 0), + datetime(1997, 12, 28, 1, 0), + datetime(1997, 12, 28, 2, 0)]) + + def testHourlyByWeekNoAndWeekDayLast(self): + self.assertEqual(list(rrule(HOURLY, + count=3, + byweekno=-1, + byweekday=SU, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 12, 28, 0, 0), + datetime(1997, 12, 28, 1, 0), + datetime(1997, 12, 28, 2, 0)]) + + def testHourlyByWeekNoAndWeekDay53(self): + self.assertEqual(list(rrule(HOURLY, + count=3, + byweekno=53, + byweekday=MO, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 12, 28, 0, 0), + datetime(1998, 12, 28, 1, 0), + datetime(1998, 12, 28, 2, 0)]) + + def testHourlyByEaster(self): + self.assertEqual(list(rrule(HOURLY, + count=3, + byeaster=0, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 4, 12, 0, 0), + datetime(1998, 4, 12, 1, 0), + datetime(1998, 4, 12, 2, 0)]) + + def testHourlyByEasterPos(self): + self.assertEqual(list(rrule(HOURLY, + count=3, + byeaster=1, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 4, 13, 0, 0), + datetime(1998, 4, 13, 1, 0), + datetime(1998, 4, 13, 2, 0)]) + + def testHourlyByEasterNeg(self): + self.assertEqual(list(rrule(HOURLY, + count=3, + byeaster=-1, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 4, 11, 0, 0), + datetime(1998, 4, 11, 1, 0), + datetime(1998, 4, 11, 2, 0)]) + + def testHourlyByHour(self): + self.assertEqual(list(rrule(HOURLY, + count=3, + byhour=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 18, 0), + datetime(1997, 9, 3, 6, 0), + datetime(1997, 9, 3, 18, 0)]) + + def 
testHourlyByMinute(self): + self.assertEqual(list(rrule(HOURLY, + count=3, + byminute=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 9, 6), + datetime(1997, 9, 2, 9, 18), + datetime(1997, 9, 2, 10, 6)]) + + def testHourlyBySecond(self): + self.assertEqual(list(rrule(HOURLY, + count=3, + bysecond=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 9, 0, 6), + datetime(1997, 9, 2, 9, 0, 18), + datetime(1997, 9, 2, 10, 0, 6)]) + + def testHourlyByHourAndMinute(self): + self.assertEqual(list(rrule(HOURLY, + count=3, + byhour=(6, 18), + byminute=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 18, 6), + datetime(1997, 9, 2, 18, 18), + datetime(1997, 9, 3, 6, 6)]) + + def testHourlyByHourAndSecond(self): + self.assertEqual(list(rrule(HOURLY, + count=3, + byhour=(6, 18), + bysecond=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 18, 0, 6), + datetime(1997, 9, 2, 18, 0, 18), + datetime(1997, 9, 3, 6, 0, 6)]) + + def testHourlyByMinuteAndSecond(self): + self.assertEqual(list(rrule(HOURLY, + count=3, + byminute=(6, 18), + bysecond=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 9, 6, 6), + datetime(1997, 9, 2, 9, 6, 18), + datetime(1997, 9, 2, 9, 18, 6)]) + + def testHourlyByHourAndMinuteAndSecond(self): + self.assertEqual(list(rrule(HOURLY, + count=3, + byhour=(6, 18), + byminute=(6, 18), + bysecond=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 18, 6, 6), + datetime(1997, 9, 2, 18, 6, 18), + datetime(1997, 9, 2, 18, 18, 6)]) + + def testHourlyBySetPos(self): + self.assertEqual(list(rrule(HOURLY, + count=3, + byminute=(15, 45), + bysecond=(15, 45), + bysetpos=(3, -3), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 9, 15, 45), + datetime(1997, 9, 2, 9, 45, 15), + datetime(1997, 9, 2, 10, 15, 45)]) + + def testMinutely(self): + self.assertEqual(list(rrule(MINUTELY, + count=3, + dtstart=datetime(1997, 9, 2, 9, 0))), 
+ [datetime(1997, 9, 2, 9, 0), + datetime(1997, 9, 2, 9, 1), + datetime(1997, 9, 2, 9, 2)]) + + def testMinutelyInterval(self): + self.assertEqual(list(rrule(MINUTELY, + count=3, + interval=2, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 9, 0), + datetime(1997, 9, 2, 9, 2), + datetime(1997, 9, 2, 9, 4)]) + + def testMinutelyIntervalLarge(self): + self.assertEqual(list(rrule(MINUTELY, + count=3, + interval=1501, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 9, 0), + datetime(1997, 9, 3, 10, 1), + datetime(1997, 9, 4, 11, 2)]) + + def testMinutelyByMonth(self): + self.assertEqual(list(rrule(MINUTELY, + count=3, + bymonth=(1, 3), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 1, 1, 0, 0), + datetime(1998, 1, 1, 0, 1), + datetime(1998, 1, 1, 0, 2)]) + + def testMinutelyByMonthDay(self): + self.assertEqual(list(rrule(MINUTELY, + count=3, + bymonthday=(1, 3), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 3, 0, 0), + datetime(1997, 9, 3, 0, 1), + datetime(1997, 9, 3, 0, 2)]) + + def testMinutelyByMonthAndMonthDay(self): + self.assertEqual(list(rrule(MINUTELY, + count=3, + bymonth=(1, 3), + bymonthday=(5, 7), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 1, 5, 0, 0), + datetime(1998, 1, 5, 0, 1), + datetime(1998, 1, 5, 0, 2)]) + + def testMinutelyByWeekDay(self): + self.assertEqual(list(rrule(MINUTELY, + count=3, + byweekday=(TU, TH), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 9, 0), + datetime(1997, 9, 2, 9, 1), + datetime(1997, 9, 2, 9, 2)]) + + def testMinutelyByNWeekDay(self): + self.assertEqual(list(rrule(MINUTELY, + count=3, + byweekday=(TU(1), TH(-1)), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 9, 0), + datetime(1997, 9, 2, 9, 1), + datetime(1997, 9, 2, 9, 2)]) + + def testMinutelyByMonthAndWeekDay(self): + self.assertEqual(list(rrule(MINUTELY, + count=3, + bymonth=(1, 3), + byweekday=(TU, TH), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 
1, 1, 0, 0), + datetime(1998, 1, 1, 0, 1), + datetime(1998, 1, 1, 0, 2)]) + + def testMinutelyByMonthAndNWeekDay(self): + self.assertEqual(list(rrule(MINUTELY, + count=3, + bymonth=(1, 3), + byweekday=(TU(1), TH(-1)), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 1, 1, 0, 0), + datetime(1998, 1, 1, 0, 1), + datetime(1998, 1, 1, 0, 2)]) + + def testMinutelyByMonthDayAndWeekDay(self): + self.assertEqual(list(rrule(MINUTELY, + count=3, + bymonthday=(1, 3), + byweekday=(TU, TH), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 1, 1, 0, 0), + datetime(1998, 1, 1, 0, 1), + datetime(1998, 1, 1, 0, 2)]) + + def testMinutelyByMonthAndMonthDayAndWeekDay(self): + self.assertEqual(list(rrule(MINUTELY, + count=3, + bymonth=(1, 3), + bymonthday=(1, 3), + byweekday=(TU, TH), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 1, 1, 0, 0), + datetime(1998, 1, 1, 0, 1), + datetime(1998, 1, 1, 0, 2)]) + + def testMinutelyByYearDay(self): + self.assertEqual(list(rrule(MINUTELY, + count=4, + byyearday=(1, 100, 200, 365), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 12, 31, 0, 0), + datetime(1997, 12, 31, 0, 1), + datetime(1997, 12, 31, 0, 2), + datetime(1997, 12, 31, 0, 3)]) + + def testMinutelyByYearDayNeg(self): + self.assertEqual(list(rrule(MINUTELY, + count=4, + byyearday=(-365, -266, -166, -1), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 12, 31, 0, 0), + datetime(1997, 12, 31, 0, 1), + datetime(1997, 12, 31, 0, 2), + datetime(1997, 12, 31, 0, 3)]) + + def testMinutelyByMonthAndYearDay(self): + self.assertEqual(list(rrule(MINUTELY, + count=4, + bymonth=(4, 7), + byyearday=(1, 100, 200, 365), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 4, 10, 0, 0), + datetime(1998, 4, 10, 0, 1), + datetime(1998, 4, 10, 0, 2), + datetime(1998, 4, 10, 0, 3)]) + + def testMinutelyByMonthAndYearDayNeg(self): + self.assertEqual(list(rrule(MINUTELY, + count=4, + bymonth=(4, 7), + byyearday=(-365, -266, -166, -1), + 
dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 4, 10, 0, 0), + datetime(1998, 4, 10, 0, 1), + datetime(1998, 4, 10, 0, 2), + datetime(1998, 4, 10, 0, 3)]) + + def testMinutelyByWeekNo(self): + self.assertEqual(list(rrule(MINUTELY, + count=3, + byweekno=20, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 5, 11, 0, 0), + datetime(1998, 5, 11, 0, 1), + datetime(1998, 5, 11, 0, 2)]) + + def testMinutelyByWeekNoAndWeekDay(self): + self.assertEqual(list(rrule(MINUTELY, + count=3, + byweekno=1, + byweekday=MO, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 12, 29, 0, 0), + datetime(1997, 12, 29, 0, 1), + datetime(1997, 12, 29, 0, 2)]) + + def testMinutelyByWeekNoAndWeekDayLarge(self): + self.assertEqual(list(rrule(MINUTELY, + count=3, + byweekno=52, + byweekday=SU, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 12, 28, 0, 0), + datetime(1997, 12, 28, 0, 1), + datetime(1997, 12, 28, 0, 2)]) + + def testMinutelyByWeekNoAndWeekDayLast(self): + self.assertEqual(list(rrule(MINUTELY, + count=3, + byweekno=-1, + byweekday=SU, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 12, 28, 0, 0), + datetime(1997, 12, 28, 0, 1), + datetime(1997, 12, 28, 0, 2)]) + + def testMinutelyByWeekNoAndWeekDay53(self): + self.assertEqual(list(rrule(MINUTELY, + count=3, + byweekno=53, + byweekday=MO, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 12, 28, 0, 0), + datetime(1998, 12, 28, 0, 1), + datetime(1998, 12, 28, 0, 2)]) + + def testMinutelyByEaster(self): + self.assertEqual(list(rrule(MINUTELY, + count=3, + byeaster=0, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 4, 12, 0, 0), + datetime(1998, 4, 12, 0, 1), + datetime(1998, 4, 12, 0, 2)]) + + def testMinutelyByEasterPos(self): + self.assertEqual(list(rrule(MINUTELY, + count=3, + byeaster=1, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 4, 13, 0, 0), + datetime(1998, 4, 13, 0, 1), + datetime(1998, 4, 13, 0, 2)]) + + def testMinutelyByEasterNeg(self): + 
self.assertEqual(list(rrule(MINUTELY, + count=3, + byeaster=-1, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 4, 11, 0, 0), + datetime(1998, 4, 11, 0, 1), + datetime(1998, 4, 11, 0, 2)]) + + def testMinutelyByHour(self): + self.assertEqual(list(rrule(MINUTELY, + count=3, + byhour=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 18, 0), + datetime(1997, 9, 2, 18, 1), + datetime(1997, 9, 2, 18, 2)]) + + def testMinutelyByMinute(self): + self.assertEqual(list(rrule(MINUTELY, + count=3, + byminute=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 9, 6), + datetime(1997, 9, 2, 9, 18), + datetime(1997, 9, 2, 10, 6)]) + + def testMinutelyBySecond(self): + self.assertEqual(list(rrule(MINUTELY, + count=3, + bysecond=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 9, 0, 6), + datetime(1997, 9, 2, 9, 0, 18), + datetime(1997, 9, 2, 9, 1, 6)]) + + def testMinutelyByHourAndMinute(self): + self.assertEqual(list(rrule(MINUTELY, + count=3, + byhour=(6, 18), + byminute=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 18, 6), + datetime(1997, 9, 2, 18, 18), + datetime(1997, 9, 3, 6, 6)]) + + def testMinutelyByHourAndSecond(self): + self.assertEqual(list(rrule(MINUTELY, + count=3, + byhour=(6, 18), + bysecond=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 18, 0, 6), + datetime(1997, 9, 2, 18, 0, 18), + datetime(1997, 9, 2, 18, 1, 6)]) + + def testMinutelyByMinuteAndSecond(self): + self.assertEqual(list(rrule(MINUTELY, + count=3, + byminute=(6, 18), + bysecond=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 9, 6, 6), + datetime(1997, 9, 2, 9, 6, 18), + datetime(1997, 9, 2, 9, 18, 6)]) + + def testMinutelyByHourAndMinuteAndSecond(self): + self.assertEqual(list(rrule(MINUTELY, + count=3, + byhour=(6, 18), + byminute=(6, 18), + bysecond=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 18, 6, 6), + 
datetime(1997, 9, 2, 18, 6, 18), + datetime(1997, 9, 2, 18, 18, 6)]) + + def testMinutelyBySetPos(self): + self.assertEqual(list(rrule(MINUTELY, + count=3, + bysecond=(15, 30, 45), + bysetpos=(3, -3), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 9, 0, 15), + datetime(1997, 9, 2, 9, 0, 45), + datetime(1997, 9, 2, 9, 1, 15)]) + + def testSecondly(self): + self.assertEqual(list(rrule(SECONDLY, + count=3, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 9, 0, 0), + datetime(1997, 9, 2, 9, 0, 1), + datetime(1997, 9, 2, 9, 0, 2)]) + + def testSecondlyInterval(self): + self.assertEqual(list(rrule(SECONDLY, + count=3, + interval=2, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 9, 0, 0), + datetime(1997, 9, 2, 9, 0, 2), + datetime(1997, 9, 2, 9, 0, 4)]) + + def testSecondlyIntervalLarge(self): + self.assertEqual(list(rrule(SECONDLY, + count=3, + interval=90061, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 9, 0, 0), + datetime(1997, 9, 3, 10, 1, 1), + datetime(1997, 9, 4, 11, 2, 2)]) + + def testSecondlyByMonth(self): + self.assertEqual(list(rrule(SECONDLY, + count=3, + bymonth=(1, 3), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 1, 1, 0, 0, 0), + datetime(1998, 1, 1, 0, 0, 1), + datetime(1998, 1, 1, 0, 0, 2)]) + + def testSecondlyByMonthDay(self): + self.assertEqual(list(rrule(SECONDLY, + count=3, + bymonthday=(1, 3), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 3, 0, 0, 0), + datetime(1997, 9, 3, 0, 0, 1), + datetime(1997, 9, 3, 0, 0, 2)]) + + def testSecondlyByMonthAndMonthDay(self): + self.assertEqual(list(rrule(SECONDLY, + count=3, + bymonth=(1, 3), + bymonthday=(5, 7), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 1, 5, 0, 0, 0), + datetime(1998, 1, 5, 0, 0, 1), + datetime(1998, 1, 5, 0, 0, 2)]) + + def testSecondlyByWeekDay(self): + self.assertEqual(list(rrule(SECONDLY, + count=3, + byweekday=(TU, TH), + dtstart=datetime(1997, 9, 2, 9, 0))), + 
[datetime(1997, 9, 2, 9, 0, 0), + datetime(1997, 9, 2, 9, 0, 1), + datetime(1997, 9, 2, 9, 0, 2)]) + + def testSecondlyByNWeekDay(self): + self.assertEqual(list(rrule(SECONDLY, + count=3, + byweekday=(TU(1), TH(-1)), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 9, 0, 0), + datetime(1997, 9, 2, 9, 0, 1), + datetime(1997, 9, 2, 9, 0, 2)]) + + def testSecondlyByMonthAndWeekDay(self): + self.assertEqual(list(rrule(SECONDLY, + count=3, + bymonth=(1, 3), + byweekday=(TU, TH), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 1, 1, 0, 0, 0), + datetime(1998, 1, 1, 0, 0, 1), + datetime(1998, 1, 1, 0, 0, 2)]) + + def testSecondlyByMonthAndNWeekDay(self): + self.assertEqual(list(rrule(SECONDLY, + count=3, + bymonth=(1, 3), + byweekday=(TU(1), TH(-1)), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 1, 1, 0, 0, 0), + datetime(1998, 1, 1, 0, 0, 1), + datetime(1998, 1, 1, 0, 0, 2)]) + + def testSecondlyByMonthDayAndWeekDay(self): + self.assertEqual(list(rrule(SECONDLY, + count=3, + bymonthday=(1, 3), + byweekday=(TU, TH), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 1, 1, 0, 0, 0), + datetime(1998, 1, 1, 0, 0, 1), + datetime(1998, 1, 1, 0, 0, 2)]) + + def testSecondlyByMonthAndMonthDayAndWeekDay(self): + self.assertEqual(list(rrule(SECONDLY, + count=3, + bymonth=(1, 3), + bymonthday=(1, 3), + byweekday=(TU, TH), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 1, 1, 0, 0, 0), + datetime(1998, 1, 1, 0, 0, 1), + datetime(1998, 1, 1, 0, 0, 2)]) + + def testSecondlyByYearDay(self): + self.assertEqual(list(rrule(SECONDLY, + count=4, + byyearday=(1, 100, 200, 365), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 12, 31, 0, 0, 0), + datetime(1997, 12, 31, 0, 0, 1), + datetime(1997, 12, 31, 0, 0, 2), + datetime(1997, 12, 31, 0, 0, 3)]) + + def testSecondlyByYearDayNeg(self): + self.assertEqual(list(rrule(SECONDLY, + count=4, + byyearday=(-365, -266, -166, -1), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 
12, 31, 0, 0, 0), + datetime(1997, 12, 31, 0, 0, 1), + datetime(1997, 12, 31, 0, 0, 2), + datetime(1997, 12, 31, 0, 0, 3)]) + + def testSecondlyByMonthAndYearDay(self): + self.assertEqual(list(rrule(SECONDLY, + count=4, + bymonth=(4, 7), + byyearday=(1, 100, 200, 365), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 4, 10, 0, 0, 0), + datetime(1998, 4, 10, 0, 0, 1), + datetime(1998, 4, 10, 0, 0, 2), + datetime(1998, 4, 10, 0, 0, 3)]) + + def testSecondlyByMonthAndYearDayNeg(self): + self.assertEqual(list(rrule(SECONDLY, + count=4, + bymonth=(4, 7), + byyearday=(-365, -266, -166, -1), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 4, 10, 0, 0, 0), + datetime(1998, 4, 10, 0, 0, 1), + datetime(1998, 4, 10, 0, 0, 2), + datetime(1998, 4, 10, 0, 0, 3)]) + + def testSecondlyByWeekNo(self): + self.assertEqual(list(rrule(SECONDLY, + count=3, + byweekno=20, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 5, 11, 0, 0, 0), + datetime(1998, 5, 11, 0, 0, 1), + datetime(1998, 5, 11, 0, 0, 2)]) + + def testSecondlyByWeekNoAndWeekDay(self): + self.assertEqual(list(rrule(SECONDLY, + count=3, + byweekno=1, + byweekday=MO, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 12, 29, 0, 0, 0), + datetime(1997, 12, 29, 0, 0, 1), + datetime(1997, 12, 29, 0, 0, 2)]) + + def testSecondlyByWeekNoAndWeekDayLarge(self): + self.assertEqual(list(rrule(SECONDLY, + count=3, + byweekno=52, + byweekday=SU, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 12, 28, 0, 0, 0), + datetime(1997, 12, 28, 0, 0, 1), + datetime(1997, 12, 28, 0, 0, 2)]) + + def testSecondlyByWeekNoAndWeekDayLast(self): + self.assertEqual(list(rrule(SECONDLY, + count=3, + byweekno=-1, + byweekday=SU, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 12, 28, 0, 0, 0), + datetime(1997, 12, 28, 0, 0, 1), + datetime(1997, 12, 28, 0, 0, 2)]) + + def testSecondlyByWeekNoAndWeekDay53(self): + self.assertEqual(list(rrule(SECONDLY, + count=3, + byweekno=53, + byweekday=MO, + 
dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 12, 28, 0, 0, 0), + datetime(1998, 12, 28, 0, 0, 1), + datetime(1998, 12, 28, 0, 0, 2)]) + + def testSecondlyByEaster(self): + self.assertEqual(list(rrule(SECONDLY, + count=3, + byeaster=0, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 4, 12, 0, 0, 0), + datetime(1998, 4, 12, 0, 0, 1), + datetime(1998, 4, 12, 0, 0, 2)]) + + def testSecondlyByEasterPos(self): + self.assertEqual(list(rrule(SECONDLY, + count=3, + byeaster=1, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 4, 13, 0, 0, 0), + datetime(1998, 4, 13, 0, 0, 1), + datetime(1998, 4, 13, 0, 0, 2)]) + + def testSecondlyByEasterNeg(self): + self.assertEqual(list(rrule(SECONDLY, + count=3, + byeaster=-1, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 4, 11, 0, 0, 0), + datetime(1998, 4, 11, 0, 0, 1), + datetime(1998, 4, 11, 0, 0, 2)]) + + def testSecondlyByHour(self): + self.assertEqual(list(rrule(SECONDLY, + count=3, + byhour=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 18, 0, 0), + datetime(1997, 9, 2, 18, 0, 1), + datetime(1997, 9, 2, 18, 0, 2)]) + + def testSecondlyByMinute(self): + self.assertEqual(list(rrule(SECONDLY, + count=3, + byminute=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 9, 6, 0), + datetime(1997, 9, 2, 9, 6, 1), + datetime(1997, 9, 2, 9, 6, 2)]) + + def testSecondlyBySecond(self): + self.assertEqual(list(rrule(SECONDLY, + count=3, + bysecond=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 9, 0, 6), + datetime(1997, 9, 2, 9, 0, 18), + datetime(1997, 9, 2, 9, 1, 6)]) + + def testSecondlyByHourAndMinute(self): + self.assertEqual(list(rrule(SECONDLY, + count=3, + byhour=(6, 18), + byminute=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 18, 6, 0), + datetime(1997, 9, 2, 18, 6, 1), + datetime(1997, 9, 2, 18, 6, 2)]) + + def testSecondlyByHourAndSecond(self): + self.assertEqual(list(rrule(SECONDLY, + 
count=3, + byhour=(6, 18), + bysecond=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 18, 0, 6), + datetime(1997, 9, 2, 18, 0, 18), + datetime(1997, 9, 2, 18, 1, 6)]) + + def testSecondlyByMinuteAndSecond(self): + self.assertEqual(list(rrule(SECONDLY, + count=3, + byminute=(6, 18), + bysecond=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 9, 6, 6), + datetime(1997, 9, 2, 9, 6, 18), + datetime(1997, 9, 2, 9, 18, 6)]) + + def testSecondlyByHourAndMinuteAndSecond(self): + self.assertEqual(list(rrule(SECONDLY, + count=3, + byhour=(6, 18), + byminute=(6, 18), + bysecond=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 18, 6, 6), + datetime(1997, 9, 2, 18, 6, 18), + datetime(1997, 9, 2, 18, 18, 6)]) + + def testSecondlyByHourAndMinuteAndSecondBug(self): + # This explores a bug found by Mathieu Bridon. + self.assertEqual(list(rrule(SECONDLY, + count=3, + bysecond=(0,), + byminute=(1,), + dtstart=datetime(2010, 3, 22, 12, 1))), + [datetime(2010, 3, 22, 12, 1), + datetime(2010, 3, 22, 13, 1), + datetime(2010, 3, 22, 14, 1)]) + + def testLongIntegers(self): + if not PY3: # There is no longs in python3 + self.assertEqual(list(rrule(MINUTELY, + count=long(2), + interval=long(2), + bymonth=long(2), + byweekday=long(3), + byhour=long(6), + byminute=long(6), + bysecond=long(6), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 2, 5, 6, 6, 6), + datetime(1998, 2, 12, 6, 6, 6)]) + self.assertEqual(list(rrule(YEARLY, + count=long(2), + bymonthday=long(5), + byweekno=long(2), + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1998, 1, 5, 9, 0), + datetime(2004, 1, 5, 9, 0)]) + + def testHourlyBadRRule(self): + """ + When `byhour` is specified with `freq=HOURLY`, there are certain + combinations of `dtstart` and `byhour` which result in an rrule with no + valid values. 
+ + See https://github.com/dateutil/dateutil/issues/4 + """ + + self.assertRaises(ValueError, rrule, HOURLY, + **dict(interval=4, byhour=(7, 11, 15, 19), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testMinutelyBadRRule(self): + """ + See :func:`testHourlyBadRRule` for details. + """ + + self.assertRaises(ValueError, rrule, MINUTELY, + **dict(interval=12, byminute=(10, 11, 25, 39, 50), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testSecondlyBadRRule(self): + """ + See :func:`testHourlyBadRRule` for details. + """ + + self.assertRaises(ValueError, rrule, SECONDLY, + **dict(interval=10, bysecond=(2, 15, 37, 42, 59), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testMinutelyBadComboRRule(self): + """ + Certain values of :param:`interval` in :class:`rrule`, when combined + with certain values of :param:`byhour` create rules which apply to no + valid dates. The library should detect this case in the iterator and + raise a :exception:`ValueError`. + """ + + # In Python 2.7 you can use a context manager for this. + def make_bad_rrule(): + list(rrule(MINUTELY, interval=120, byhour=(10, 12, 14, 16), + count=2, dtstart=datetime(1997, 9, 2, 9, 0))) + + self.assertRaises(ValueError, make_bad_rrule) + + def testSecondlyBadComboRRule(self): + """ + See :func:`testMinutelyBadComboRRule' for details. + """ + + # In Python 2.7 you can use a context manager for this. + def make_bad_minute_rrule(): + list(rrule(SECONDLY, interval=360, byminute=(10, 28, 49), + count=4, dtstart=datetime(1997, 9, 2, 9, 0))) + + def make_bad_hour_rrule(): + list(rrule(SECONDLY, interval=43200, byhour=(2, 10, 18, 23), + count=4, dtstart=datetime(1997, 9, 2, 9, 0))) + + self.assertRaises(ValueError, make_bad_minute_rrule) + self.assertRaises(ValueError, make_bad_hour_rrule) + + def testBadUntilCountRRule(self): + """ + See rfc-5545 3.3.10 - This checks for the deprecation warning, and will + eventually check for an error. 
+ """ + with self.assertWarns(DeprecationWarning): + rrule(DAILY, dtstart=datetime(1997, 9, 2, 9, 0), + count=3, until=datetime(1997, 9, 4, 9, 0)) + + def testUntilNotMatching(self): + self.assertEqual(list(rrule(DAILY, + dtstart=datetime(1997, 9, 2, 9, 0), + until=datetime(1997, 9, 5, 8, 0))), + [datetime(1997, 9, 2, 9, 0), + datetime(1997, 9, 3, 9, 0), + datetime(1997, 9, 4, 9, 0)]) + + def testUntilMatching(self): + self.assertEqual(list(rrule(DAILY, + dtstart=datetime(1997, 9, 2, 9, 0), + until=datetime(1997, 9, 4, 9, 0))), + [datetime(1997, 9, 2, 9, 0), + datetime(1997, 9, 3, 9, 0), + datetime(1997, 9, 4, 9, 0)]) + + def testUntilSingle(self): + self.assertEqual(list(rrule(DAILY, + dtstart=datetime(1997, 9, 2, 9, 0), + until=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 9, 0)]) + + def testUntilEmpty(self): + self.assertEqual(list(rrule(DAILY, + dtstart=datetime(1997, 9, 2, 9, 0), + until=datetime(1997, 9, 1, 9, 0))), + []) + + def testUntilWithDate(self): + self.assertEqual(list(rrule(DAILY, + dtstart=datetime(1997, 9, 2, 9, 0), + until=date(1997, 9, 5))), + [datetime(1997, 9, 2, 9, 0), + datetime(1997, 9, 3, 9, 0), + datetime(1997, 9, 4, 9, 0)]) + + def testWkStIntervalMO(self): + self.assertEqual(list(rrule(WEEKLY, + count=3, + interval=2, + byweekday=(TU, SU), + wkst=MO, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 9, 0), + datetime(1997, 9, 7, 9, 0), + datetime(1997, 9, 16, 9, 0)]) + + def testWkStIntervalSU(self): + self.assertEqual(list(rrule(WEEKLY, + count=3, + interval=2, + byweekday=(TU, SU), + wkst=SU, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 9, 0), + datetime(1997, 9, 14, 9, 0), + datetime(1997, 9, 16, 9, 0)]) + + def testDTStartIsDate(self): + self.assertEqual(list(rrule(DAILY, + count=3, + dtstart=date(1997, 9, 2))), + [datetime(1997, 9, 2, 0, 0), + datetime(1997, 9, 3, 0, 0), + datetime(1997, 9, 4, 0, 0)]) + + def testDTStartWithMicroseconds(self): + self.assertEqual(list(rrule(DAILY, + count=3, 
+ dtstart=datetime(1997, 9, 2, 9, 0, 0, 500000))), + [datetime(1997, 9, 2, 9, 0), + datetime(1997, 9, 3, 9, 0), + datetime(1997, 9, 4, 9, 0)]) + + def testMaxYear(self): + self.assertEqual(list(rrule(YEARLY, + count=3, + bymonth=2, + bymonthday=31, + dtstart=datetime(9997, 9, 2, 9, 0, 0))), + []) + + def testGetItem(self): + self.assertEqual(rrule(DAILY, + count=3, + dtstart=datetime(1997, 9, 2, 9, 0))[0], + datetime(1997, 9, 2, 9, 0)) + + def testGetItemNeg(self): + self.assertEqual(rrule(DAILY, + count=3, + dtstart=datetime(1997, 9, 2, 9, 0))[-1], + datetime(1997, 9, 4, 9, 0)) + + def testGetItemSlice(self): + self.assertEqual(rrule(DAILY, + # count=3, + dtstart=datetime(1997, 9, 2, 9, 0))[1:2], + [datetime(1997, 9, 3, 9, 0)]) + + def testGetItemSliceEmpty(self): + self.assertEqual(rrule(DAILY, + count=3, + dtstart=datetime(1997, 9, 2, 9, 0))[:], + [datetime(1997, 9, 2, 9, 0), + datetime(1997, 9, 3, 9, 0), + datetime(1997, 9, 4, 9, 0)]) + + def testGetItemSliceStep(self): + self.assertEqual(rrule(DAILY, + count=3, + dtstart=datetime(1997, 9, 2, 9, 0))[::-2], + [datetime(1997, 9, 4, 9, 0), + datetime(1997, 9, 2, 9, 0)]) + + def testCount(self): + self.assertEqual(rrule(DAILY, + count=3, + dtstart=datetime(1997, 9, 2, 9, 0)).count(), + 3) + + def testCountZero(self): + self.assertEqual(rrule(YEARLY, + count=0, + dtstart=datetime(1997, 9, 2, 9, 0)).count(), + 0) + + def testContains(self): + rr = rrule(DAILY, count=3, dtstart=datetime(1997, 9, 2, 9, 0)) + self.assertEqual(datetime(1997, 9, 3, 9, 0) in rr, True) + + def testContainsNot(self): + rr = rrule(DAILY, count=3, dtstart=datetime(1997, 9, 2, 9, 0)) + self.assertEqual(datetime(1997, 9, 3, 9, 0) not in rr, False) + + def testBefore(self): + self.assertEqual(rrule(DAILY, # count=5 + dtstart=datetime(1997, 9, 2, 9, 0)).before(datetime(1997, 9, 5, 9, 0)), + datetime(1997, 9, 4, 9, 0)) + + def testBeforeInc(self): + self.assertEqual(rrule(DAILY, + #count=5, + dtstart=datetime(1997, 9, 2, 9, 0)) + 
.before(datetime(1997, 9, 5, 9, 0), inc=True), + datetime(1997, 9, 5, 9, 0)) + + def testAfter(self): + self.assertEqual(rrule(DAILY, + #count=5, + dtstart=datetime(1997, 9, 2, 9, 0)) + .after(datetime(1997, 9, 4, 9, 0)), + datetime(1997, 9, 5, 9, 0)) + + def testAfterInc(self): + self.assertEqual(rrule(DAILY, + #count=5, + dtstart=datetime(1997, 9, 2, 9, 0)) + .after(datetime(1997, 9, 4, 9, 0), inc=True), + datetime(1997, 9, 4, 9, 0)) + + def testXAfter(self): + self.assertEqual(list(rrule(DAILY, + dtstart=datetime(1997, 9, 2, 9, 0)) + .xafter(datetime(1997, 9, 8, 9, 0), count=12)), + [datetime(1997, 9, 9, 9, 0), + datetime(1997, 9, 10, 9, 0), + datetime(1997, 9, 11, 9, 0), + datetime(1997, 9, 12, 9, 0), + datetime(1997, 9, 13, 9, 0), + datetime(1997, 9, 14, 9, 0), + datetime(1997, 9, 15, 9, 0), + datetime(1997, 9, 16, 9, 0), + datetime(1997, 9, 17, 9, 0), + datetime(1997, 9, 18, 9, 0), + datetime(1997, 9, 19, 9, 0), + datetime(1997, 9, 20, 9, 0)]) + + def testXAfterInc(self): + self.assertEqual(list(rrule(DAILY, + dtstart=datetime(1997, 9, 2, 9, 0)) + .xafter(datetime(1997, 9, 8, 9, 0), count=12, inc=True)), + [datetime(1997, 9, 8, 9, 0), + datetime(1997, 9, 9, 9, 0), + datetime(1997, 9, 10, 9, 0), + datetime(1997, 9, 11, 9, 0), + datetime(1997, 9, 12, 9, 0), + datetime(1997, 9, 13, 9, 0), + datetime(1997, 9, 14, 9, 0), + datetime(1997, 9, 15, 9, 0), + datetime(1997, 9, 16, 9, 0), + datetime(1997, 9, 17, 9, 0), + datetime(1997, 9, 18, 9, 0), + datetime(1997, 9, 19, 9, 0)]) + + def testBetween(self): + self.assertEqual(rrule(DAILY, + #count=5, + dtstart=datetime(1997, 9, 2, 9, 0)) + .between(datetime(1997, 9, 2, 9, 0), + datetime(1997, 9, 6, 9, 0)), + [datetime(1997, 9, 3, 9, 0), + datetime(1997, 9, 4, 9, 0), + datetime(1997, 9, 5, 9, 0)]) + + def testBetweenInc(self): + self.assertEqual(rrule(DAILY, + #count=5, + dtstart=datetime(1997, 9, 2, 9, 0)) + .between(datetime(1997, 9, 2, 9, 0), + datetime(1997, 9, 6, 9, 0), inc=True), + [datetime(1997, 9, 2, 9, 0), + 
datetime(1997, 9, 3, 9, 0), + datetime(1997, 9, 4, 9, 0), + datetime(1997, 9, 5, 9, 0), + datetime(1997, 9, 6, 9, 0)]) + + def testCachePre(self): + rr = rrule(DAILY, count=15, cache=True, + dtstart=datetime(1997, 9, 2, 9, 0)) + self.assertEqual(list(rr), + [datetime(1997, 9, 2, 9, 0), + datetime(1997, 9, 3, 9, 0), + datetime(1997, 9, 4, 9, 0), + datetime(1997, 9, 5, 9, 0), + datetime(1997, 9, 6, 9, 0), + datetime(1997, 9, 7, 9, 0), + datetime(1997, 9, 8, 9, 0), + datetime(1997, 9, 9, 9, 0), + datetime(1997, 9, 10, 9, 0), + datetime(1997, 9, 11, 9, 0), + datetime(1997, 9, 12, 9, 0), + datetime(1997, 9, 13, 9, 0), + datetime(1997, 9, 14, 9, 0), + datetime(1997, 9, 15, 9, 0), + datetime(1997, 9, 16, 9, 0)]) + + def testCachePost(self): + rr = rrule(DAILY, count=15, cache=True, + dtstart=datetime(1997, 9, 2, 9, 0)) + for x in rr: pass + self.assertEqual(list(rr), + [datetime(1997, 9, 2, 9, 0), + datetime(1997, 9, 3, 9, 0), + datetime(1997, 9, 4, 9, 0), + datetime(1997, 9, 5, 9, 0), + datetime(1997, 9, 6, 9, 0), + datetime(1997, 9, 7, 9, 0), + datetime(1997, 9, 8, 9, 0), + datetime(1997, 9, 9, 9, 0), + datetime(1997, 9, 10, 9, 0), + datetime(1997, 9, 11, 9, 0), + datetime(1997, 9, 12, 9, 0), + datetime(1997, 9, 13, 9, 0), + datetime(1997, 9, 14, 9, 0), + datetime(1997, 9, 15, 9, 0), + datetime(1997, 9, 16, 9, 0)]) + + def testCachePostInternal(self): + rr = rrule(DAILY, count=15, cache=True, + dtstart=datetime(1997, 9, 2, 9, 0)) + for x in rr: pass + self.assertEqual(rr._cache, + [datetime(1997, 9, 2, 9, 0), + datetime(1997, 9, 3, 9, 0), + datetime(1997, 9, 4, 9, 0), + datetime(1997, 9, 5, 9, 0), + datetime(1997, 9, 6, 9, 0), + datetime(1997, 9, 7, 9, 0), + datetime(1997, 9, 8, 9, 0), + datetime(1997, 9, 9, 9, 0), + datetime(1997, 9, 10, 9, 0), + datetime(1997, 9, 11, 9, 0), + datetime(1997, 9, 12, 9, 0), + datetime(1997, 9, 13, 9, 0), + datetime(1997, 9, 14, 9, 0), + datetime(1997, 9, 15, 9, 0), + datetime(1997, 9, 16, 9, 0)]) + + def testCachePreContains(self): + rr 
= rrule(DAILY, count=3, cache=True, + dtstart=datetime(1997, 9, 2, 9, 0)) + self.assertEqual(datetime(1997, 9, 3, 9, 0) in rr, True) + + def testCachePostContains(self): + rr = rrule(DAILY, count=3, cache=True, + dtstart=datetime(1997, 9, 2, 9, 0)) + for x in rr: pass + self.assertEqual(datetime(1997, 9, 3, 9, 0) in rr, True) + + def testStr(self): + self.assertEqual(list(rrulestr( + "DTSTART:19970902T090000\n" + "RRULE:FREQ=YEARLY;COUNT=3\n" + )), + [datetime(1997, 9, 2, 9, 0), + datetime(1998, 9, 2, 9, 0), + datetime(1999, 9, 2, 9, 0)]) + + def testStrWithTZID(self): + NYC = tz.gettz('America/New_York') + self.assertEqual(list(rrulestr( + "DTSTART;TZID=America/New_York:19970902T090000\n" + "RRULE:FREQ=YEARLY;COUNT=3\n" + )), + [datetime(1997, 9, 2, 9, 0, tzinfo=NYC), + datetime(1998, 9, 2, 9, 0, tzinfo=NYC), + datetime(1999, 9, 2, 9, 0, tzinfo=NYC)]) + + def testStrWithTZIDMapping(self): + rrstr = ("DTSTART;TZID=Eastern:19970902T090000\n" + + "RRULE:FREQ=YEARLY;COUNT=3") + + NYC = tz.gettz('America/New_York') + rr = rrulestr(rrstr, tzids={'Eastern': NYC}) + exp = [datetime(1997, 9, 2, 9, 0, tzinfo=NYC), + datetime(1998, 9, 2, 9, 0, tzinfo=NYC), + datetime(1999, 9, 2, 9, 0, tzinfo=NYC)] + + self.assertEqual(list(rr), exp) + + def testStrWithTZIDCallable(self): + rrstr = ('DTSTART;TZID=UTC+04:19970902T090000\n' + + 'RRULE:FREQ=YEARLY;COUNT=3') + + TZ = tz.tzstr('UTC+04') + def parse_tzstr(tzstr): + if tzstr is None: + raise ValueError('Invalid tzstr') + + return tz.tzstr(tzstr) + + rr = rrulestr(rrstr, tzids=parse_tzstr) + + exp = [datetime(1997, 9, 2, 9, 0, tzinfo=TZ), + datetime(1998, 9, 2, 9, 0, tzinfo=TZ), + datetime(1999, 9, 2, 9, 0, tzinfo=TZ),] + + self.assertEqual(list(rr), exp) + + def testStrWithTZIDCallableFailure(self): + rrstr = ('DTSTART;TZID=America/New_York:19970902T090000\n' + + 'RRULE:FREQ=YEARLY;COUNT=3') + + class TzInfoError(Exception): + pass + + def tzinfos(tzstr): + if tzstr == 'America/New_York': + raise TzInfoError('Invalid!') + return 
None + + with self.assertRaises(TzInfoError): + rrulestr(rrstr, tzids=tzinfos) + + def testStrWithConflictingTZID(self): + # RFC 5545 Section 3.3.5, FORM #2: DATE WITH UTC TIME + # https://tools.ietf.org/html/rfc5545#section-3.3.5 + # The "TZID" property parameter MUST NOT be applied to DATE-TIME + with self.assertRaises(ValueError): + rrulestr("DTSTART;TZID=America/New_York:19970902T090000Z\n"+ + "RRULE:FREQ=YEARLY;COUNT=3\n") + + def testStrType(self): + self.assertEqual(isinstance(rrulestr( + "DTSTART:19970902T090000\n" + "RRULE:FREQ=YEARLY;COUNT=3\n" + ), rrule), True) + + def testStrForceSetType(self): + self.assertEqual(isinstance(rrulestr( + "DTSTART:19970902T090000\n" + "RRULE:FREQ=YEARLY;COUNT=3\n" + , forceset=True), rruleset), True) + + def testStrSetType(self): + self.assertEqual(isinstance(rrulestr( + "DTSTART:19970902T090000\n" + "RRULE:FREQ=YEARLY;COUNT=2;BYDAY=TU\n" + "RRULE:FREQ=YEARLY;COUNT=1;BYDAY=TH\n" + ), rruleset), True) + + def testStrCase(self): + self.assertEqual(list(rrulestr( + "dtstart:19970902T090000\n" + "rrule:freq=yearly;count=3\n" + )), + [datetime(1997, 9, 2, 9, 0), + datetime(1998, 9, 2, 9, 0), + datetime(1999, 9, 2, 9, 0)]) + + def testStrSpaces(self): + self.assertEqual(list(rrulestr( + " DTSTART:19970902T090000 " + " RRULE:FREQ=YEARLY;COUNT=3 " + )), + [datetime(1997, 9, 2, 9, 0), + datetime(1998, 9, 2, 9, 0), + datetime(1999, 9, 2, 9, 0)]) + + def testStrSpacesAndLines(self): + self.assertEqual(list(rrulestr( + " DTSTART:19970902T090000 \n" + " \n" + " RRULE:FREQ=YEARLY;COUNT=3 \n" + )), + [datetime(1997, 9, 2, 9, 0), + datetime(1998, 9, 2, 9, 0), + datetime(1999, 9, 2, 9, 0)]) + + def testStrNoDTStart(self): + self.assertEqual(list(rrulestr( + "RRULE:FREQ=YEARLY;COUNT=3\n" + , dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 9, 0), + datetime(1998, 9, 2, 9, 0), + datetime(1999, 9, 2, 9, 0)]) + + def testStrValueOnly(self): + self.assertEqual(list(rrulestr( + "FREQ=YEARLY;COUNT=3\n" + , dtstart=datetime(1997, 9, 
2, 9, 0))), + [datetime(1997, 9, 2, 9, 0), + datetime(1998, 9, 2, 9, 0), + datetime(1999, 9, 2, 9, 0)]) + + def testStrUnfold(self): + self.assertEqual(list(rrulestr( + "FREQ=YEA\n RLY;COUNT=3\n", unfold=True, + dtstart=datetime(1997, 9, 2, 9, 0))), + [datetime(1997, 9, 2, 9, 0), + datetime(1998, 9, 2, 9, 0), + datetime(1999, 9, 2, 9, 0)]) + + def testStrSet(self): + self.assertEqual(list(rrulestr( + "DTSTART:19970902T090000\n" + "RRULE:FREQ=YEARLY;COUNT=2;BYDAY=TU\n" + "RRULE:FREQ=YEARLY;COUNT=1;BYDAY=TH\n" + )), + [datetime(1997, 9, 2, 9, 0), + datetime(1997, 9, 4, 9, 0), + datetime(1997, 9, 9, 9, 0)]) + + def testStrSetDate(self): + self.assertEqual(list(rrulestr( + "DTSTART:19970902T090000\n" + "RRULE:FREQ=YEARLY;COUNT=1;BYDAY=TU\n" + "RDATE:19970904T090000\n" + "RDATE:19970909T090000\n" + )), + [datetime(1997, 9, 2, 9, 0), + datetime(1997, 9, 4, 9, 0), + datetime(1997, 9, 9, 9, 0)]) + + def testStrSetExRule(self): + self.assertEqual(list(rrulestr( + "DTSTART:19970902T090000\n" + "RRULE:FREQ=YEARLY;COUNT=6;BYDAY=TU,TH\n" + "EXRULE:FREQ=YEARLY;COUNT=3;BYDAY=TH\n" + )), + [datetime(1997, 9, 2, 9, 0), + datetime(1997, 9, 9, 9, 0), + datetime(1997, 9, 16, 9, 0)]) + + def testStrSetExDate(self): + self.assertEqual(list(rrulestr( + "DTSTART:19970902T090000\n" + "RRULE:FREQ=YEARLY;COUNT=6;BYDAY=TU,TH\n" + "EXDATE:19970904T090000\n" + "EXDATE:19970911T090000\n" + "EXDATE:19970918T090000\n" + )), + [datetime(1997, 9, 2, 9, 0), + datetime(1997, 9, 9, 9, 0), + datetime(1997, 9, 16, 9, 0)]) + + def testStrSetDateAndExDate(self): + self.assertEqual(list(rrulestr( + "DTSTART:19970902T090000\n" + "RDATE:19970902T090000\n" + "RDATE:19970904T090000\n" + "RDATE:19970909T090000\n" + "RDATE:19970911T090000\n" + "RDATE:19970916T090000\n" + "RDATE:19970918T090000\n" + "EXDATE:19970904T090000\n" + "EXDATE:19970911T090000\n" + "EXDATE:19970918T090000\n" + )), + [datetime(1997, 9, 2, 9, 0), + datetime(1997, 9, 9, 9, 0), + datetime(1997, 9, 16, 9, 0)]) + + def 
testStrSetDateAndExRule(self): + self.assertEqual(list(rrulestr( + "DTSTART:19970902T090000\n" + "RDATE:19970902T090000\n" + "RDATE:19970904T090000\n" + "RDATE:19970909T090000\n" + "RDATE:19970911T090000\n" + "RDATE:19970916T090000\n" + "RDATE:19970918T090000\n" + "EXRULE:FREQ=YEARLY;COUNT=3;BYDAY=TH\n" + )), + [datetime(1997, 9, 2, 9, 0), + datetime(1997, 9, 9, 9, 0), + datetime(1997, 9, 16, 9, 0)]) + + def testStrKeywords(self): + self.assertEqual(list(rrulestr( + "DTSTART:19970902T090000\n" + "RRULE:FREQ=YEARLY;COUNT=3;INTERVAL=3;" + "BYMONTH=3;BYWEEKDAY=TH;BYMONTHDAY=3;" + "BYHOUR=3;BYMINUTE=3;BYSECOND=3\n" + )), + [datetime(2033, 3, 3, 3, 3, 3), + datetime(2039, 3, 3, 3, 3, 3), + datetime(2072, 3, 3, 3, 3, 3)]) + + def testStrNWeekDay(self): + self.assertEqual(list(rrulestr( + "DTSTART:19970902T090000\n" + "RRULE:FREQ=YEARLY;COUNT=3;BYDAY=1TU,-1TH\n" + )), + [datetime(1997, 12, 25, 9, 0), + datetime(1998, 1, 6, 9, 0), + datetime(1998, 12, 31, 9, 0)]) + + def testStrUntil(self): + self.assertEqual(list(rrulestr( + "DTSTART:19970902T090000\n" + "RRULE:FREQ=YEARLY;" + "UNTIL=19990101T000000;BYDAY=1TU,-1TH\n" + )), + [datetime(1997, 12, 25, 9, 0), + datetime(1998, 1, 6, 9, 0), + datetime(1998, 12, 31, 9, 0)]) + + def testStrValueDatetime(self): + rr = rrulestr("DTSTART;VALUE=DATE-TIME:19970902T090000\n" + "RRULE:FREQ=YEARLY;COUNT=2") + + self.assertEqual(list(rr), [datetime(1997, 9, 2, 9, 0, 0), + datetime(1998, 9, 2, 9, 0, 0)]) + + def testStrValueDate(self): + rr = rrulestr("DTSTART;VALUE=DATE:19970902\n" + "RRULE:FREQ=YEARLY;COUNT=2") + + self.assertEqual(list(rr), [datetime(1997, 9, 2, 0, 0, 0), + datetime(1998, 9, 2, 0, 0, 0)]) + + def testStrInvalidUntil(self): + with self.assertRaises(ValueError): + list(rrulestr("DTSTART:19970902T090000\n" + "RRULE:FREQ=YEARLY;" + "UNTIL=TheCowsComeHome;BYDAY=1TU,-1TH\n")) + + def testStrUntilMustBeUTC(self): + with self.assertRaises(ValueError): + list(rrulestr("DTSTART;TZID=America/New_York:19970902T090000\n" + 
"RRULE:FREQ=YEARLY;" + "UNTIL=19990101T000000;BYDAY=1TU,-1TH\n")) + + def testStrUntilWithTZ(self): + NYC = tz.gettz('America/New_York') + rr = list(rrulestr("DTSTART;TZID=America/New_York:19970101T000000\n" + "RRULE:FREQ=YEARLY;" + "UNTIL=19990101T000000Z\n")) + self.assertEqual(list(rr), [datetime(1997, 1, 1, 0, 0, 0, tzinfo=NYC), + datetime(1998, 1, 1, 0, 0, 0, tzinfo=NYC)]) + + def testStrEmptyByDay(self): + with self.assertRaises(ValueError): + list(rrulestr("DTSTART:19970902T090000\n" + "FREQ=WEEKLY;" + "BYDAY=;" # This part is invalid + "WKST=SU")) + + def testStrInvalidByDay(self): + with self.assertRaises(ValueError): + list(rrulestr("DTSTART:19970902T090000\n" + "FREQ=WEEKLY;" + "BYDAY=-1OK;" # This part is invalid + "WKST=SU")) + + def testBadBySetPos(self): + self.assertRaises(ValueError, + rrule, MONTHLY, + count=1, + bysetpos=0, + dtstart=datetime(1997, 9, 2, 9, 0)) + + def testBadBySetPosMany(self): + self.assertRaises(ValueError, + rrule, MONTHLY, + count=1, + bysetpos=(-1, 0, 1), + dtstart=datetime(1997, 9, 2, 9, 0)) + + # Tests to ensure that str(rrule) works + def testToStrYearly(self): + rule = rrule(YEARLY, count=3, dtstart=datetime(1997, 9, 2, 9, 0)) + self._rrulestr_reverse_test(rule) + + def testToStrYearlyInterval(self): + rule = rrule(YEARLY, count=3, interval=2, + dtstart=datetime(1997, 9, 2, 9, 0)) + self._rrulestr_reverse_test(rule) + + def testToStrYearlyByMonth(self): + self._rrulestr_reverse_test(rrule(YEARLY, + count=3, + bymonth=(1, 3), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrYearlyByMonthDay(self): + self._rrulestr_reverse_test(rrule(YEARLY, + count=3, + bymonthday=(1, 3), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrYearlyByMonthAndMonthDay(self): + self._rrulestr_reverse_test(rrule(YEARLY, + count=3, + bymonth=(1, 3), + bymonthday=(5, 7), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrYearlyByWeekDay(self): + self._rrulestr_reverse_test(rrule(YEARLY, + count=3, + byweekday=(TU, TH), + 
dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrYearlyByNWeekDay(self): + self._rrulestr_reverse_test(rrule(YEARLY, + count=3, + byweekday=(TU(1), TH(-1)), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrYearlyByNWeekDayLarge(self): + self._rrulestr_reverse_test(rrule(YEARLY, + count=3, + byweekday=(TU(3), TH(-3)), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrYearlyByMonthAndWeekDay(self): + self._rrulestr_reverse_test(rrule(YEARLY, + count=3, + bymonth=(1, 3), + byweekday=(TU, TH), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrYearlyByMonthAndNWeekDay(self): + self._rrulestr_reverse_test(rrule(YEARLY, + count=3, + bymonth=(1, 3), + byweekday=(TU(1), TH(-1)), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrYearlyByMonthAndNWeekDayLarge(self): + # This is interesting because the TH(-3) ends up before + # the TU(3). + self._rrulestr_reverse_test(rrule(YEARLY, + count=3, + bymonth=(1, 3), + byweekday=(TU(3), TH(-3)), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrYearlyByMonthDayAndWeekDay(self): + self._rrulestr_reverse_test(rrule(YEARLY, + count=3, + bymonthday=(1, 3), + byweekday=(TU, TH), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrYearlyByMonthAndMonthDayAndWeekDay(self): + self._rrulestr_reverse_test(rrule(YEARLY, + count=3, + bymonth=(1, 3), + bymonthday=(1, 3), + byweekday=(TU, TH), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrYearlyByYearDay(self): + self._rrulestr_reverse_test(rrule(YEARLY, + count=4, + byyearday=(1, 100, 200, 365), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrYearlyByYearDayNeg(self): + self._rrulestr_reverse_test(rrule(YEARLY, + count=4, + byyearday=(-365, -266, -166, -1), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrYearlyByMonthAndYearDay(self): + self._rrulestr_reverse_test(rrule(YEARLY, + count=4, + bymonth=(4, 7), + byyearday=(1, 100, 200, 365), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrYearlyByMonthAndYearDayNeg(self): + 
self._rrulestr_reverse_test(rrule(YEARLY, + count=4, + bymonth=(4, 7), + byyearday=(-365, -266, -166, -1), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrYearlyByWeekNo(self): + self._rrulestr_reverse_test(rrule(YEARLY, + count=3, + byweekno=20, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrYearlyByWeekNoAndWeekDay(self): + # That's a nice one. The first days of week number one + # may be in the last year. + self._rrulestr_reverse_test(rrule(YEARLY, + count=3, + byweekno=1, + byweekday=MO, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrYearlyByWeekNoAndWeekDayLarge(self): + # Another nice test. The last days of week number 52/53 + # may be in the next year. + self._rrulestr_reverse_test(rrule(YEARLY, + count=3, + byweekno=52, + byweekday=SU, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrYearlyByWeekNoAndWeekDayLast(self): + self._rrulestr_reverse_test(rrule(YEARLY, + count=3, + byweekno=-1, + byweekday=SU, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrYearlyByEaster(self): + self._rrulestr_reverse_test(rrule(YEARLY, + count=3, + byeaster=0, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrYearlyByEasterPos(self): + self._rrulestr_reverse_test(rrule(YEARLY, + count=3, + byeaster=1, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrYearlyByEasterNeg(self): + self._rrulestr_reverse_test(rrule(YEARLY, + count=3, + byeaster=-1, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrYearlyByWeekNoAndWeekDay53(self): + self._rrulestr_reverse_test(rrule(YEARLY, + count=3, + byweekno=53, + byweekday=MO, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrYearlyByHour(self): + self._rrulestr_reverse_test(rrule(YEARLY, + count=3, + byhour=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrYearlyByMinute(self): + self._rrulestr_reverse_test(rrule(YEARLY, + count=3, + byminute=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrYearlyBySecond(self): + 
self._rrulestr_reverse_test(rrule(YEARLY, + count=3, + bysecond=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrYearlyByHourAndMinute(self): + self._rrulestr_reverse_test(rrule(YEARLY, + count=3, + byhour=(6, 18), + byminute=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrYearlyByHourAndSecond(self): + self._rrulestr_reverse_test(rrule(YEARLY, + count=3, + byhour=(6, 18), + bysecond=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrYearlyByMinuteAndSecond(self): + self._rrulestr_reverse_test(rrule(YEARLY, + count=3, + byminute=(6, 18), + bysecond=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrYearlyByHourAndMinuteAndSecond(self): + self._rrulestr_reverse_test(rrule(YEARLY, + count=3, + byhour=(6, 18), + byminute=(6, 18), + bysecond=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrYearlyBySetPos(self): + self._rrulestr_reverse_test(rrule(YEARLY, + count=3, + bymonthday=15, + byhour=(6, 18), + bysetpos=(3, -3), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMonthly(self): + self._rrulestr_reverse_test(rrule(MONTHLY, + count=3, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMonthlyInterval(self): + self._rrulestr_reverse_test(rrule(MONTHLY, + count=3, + interval=2, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMonthlyIntervalLarge(self): + self._rrulestr_reverse_test(rrule(MONTHLY, + count=3, + interval=18, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMonthlyByMonth(self): + self._rrulestr_reverse_test(rrule(MONTHLY, + count=3, + bymonth=(1, 3), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMonthlyByMonthDay(self): + self._rrulestr_reverse_test(rrule(MONTHLY, + count=3, + bymonthday=(1, 3), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMonthlyByMonthAndMonthDay(self): + self._rrulestr_reverse_test(rrule(MONTHLY, + count=3, + bymonth=(1, 3), + bymonthday=(5, 7), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def 
testToStrMonthlyByWeekDay(self): + self._rrulestr_reverse_test(rrule(MONTHLY, + count=3, + byweekday=(TU, TH), + dtstart=datetime(1997, 9, 2, 9, 0))) + + # Third Monday of the month + self.assertEqual(rrule(MONTHLY, + byweekday=(MO(+3)), + dtstart=datetime(1997, 9, 1)).between(datetime(1997, + 9, + 1), + datetime(1997, + 12, + 1)), + [datetime(1997, 9, 15, 0, 0), + datetime(1997, 10, 20, 0, 0), + datetime(1997, 11, 17, 0, 0)]) + + def testToStrMonthlyByNWeekDay(self): + self._rrulestr_reverse_test(rrule(MONTHLY, + count=3, + byweekday=(TU(1), TH(-1)), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMonthlyByNWeekDayLarge(self): + self._rrulestr_reverse_test(rrule(MONTHLY, + count=3, + byweekday=(TU(3), TH(-3)), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMonthlyByMonthAndWeekDay(self): + self._rrulestr_reverse_test(rrule(MONTHLY, + count=3, + bymonth=(1, 3), + byweekday=(TU, TH), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMonthlyByMonthAndNWeekDay(self): + self._rrulestr_reverse_test(rrule(MONTHLY, + count=3, + bymonth=(1, 3), + byweekday=(TU(1), TH(-1)), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMonthlyByMonthAndNWeekDayLarge(self): + self._rrulestr_reverse_test(rrule(MONTHLY, + count=3, + bymonth=(1, 3), + byweekday=(TU(3), TH(-3)), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMonthlyByMonthDayAndWeekDay(self): + self._rrulestr_reverse_test(rrule(MONTHLY, + count=3, + bymonthday=(1, 3), + byweekday=(TU, TH), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMonthlyByMonthAndMonthDayAndWeekDay(self): + self._rrulestr_reverse_test(rrule(MONTHLY, + count=3, + bymonth=(1, 3), + bymonthday=(1, 3), + byweekday=(TU, TH), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMonthlyByYearDay(self): + self._rrulestr_reverse_test(rrule(MONTHLY, + count=4, + byyearday=(1, 100, 200, 365), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMonthlyByYearDayNeg(self): + 
self._rrulestr_reverse_test(rrule(MONTHLY, + count=4, + byyearday=(-365, -266, -166, -1), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMonthlyByMonthAndYearDay(self): + self._rrulestr_reverse_test(rrule(MONTHLY, + count=4, + bymonth=(4, 7), + byyearday=(1, 100, 200, 365), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMonthlyByMonthAndYearDayNeg(self): + self._rrulestr_reverse_test(rrule(MONTHLY, + count=4, + bymonth=(4, 7), + byyearday=(-365, -266, -166, -1), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMonthlyByWeekNo(self): + self._rrulestr_reverse_test(rrule(MONTHLY, + count=3, + byweekno=20, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMonthlyByWeekNoAndWeekDay(self): + # That's a nice one. The first days of week number one + # may be in the last year. + self._rrulestr_reverse_test(rrule(MONTHLY, + count=3, + byweekno=1, + byweekday=MO, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMonthlyByWeekNoAndWeekDayLarge(self): + # Another nice test. The last days of week number 52/53 + # may be in the next year. 
+ self._rrulestr_reverse_test(rrule(MONTHLY, + count=3, + byweekno=52, + byweekday=SU, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMonthlyByWeekNoAndWeekDayLast(self): + self._rrulestr_reverse_test(rrule(MONTHLY, + count=3, + byweekno=-1, + byweekday=SU, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMonthlyByWeekNoAndWeekDay53(self): + self._rrulestr_reverse_test(rrule(MONTHLY, + count=3, + byweekno=53, + byweekday=MO, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMonthlyByEaster(self): + self._rrulestr_reverse_test(rrule(MONTHLY, + count=3, + byeaster=0, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMonthlyByEasterPos(self): + self._rrulestr_reverse_test(rrule(MONTHLY, + count=3, + byeaster=1, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMonthlyByEasterNeg(self): + self._rrulestr_reverse_test(rrule(MONTHLY, + count=3, + byeaster=-1, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMonthlyByHour(self): + self._rrulestr_reverse_test(rrule(MONTHLY, + count=3, + byhour=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMonthlyByMinute(self): + self._rrulestr_reverse_test(rrule(MONTHLY, + count=3, + byminute=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMonthlyBySecond(self): + self._rrulestr_reverse_test(rrule(MONTHLY, + count=3, + bysecond=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMonthlyByHourAndMinute(self): + self._rrulestr_reverse_test(rrule(MONTHLY, + count=3, + byhour=(6, 18), + byminute=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMonthlyByHourAndSecond(self): + self._rrulestr_reverse_test(rrule(MONTHLY, + count=3, + byhour=(6, 18), + bysecond=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMonthlyByMinuteAndSecond(self): + self._rrulestr_reverse_test(rrule(MONTHLY, + count=3, + byminute=(6, 18), + bysecond=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMonthlyByHourAndMinuteAndSecond(self): 
+ self._rrulestr_reverse_test(rrule(MONTHLY, + count=3, + byhour=(6, 18), + byminute=(6, 18), + bysecond=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMonthlyBySetPos(self): + self._rrulestr_reverse_test(rrule(MONTHLY, + count=3, + bymonthday=(13, 17), + byhour=(6, 18), + bysetpos=(3, -3), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrWeekly(self): + self._rrulestr_reverse_test(rrule(WEEKLY, + count=3, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrWeeklyInterval(self): + self._rrulestr_reverse_test(rrule(WEEKLY, + count=3, + interval=2, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrWeeklyIntervalLarge(self): + self._rrulestr_reverse_test(rrule(WEEKLY, + count=3, + interval=20, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrWeeklyByMonth(self): + self._rrulestr_reverse_test(rrule(WEEKLY, + count=3, + bymonth=(1, 3), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrWeeklyByMonthDay(self): + self._rrulestr_reverse_test(rrule(WEEKLY, + count=3, + bymonthday=(1, 3), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrWeeklyByMonthAndMonthDay(self): + self._rrulestr_reverse_test(rrule(WEEKLY, + count=3, + bymonth=(1, 3), + bymonthday=(5, 7), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrWeeklyByWeekDay(self): + self._rrulestr_reverse_test(rrule(WEEKLY, + count=3, + byweekday=(TU, TH), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrWeeklyByNWeekDay(self): + self._rrulestr_reverse_test(rrule(WEEKLY, + count=3, + byweekday=(TU(1), TH(-1)), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrWeeklyByMonthAndWeekDay(self): + # This test is interesting, because it crosses the year + # boundary in a weekly period to find day '1' as a + # valid recurrence. 
+ self._rrulestr_reverse_test(rrule(WEEKLY, + count=3, + bymonth=(1, 3), + byweekday=(TU, TH), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrWeeklyByMonthAndNWeekDay(self): + self._rrulestr_reverse_test(rrule(WEEKLY, + count=3, + bymonth=(1, 3), + byweekday=(TU(1), TH(-1)), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrWeeklyByMonthDayAndWeekDay(self): + self._rrulestr_reverse_test(rrule(WEEKLY, + count=3, + bymonthday=(1, 3), + byweekday=(TU, TH), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrWeeklyByMonthAndMonthDayAndWeekDay(self): + self._rrulestr_reverse_test(rrule(WEEKLY, + count=3, + bymonth=(1, 3), + bymonthday=(1, 3), + byweekday=(TU, TH), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrWeeklyByYearDay(self): + self._rrulestr_reverse_test(rrule(WEEKLY, + count=4, + byyearday=(1, 100, 200, 365), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrWeeklyByYearDayNeg(self): + self._rrulestr_reverse_test(rrule(WEEKLY, + count=4, + byyearday=(-365, -266, -166, -1), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrWeeklyByMonthAndYearDay(self): + self._rrulestr_reverse_test(rrule(WEEKLY, + count=4, + bymonth=(1, 7), + byyearday=(1, 100, 200, 365), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrWeeklyByMonthAndYearDayNeg(self): + self._rrulestr_reverse_test(rrule(WEEKLY, + count=4, + bymonth=(1, 7), + byyearday=(-365, -266, -166, -1), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrWeeklyByWeekNo(self): + self._rrulestr_reverse_test(rrule(WEEKLY, + count=3, + byweekno=20, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrWeeklyByWeekNoAndWeekDay(self): + # That's a nice one. The first days of week number one + # may be in the last year. + self._rrulestr_reverse_test(rrule(WEEKLY, + count=3, + byweekno=1, + byweekday=MO, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrWeeklyByWeekNoAndWeekDayLarge(self): + # Another nice test. 
The last days of week number 52/53 + # may be in the next year. + self._rrulestr_reverse_test(rrule(WEEKLY, + count=3, + byweekno=52, + byweekday=SU, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrWeeklyByWeekNoAndWeekDayLast(self): + self._rrulestr_reverse_test(rrule(WEEKLY, + count=3, + byweekno=-1, + byweekday=SU, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrWeeklyByWeekNoAndWeekDay53(self): + self._rrulestr_reverse_test(rrule(WEEKLY, + count=3, + byweekno=53, + byweekday=MO, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrWeeklyByEaster(self): + self._rrulestr_reverse_test(rrule(WEEKLY, + count=3, + byeaster=0, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrWeeklyByEasterPos(self): + self._rrulestr_reverse_test(rrule(WEEKLY, + count=3, + byeaster=1, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrWeeklyByEasterNeg(self): + self._rrulestr_reverse_test(rrule(WEEKLY, + count=3, + byeaster=-1, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrWeeklyByHour(self): + self._rrulestr_reverse_test(rrule(WEEKLY, + count=3, + byhour=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrWeeklyByMinute(self): + self._rrulestr_reverse_test(rrule(WEEKLY, + count=3, + byminute=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrWeeklyBySecond(self): + self._rrulestr_reverse_test(rrule(WEEKLY, + count=3, + bysecond=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrWeeklyByHourAndMinute(self): + self._rrulestr_reverse_test(rrule(WEEKLY, + count=3, + byhour=(6, 18), + byminute=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrWeeklyByHourAndSecond(self): + self._rrulestr_reverse_test(rrule(WEEKLY, + count=3, + byhour=(6, 18), + bysecond=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrWeeklyByMinuteAndSecond(self): + self._rrulestr_reverse_test(rrule(WEEKLY, + count=3, + byminute=(6, 18), + bysecond=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def 
testToStrWeeklyByHourAndMinuteAndSecond(self): + self._rrulestr_reverse_test(rrule(WEEKLY, + count=3, + byhour=(6, 18), + byminute=(6, 18), + bysecond=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrWeeklyBySetPos(self): + self._rrulestr_reverse_test(rrule(WEEKLY, + count=3, + byweekday=(TU, TH), + byhour=(6, 18), + bysetpos=(3, -3), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrDaily(self): + self._rrulestr_reverse_test(rrule(DAILY, + count=3, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrDailyInterval(self): + self._rrulestr_reverse_test(rrule(DAILY, + count=3, + interval=2, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrDailyIntervalLarge(self): + self._rrulestr_reverse_test(rrule(DAILY, + count=3, + interval=92, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrDailyByMonth(self): + self._rrulestr_reverse_test(rrule(DAILY, + count=3, + bymonth=(1, 3), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrDailyByMonthDay(self): + self._rrulestr_reverse_test(rrule(DAILY, + count=3, + bymonthday=(1, 3), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrDailyByMonthAndMonthDay(self): + self._rrulestr_reverse_test(rrule(DAILY, + count=3, + bymonth=(1, 3), + bymonthday=(5, 7), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrDailyByWeekDay(self): + self._rrulestr_reverse_test(rrule(DAILY, + count=3, + byweekday=(TU, TH), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrDailyByNWeekDay(self): + self._rrulestr_reverse_test(rrule(DAILY, + count=3, + byweekday=(TU(1), TH(-1)), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrDailyByMonthAndWeekDay(self): + self._rrulestr_reverse_test(rrule(DAILY, + count=3, + bymonth=(1, 3), + byweekday=(TU, TH), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrDailyByMonthAndNWeekDay(self): + self._rrulestr_reverse_test(rrule(DAILY, + count=3, + bymonth=(1, 3), + byweekday=(TU(1), TH(-1)), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def 
testToStrDailyByMonthDayAndWeekDay(self): + self._rrulestr_reverse_test(rrule(DAILY, + count=3, + bymonthday=(1, 3), + byweekday=(TU, TH), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrDailyByMonthAndMonthDayAndWeekDay(self): + self._rrulestr_reverse_test(rrule(DAILY, + count=3, + bymonth=(1, 3), + bymonthday=(1, 3), + byweekday=(TU, TH), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrDailyByYearDay(self): + self._rrulestr_reverse_test(rrule(DAILY, + count=4, + byyearday=(1, 100, 200, 365), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrDailyByYearDayNeg(self): + self._rrulestr_reverse_test(rrule(DAILY, + count=4, + byyearday=(-365, -266, -166, -1), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrDailyByMonthAndYearDay(self): + self._rrulestr_reverse_test(rrule(DAILY, + count=4, + bymonth=(1, 7), + byyearday=(1, 100, 200, 365), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrDailyByMonthAndYearDayNeg(self): + self._rrulestr_reverse_test(rrule(DAILY, + count=4, + bymonth=(1, 7), + byyearday=(-365, -266, -166, -1), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrDailyByWeekNo(self): + self._rrulestr_reverse_test(rrule(DAILY, + count=3, + byweekno=20, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrDailyByWeekNoAndWeekDay(self): + # That's a nice one. The first days of week number one + # may be in the last year. + self._rrulestr_reverse_test(rrule(DAILY, + count=3, + byweekno=1, + byweekday=MO, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrDailyByWeekNoAndWeekDayLarge(self): + # Another nice test. The last days of week number 52/53 + # may be in the next year. 
+ self._rrulestr_reverse_test(rrule(DAILY, + count=3, + byweekno=52, + byweekday=SU, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrDailyByWeekNoAndWeekDayLast(self): + self._rrulestr_reverse_test(rrule(DAILY, + count=3, + byweekno=-1, + byweekday=SU, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrDailyByWeekNoAndWeekDay53(self): + self._rrulestr_reverse_test(rrule(DAILY, + count=3, + byweekno=53, + byweekday=MO, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrDailyByEaster(self): + self._rrulestr_reverse_test(rrule(DAILY, + count=3, + byeaster=0, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrDailyByEasterPos(self): + self._rrulestr_reverse_test(rrule(DAILY, + count=3, + byeaster=1, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrDailyByEasterNeg(self): + self._rrulestr_reverse_test(rrule(DAILY, + count=3, + byeaster=-1, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrDailyByHour(self): + self._rrulestr_reverse_test(rrule(DAILY, + count=3, + byhour=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrDailyByMinute(self): + self._rrulestr_reverse_test(rrule(DAILY, + count=3, + byminute=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrDailyBySecond(self): + self._rrulestr_reverse_test(rrule(DAILY, + count=3, + bysecond=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrDailyByHourAndMinute(self): + self._rrulestr_reverse_test(rrule(DAILY, + count=3, + byhour=(6, 18), + byminute=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrDailyByHourAndSecond(self): + self._rrulestr_reverse_test(rrule(DAILY, + count=3, + byhour=(6, 18), + bysecond=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrDailyByMinuteAndSecond(self): + self._rrulestr_reverse_test(rrule(DAILY, + count=3, + byminute=(6, 18), + bysecond=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrDailyByHourAndMinuteAndSecond(self): + self._rrulestr_reverse_test(rrule(DAILY, + 
count=3, + byhour=(6, 18), + byminute=(6, 18), + bysecond=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrDailyBySetPos(self): + self._rrulestr_reverse_test(rrule(DAILY, + count=3, + byhour=(6, 18), + byminute=(15, 45), + bysetpos=(3, -3), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrHourly(self): + self._rrulestr_reverse_test(rrule(HOURLY, + count=3, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrHourlyInterval(self): + self._rrulestr_reverse_test(rrule(HOURLY, + count=3, + interval=2, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrHourlyIntervalLarge(self): + self._rrulestr_reverse_test(rrule(HOURLY, + count=3, + interval=769, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrHourlyByMonth(self): + self._rrulestr_reverse_test(rrule(HOURLY, + count=3, + bymonth=(1, 3), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrHourlyByMonthDay(self): + self._rrulestr_reverse_test(rrule(HOURLY, + count=3, + bymonthday=(1, 3), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrHourlyByMonthAndMonthDay(self): + self._rrulestr_reverse_test(rrule(HOURLY, + count=3, + bymonth=(1, 3), + bymonthday=(5, 7), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrHourlyByWeekDay(self): + self._rrulestr_reverse_test(rrule(HOURLY, + count=3, + byweekday=(TU, TH), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrHourlyByNWeekDay(self): + self._rrulestr_reverse_test(rrule(HOURLY, + count=3, + byweekday=(TU(1), TH(-1)), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrHourlyByMonthAndWeekDay(self): + self._rrulestr_reverse_test(rrule(HOURLY, + count=3, + bymonth=(1, 3), + byweekday=(TU, TH), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrHourlyByMonthAndNWeekDay(self): + self._rrulestr_reverse_test(rrule(HOURLY, + count=3, + bymonth=(1, 3), + byweekday=(TU(1), TH(-1)), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrHourlyByMonthDayAndWeekDay(self): + self._rrulestr_reverse_test(rrule(HOURLY, + 
count=3, + bymonthday=(1, 3), + byweekday=(TU, TH), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrHourlyByMonthAndMonthDayAndWeekDay(self): + self._rrulestr_reverse_test(rrule(HOURLY, + count=3, + bymonth=(1, 3), + bymonthday=(1, 3), + byweekday=(TU, TH), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrHourlyByYearDay(self): + self._rrulestr_reverse_test(rrule(HOURLY, + count=4, + byyearday=(1, 100, 200, 365), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrHourlyByYearDayNeg(self): + self._rrulestr_reverse_test(rrule(HOURLY, + count=4, + byyearday=(-365, -266, -166, -1), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrHourlyByMonthAndYearDay(self): + self._rrulestr_reverse_test(rrule(HOURLY, + count=4, + bymonth=(4, 7), + byyearday=(1, 100, 200, 365), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrHourlyByMonthAndYearDayNeg(self): + self._rrulestr_reverse_test(rrule(HOURLY, + count=4, + bymonth=(4, 7), + byyearday=(-365, -266, -166, -1), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrHourlyByWeekNo(self): + self._rrulestr_reverse_test(rrule(HOURLY, + count=3, + byweekno=20, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrHourlyByWeekNoAndWeekDay(self): + self._rrulestr_reverse_test(rrule(HOURLY, + count=3, + byweekno=1, + byweekday=MO, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrHourlyByWeekNoAndWeekDayLarge(self): + self._rrulestr_reverse_test(rrule(HOURLY, + count=3, + byweekno=52, + byweekday=SU, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrHourlyByWeekNoAndWeekDayLast(self): + self._rrulestr_reverse_test(rrule(HOURLY, + count=3, + byweekno=-1, + byweekday=SU, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrHourlyByWeekNoAndWeekDay53(self): + self._rrulestr_reverse_test(rrule(HOURLY, + count=3, + byweekno=53, + byweekday=MO, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrHourlyByEaster(self): + self._rrulestr_reverse_test(rrule(HOURLY, + count=3, + byeaster=0, + 
dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrHourlyByEasterPos(self): + self._rrulestr_reverse_test(rrule(HOURLY, + count=3, + byeaster=1, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrHourlyByEasterNeg(self): + self._rrulestr_reverse_test(rrule(HOURLY, + count=3, + byeaster=-1, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrHourlyByHour(self): + self._rrulestr_reverse_test(rrule(HOURLY, + count=3, + byhour=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrHourlyByMinute(self): + self._rrulestr_reverse_test(rrule(HOURLY, + count=3, + byminute=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrHourlyBySecond(self): + self._rrulestr_reverse_test(rrule(HOURLY, + count=3, + bysecond=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrHourlyByHourAndMinute(self): + self._rrulestr_reverse_test(rrule(HOURLY, + count=3, + byhour=(6, 18), + byminute=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrHourlyByHourAndSecond(self): + self._rrulestr_reverse_test(rrule(HOURLY, + count=3, + byhour=(6, 18), + bysecond=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrHourlyByMinuteAndSecond(self): + self._rrulestr_reverse_test(rrule(HOURLY, + count=3, + byminute=(6, 18), + bysecond=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrHourlyByHourAndMinuteAndSecond(self): + self._rrulestr_reverse_test(rrule(HOURLY, + count=3, + byhour=(6, 18), + byminute=(6, 18), + bysecond=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrHourlyBySetPos(self): + self._rrulestr_reverse_test(rrule(HOURLY, + count=3, + byminute=(15, 45), + bysecond=(15, 45), + bysetpos=(3, -3), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMinutely(self): + self._rrulestr_reverse_test(rrule(MINUTELY, + count=3, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMinutelyInterval(self): + self._rrulestr_reverse_test(rrule(MINUTELY, + count=3, + interval=2, + dtstart=datetime(1997, 
9, 2, 9, 0))) + + def testToStrMinutelyIntervalLarge(self): + self._rrulestr_reverse_test(rrule(MINUTELY, + count=3, + interval=1501, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMinutelyByMonth(self): + self._rrulestr_reverse_test(rrule(MINUTELY, + count=3, + bymonth=(1, 3), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMinutelyByMonthDay(self): + self._rrulestr_reverse_test(rrule(MINUTELY, + count=3, + bymonthday=(1, 3), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMinutelyByMonthAndMonthDay(self): + self._rrulestr_reverse_test(rrule(MINUTELY, + count=3, + bymonth=(1, 3), + bymonthday=(5, 7), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMinutelyByWeekDay(self): + self._rrulestr_reverse_test(rrule(MINUTELY, + count=3, + byweekday=(TU, TH), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMinutelyByNWeekDay(self): + self._rrulestr_reverse_test(rrule(MINUTELY, + count=3, + byweekday=(TU(1), TH(-1)), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMinutelyByMonthAndWeekDay(self): + self._rrulestr_reverse_test(rrule(MINUTELY, + count=3, + bymonth=(1, 3), + byweekday=(TU, TH), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMinutelyByMonthAndNWeekDay(self): + self._rrulestr_reverse_test(rrule(MINUTELY, + count=3, + bymonth=(1, 3), + byweekday=(TU(1), TH(-1)), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMinutelyByMonthDayAndWeekDay(self): + self._rrulestr_reverse_test(rrule(MINUTELY, + count=3, + bymonthday=(1, 3), + byweekday=(TU, TH), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMinutelyByMonthAndMonthDayAndWeekDay(self): + self._rrulestr_reverse_test(rrule(MINUTELY, + count=3, + bymonth=(1, 3), + bymonthday=(1, 3), + byweekday=(TU, TH), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMinutelyByYearDay(self): + self._rrulestr_reverse_test(rrule(MINUTELY, + count=4, + byyearday=(1, 100, 200, 365), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def 
testToStrMinutelyByYearDayNeg(self): + self._rrulestr_reverse_test(rrule(MINUTELY, + count=4, + byyearday=(-365, -266, -166, -1), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMinutelyByMonthAndYearDay(self): + self._rrulestr_reverse_test(rrule(MINUTELY, + count=4, + bymonth=(4, 7), + byyearday=(1, 100, 200, 365), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMinutelyByMonthAndYearDayNeg(self): + self._rrulestr_reverse_test(rrule(MINUTELY, + count=4, + bymonth=(4, 7), + byyearday=(-365, -266, -166, -1), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMinutelyByWeekNo(self): + self._rrulestr_reverse_test(rrule(MINUTELY, + count=3, + byweekno=20, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMinutelyByWeekNoAndWeekDay(self): + self._rrulestr_reverse_test(rrule(MINUTELY, + count=3, + byweekno=1, + byweekday=MO, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMinutelyByWeekNoAndWeekDayLarge(self): + self._rrulestr_reverse_test(rrule(MINUTELY, + count=3, + byweekno=52, + byweekday=SU, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMinutelyByWeekNoAndWeekDayLast(self): + self._rrulestr_reverse_test(rrule(MINUTELY, + count=3, + byweekno=-1, + byweekday=SU, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMinutelyByWeekNoAndWeekDay53(self): + self._rrulestr_reverse_test(rrule(MINUTELY, + count=3, + byweekno=53, + byweekday=MO, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMinutelyByEaster(self): + self._rrulestr_reverse_test(rrule(MINUTELY, + count=3, + byeaster=0, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMinutelyByEasterPos(self): + self._rrulestr_reverse_test(rrule(MINUTELY, + count=3, + byeaster=1, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMinutelyByEasterNeg(self): + self._rrulestr_reverse_test(rrule(MINUTELY, + count=3, + byeaster=-1, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMinutelyByHour(self): + self._rrulestr_reverse_test(rrule(MINUTELY, + count=3, + 
byhour=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMinutelyByMinute(self): + self._rrulestr_reverse_test(rrule(MINUTELY, + count=3, + byminute=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMinutelyBySecond(self): + self._rrulestr_reverse_test(rrule(MINUTELY, + count=3, + bysecond=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMinutelyByHourAndMinute(self): + self._rrulestr_reverse_test(rrule(MINUTELY, + count=3, + byhour=(6, 18), + byminute=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMinutelyByHourAndSecond(self): + self._rrulestr_reverse_test(rrule(MINUTELY, + count=3, + byhour=(6, 18), + bysecond=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMinutelyByMinuteAndSecond(self): + self._rrulestr_reverse_test(rrule(MINUTELY, + count=3, + byminute=(6, 18), + bysecond=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMinutelyByHourAndMinuteAndSecond(self): + self._rrulestr_reverse_test(rrule(MINUTELY, + count=3, + byhour=(6, 18), + byminute=(6, 18), + bysecond=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrMinutelyBySetPos(self): + self._rrulestr_reverse_test(rrule(MINUTELY, + count=3, + bysecond=(15, 30, 45), + bysetpos=(3, -3), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrSecondly(self): + self._rrulestr_reverse_test(rrule(SECONDLY, + count=3, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrSecondlyInterval(self): + self._rrulestr_reverse_test(rrule(SECONDLY, + count=3, + interval=2, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrSecondlyIntervalLarge(self): + self._rrulestr_reverse_test(rrule(SECONDLY, + count=3, + interval=90061, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrSecondlyByMonth(self): + self._rrulestr_reverse_test(rrule(SECONDLY, + count=3, + bymonth=(1, 3), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrSecondlyByMonthDay(self): + self._rrulestr_reverse_test(rrule(SECONDLY, + 
count=3, + bymonthday=(1, 3), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrSecondlyByMonthAndMonthDay(self): + self._rrulestr_reverse_test(rrule(SECONDLY, + count=3, + bymonth=(1, 3), + bymonthday=(5, 7), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrSecondlyByWeekDay(self): + self._rrulestr_reverse_test(rrule(SECONDLY, + count=3, + byweekday=(TU, TH), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrSecondlyByNWeekDay(self): + self._rrulestr_reverse_test(rrule(SECONDLY, + count=3, + byweekday=(TU(1), TH(-1)), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrSecondlyByMonthAndWeekDay(self): + self._rrulestr_reverse_test(rrule(SECONDLY, + count=3, + bymonth=(1, 3), + byweekday=(TU, TH), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrSecondlyByMonthAndNWeekDay(self): + self._rrulestr_reverse_test(rrule(SECONDLY, + count=3, + bymonth=(1, 3), + byweekday=(TU(1), TH(-1)), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrSecondlyByMonthDayAndWeekDay(self): + self._rrulestr_reverse_test(rrule(SECONDLY, + count=3, + bymonthday=(1, 3), + byweekday=(TU, TH), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrSecondlyByMonthAndMonthDayAndWeekDay(self): + self._rrulestr_reverse_test(rrule(SECONDLY, + count=3, + bymonth=(1, 3), + bymonthday=(1, 3), + byweekday=(TU, TH), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrSecondlyByYearDay(self): + self._rrulestr_reverse_test(rrule(SECONDLY, + count=4, + byyearday=(1, 100, 200, 365), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrSecondlyByYearDayNeg(self): + self._rrulestr_reverse_test(rrule(SECONDLY, + count=4, + byyearday=(-365, -266, -166, -1), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrSecondlyByMonthAndYearDay(self): + self._rrulestr_reverse_test(rrule(SECONDLY, + count=4, + bymonth=(4, 7), + byyearday=(1, 100, 200, 365), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrSecondlyByMonthAndYearDayNeg(self): + 
self._rrulestr_reverse_test(rrule(SECONDLY, + count=4, + bymonth=(4, 7), + byyearday=(-365, -266, -166, -1), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrSecondlyByWeekNo(self): + self._rrulestr_reverse_test(rrule(SECONDLY, + count=3, + byweekno=20, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrSecondlyByWeekNoAndWeekDay(self): + self._rrulestr_reverse_test(rrule(SECONDLY, + count=3, + byweekno=1, + byweekday=MO, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrSecondlyByWeekNoAndWeekDayLarge(self): + self._rrulestr_reverse_test(rrule(SECONDLY, + count=3, + byweekno=52, + byweekday=SU, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrSecondlyByWeekNoAndWeekDayLast(self): + self._rrulestr_reverse_test(rrule(SECONDLY, + count=3, + byweekno=-1, + byweekday=SU, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrSecondlyByWeekNoAndWeekDay53(self): + self._rrulestr_reverse_test(rrule(SECONDLY, + count=3, + byweekno=53, + byweekday=MO, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrSecondlyByEaster(self): + self._rrulestr_reverse_test(rrule(SECONDLY, + count=3, + byeaster=0, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrSecondlyByEasterPos(self): + self._rrulestr_reverse_test(rrule(SECONDLY, + count=3, + byeaster=1, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrSecondlyByEasterNeg(self): + self._rrulestr_reverse_test(rrule(SECONDLY, + count=3, + byeaster=-1, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrSecondlyByHour(self): + self._rrulestr_reverse_test(rrule(SECONDLY, + count=3, + byhour=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrSecondlyByMinute(self): + self._rrulestr_reverse_test(rrule(SECONDLY, + count=3, + byminute=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrSecondlyBySecond(self): + self._rrulestr_reverse_test(rrule(SECONDLY, + count=3, + bysecond=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrSecondlyByHourAndMinute(self): + 
self._rrulestr_reverse_test(rrule(SECONDLY, + count=3, + byhour=(6, 18), + byminute=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrSecondlyByHourAndSecond(self): + self._rrulestr_reverse_test(rrule(SECONDLY, + count=3, + byhour=(6, 18), + bysecond=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrSecondlyByMinuteAndSecond(self): + self._rrulestr_reverse_test(rrule(SECONDLY, + count=3, + byminute=(6, 18), + bysecond=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrSecondlyByHourAndMinuteAndSecond(self): + self._rrulestr_reverse_test(rrule(SECONDLY, + count=3, + byhour=(6, 18), + byminute=(6, 18), + bysecond=(6, 18), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrSecondlyByHourAndMinuteAndSecondBug(self): + # This explores a bug found by Mathieu Bridon. + self._rrulestr_reverse_test(rrule(SECONDLY, + count=3, + bysecond=(0,), + byminute=(1,), + dtstart=datetime(2010, 3, 22, 12, 1))) + + def testToStrWithWkSt(self): + self._rrulestr_reverse_test(rrule(WEEKLY, + count=3, + wkst=SU, + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testToStrLongIntegers(self): + if not PY3: # There is no longs in python3 + self._rrulestr_reverse_test(rrule(MINUTELY, + count=long(2), + interval=long(2), + bymonth=long(2), + byweekday=long(3), + byhour=long(6), + byminute=long(6), + bysecond=long(6), + dtstart=datetime(1997, 9, 2, 9, 0))) + + self._rrulestr_reverse_test(rrule(YEARLY, + count=long(2), + bymonthday=long(5), + byweekno=long(2), + dtstart=datetime(1997, 9, 2, 9, 0))) + + def testReplaceIfSet(self): + rr = rrule(YEARLY, + count=1, + bymonthday=5, + dtstart=datetime(1997, 1, 1)) + newrr = rr.replace(bymonthday=6) + self.assertEqual(list(rr), [datetime(1997, 1, 5)]) + self.assertEqual(list(newrr), + [datetime(1997, 1, 6)]) + + def testReplaceIfNotSet(self): + rr = rrule(YEARLY, + count=1, + dtstart=datetime(1997, 1, 1)) + newrr = rr.replace(bymonthday=6) + self.assertEqual(list(rr), [datetime(1997, 1, 1)]) + 
self.assertEqual(list(newrr), + [datetime(1997, 1, 6)]) + + +@pytest.mark.rrule +@freeze_time(datetime(2018, 3, 6, 5, 36, tzinfo=tz.UTC)) +def test_generated_aware_dtstart(): + dtstart_exp = datetime(2018, 3, 6, 5, 36, tzinfo=tz.UTC) + UNTIL = datetime(2018, 3, 6, 8, 0, tzinfo=tz.UTC) + + rule_without_dtstart = rrule(freq=HOURLY, until=UNTIL) + rule_with_dtstart = rrule(freq=HOURLY, dtstart=dtstart_exp, until=UNTIL) + assert list(rule_without_dtstart) == list(rule_with_dtstart) + + +@pytest.mark.rrule +@pytest.mark.rrulestr +@pytest.mark.xfail(reason="rrulestr loses time zone, gh issue #637") +@freeze_time(datetime(2018, 3, 6, 5, 36, tzinfo=tz.UTC)) +def test_generated_aware_dtstart_rrulestr(): + rrule_without_dtstart = rrule(freq=HOURLY, + until=datetime(2018, 3, 6, 8, 0, + tzinfo=tz.UTC)) + rrule_r = rrulestr(str(rrule_without_dtstart)) + + assert list(rrule_r) == list(rrule_without_dtstart) + + +@pytest.mark.rruleset +class RRuleSetTest(unittest.TestCase): + def testSet(self): + rrset = rruleset() + rrset.rrule(rrule(YEARLY, count=2, byweekday=TU, + dtstart=datetime(1997, 9, 2, 9, 0))) + rrset.rrule(rrule(YEARLY, count=1, byweekday=TH, + dtstart=datetime(1997, 9, 2, 9, 0))) + self.assertEqual(list(rrset), + [datetime(1997, 9, 2, 9, 0), + datetime(1997, 9, 4, 9, 0), + datetime(1997, 9, 9, 9, 0)]) + + def testSetDate(self): + rrset = rruleset() + rrset.rrule(rrule(YEARLY, count=1, byweekday=TU, + dtstart=datetime(1997, 9, 2, 9, 0))) + rrset.rdate(datetime(1997, 9, 4, 9)) + rrset.rdate(datetime(1997, 9, 9, 9)) + self.assertEqual(list(rrset), + [datetime(1997, 9, 2, 9, 0), + datetime(1997, 9, 4, 9, 0), + datetime(1997, 9, 9, 9, 0)]) + + def testSetExRule(self): + rrset = rruleset() + rrset.rrule(rrule(YEARLY, count=6, byweekday=(TU, TH), + dtstart=datetime(1997, 9, 2, 9, 0))) + rrset.exrule(rrule(YEARLY, count=3, byweekday=TH, + dtstart=datetime(1997, 9, 2, 9, 0))) + self.assertEqual(list(rrset), + [datetime(1997, 9, 2, 9, 0), + datetime(1997, 9, 9, 9, 0), + 
datetime(1997, 9, 16, 9, 0)]) + + def testSetExDate(self): + rrset = rruleset() + rrset.rrule(rrule(YEARLY, count=6, byweekday=(TU, TH), + dtstart=datetime(1997, 9, 2, 9, 0))) + rrset.exdate(datetime(1997, 9, 4, 9)) + rrset.exdate(datetime(1997, 9, 11, 9)) + rrset.exdate(datetime(1997, 9, 18, 9)) + self.assertEqual(list(rrset), + [datetime(1997, 9, 2, 9, 0), + datetime(1997, 9, 9, 9, 0), + datetime(1997, 9, 16, 9, 0)]) + + def testSetExDateRevOrder(self): + rrset = rruleset() + rrset.rrule(rrule(MONTHLY, count=5, bymonthday=10, + dtstart=datetime(2004, 1, 1, 9, 0))) + rrset.exdate(datetime(2004, 4, 10, 9, 0)) + rrset.exdate(datetime(2004, 2, 10, 9, 0)) + self.assertEqual(list(rrset), + [datetime(2004, 1, 10, 9, 0), + datetime(2004, 3, 10, 9, 0), + datetime(2004, 5, 10, 9, 0)]) + + def testSetDateAndExDate(self): + rrset = rruleset() + rrset.rdate(datetime(1997, 9, 2, 9)) + rrset.rdate(datetime(1997, 9, 4, 9)) + rrset.rdate(datetime(1997, 9, 9, 9)) + rrset.rdate(datetime(1997, 9, 11, 9)) + rrset.rdate(datetime(1997, 9, 16, 9)) + rrset.rdate(datetime(1997, 9, 18, 9)) + rrset.exdate(datetime(1997, 9, 4, 9)) + rrset.exdate(datetime(1997, 9, 11, 9)) + rrset.exdate(datetime(1997, 9, 18, 9)) + self.assertEqual(list(rrset), + [datetime(1997, 9, 2, 9, 0), + datetime(1997, 9, 9, 9, 0), + datetime(1997, 9, 16, 9, 0)]) + + def testSetDateAndExRule(self): + rrset = rruleset() + rrset.rdate(datetime(1997, 9, 2, 9)) + rrset.rdate(datetime(1997, 9, 4, 9)) + rrset.rdate(datetime(1997, 9, 9, 9)) + rrset.rdate(datetime(1997, 9, 11, 9)) + rrset.rdate(datetime(1997, 9, 16, 9)) + rrset.rdate(datetime(1997, 9, 18, 9)) + rrset.exrule(rrule(YEARLY, count=3, byweekday=TH, + dtstart=datetime(1997, 9, 2, 9, 0))) + self.assertEqual(list(rrset), + [datetime(1997, 9, 2, 9, 0), + datetime(1997, 9, 9, 9, 0), + datetime(1997, 9, 16, 9, 0)]) + + def testSetCount(self): + rrset = rruleset() + rrset.rrule(rrule(YEARLY, count=6, byweekday=(TU, TH), + dtstart=datetime(1997, 9, 2, 9, 0))) + 
rrset.exrule(rrule(YEARLY, count=3, byweekday=TH, + dtstart=datetime(1997, 9, 2, 9, 0))) + self.assertEqual(rrset.count(), 3) + + def testSetCachePre(self): + rrset = rruleset() + rrset.rrule(rrule(YEARLY, count=2, byweekday=TU, + dtstart=datetime(1997, 9, 2, 9, 0))) + rrset.rrule(rrule(YEARLY, count=1, byweekday=TH, + dtstart=datetime(1997, 9, 2, 9, 0))) + self.assertEqual(list(rrset), + [datetime(1997, 9, 2, 9, 0), + datetime(1997, 9, 4, 9, 0), + datetime(1997, 9, 9, 9, 0)]) + + def testSetCachePost(self): + rrset = rruleset(cache=True) + rrset.rrule(rrule(YEARLY, count=2, byweekday=TU, + dtstart=datetime(1997, 9, 2, 9, 0))) + rrset.rrule(rrule(YEARLY, count=1, byweekday=TH, + dtstart=datetime(1997, 9, 2, 9, 0))) + for x in rrset: pass + self.assertEqual(list(rrset), + [datetime(1997, 9, 2, 9, 0), + datetime(1997, 9, 4, 9, 0), + datetime(1997, 9, 9, 9, 0)]) + + def testSetCachePostInternal(self): + rrset = rruleset(cache=True) + rrset.rrule(rrule(YEARLY, count=2, byweekday=TU, + dtstart=datetime(1997, 9, 2, 9, 0))) + rrset.rrule(rrule(YEARLY, count=1, byweekday=TH, + dtstart=datetime(1997, 9, 2, 9, 0))) + for x in rrset: pass + self.assertEqual(list(rrset._cache), + [datetime(1997, 9, 2, 9, 0), + datetime(1997, 9, 4, 9, 0), + datetime(1997, 9, 9, 9, 0)]) + + def testSetRRuleCount(self): + # Test that the count is updated when an rrule is added + rrset = rruleset(cache=False) + for cache in (True, False): + rrset = rruleset(cache=cache) + rrset.rrule(rrule(YEARLY, count=2, byweekday=TH, + dtstart=datetime(1983, 4, 1))) + rrset.rrule(rrule(WEEKLY, count=4, byweekday=FR, + dtstart=datetime(1991, 6, 3))) + + # Check the length twice - first one sets a cache, second reads it + self.assertEqual(rrset.count(), 6) + self.assertEqual(rrset.count(), 6) + + # This should invalidate the cache and force an update + rrset.rrule(rrule(MONTHLY, count=3, dtstart=datetime(1994, 1, 3))) + + self.assertEqual(rrset.count(), 9) + self.assertEqual(rrset.count(), 9) + + def 
testSetRDateCount(self): + # Test that the count is updated when an rdate is added + rrset = rruleset(cache=False) + for cache in (True, False): + rrset = rruleset(cache=cache) + rrset.rrule(rrule(YEARLY, count=2, byweekday=TH, + dtstart=datetime(1983, 4, 1))) + rrset.rrule(rrule(WEEKLY, count=4, byweekday=FR, + dtstart=datetime(1991, 6, 3))) + + # Check the length twice - first one sets a cache, second reads it + self.assertEqual(rrset.count(), 6) + self.assertEqual(rrset.count(), 6) + + # This should invalidate the cache and force an update + rrset.rdate(datetime(1993, 2, 14)) + + self.assertEqual(rrset.count(), 7) + self.assertEqual(rrset.count(), 7) + + def testSetExRuleCount(self): + # Test that the count is updated when an exrule is added + rrset = rruleset(cache=False) + for cache in (True, False): + rrset = rruleset(cache=cache) + rrset.rrule(rrule(YEARLY, count=2, byweekday=TH, + dtstart=datetime(1983, 4, 1))) + rrset.rrule(rrule(WEEKLY, count=4, byweekday=FR, + dtstart=datetime(1991, 6, 3))) + + # Check the length twice - first one sets a cache, second reads it + self.assertEqual(rrset.count(), 6) + self.assertEqual(rrset.count(), 6) + + # This should invalidate the cache and force an update + rrset.exrule(rrule(WEEKLY, count=2, interval=2, + dtstart=datetime(1991, 6, 14))) + + self.assertEqual(rrset.count(), 4) + self.assertEqual(rrset.count(), 4) + + def testSetExDateCount(self): + # Test that the count is updated when an rdate is added + for cache in (True, False): + rrset = rruleset(cache=cache) + rrset.rrule(rrule(YEARLY, count=2, byweekday=TH, + dtstart=datetime(1983, 4, 1))) + rrset.rrule(rrule(WEEKLY, count=4, byweekday=FR, + dtstart=datetime(1991, 6, 3))) + + # Check the length twice - first one sets a cache, second reads it + self.assertEqual(rrset.count(), 6) + self.assertEqual(rrset.count(), 6) + + # This should invalidate the cache and force an update + rrset.exdate(datetime(1991, 6, 28)) + + self.assertEqual(rrset.count(), 5) + 
self.assertEqual(rrset.count(), 5) + + +class WeekdayTest(unittest.TestCase): + def testInvalidNthWeekday(self): + with self.assertRaises(ValueError): + FR(0) + + def testWeekdayCallable(self): + # Calling a weekday instance generates a new weekday instance with the + # value of n changed. + from dateutil.rrule import weekday + self.assertEqual(MO(1), weekday(0, 1)) + + # Calling a weekday instance with the identical n returns the original + # object + FR_3 = weekday(4, 3) + self.assertIs(FR_3(3), FR_3) + + def testWeekdayEquality(self): + # Two weekday objects are not equal if they have different values for n + self.assertNotEqual(TH, TH(-1)) + self.assertNotEqual(SA(3), SA(2)) + + def testWeekdayEqualitySubclass(self): + # Two weekday objects equal if their "weekday" and "n" attributes are + # available and the same + class BasicWeekday(object): + def __init__(self, weekday): + self.weekday = weekday + + class BasicNWeekday(BasicWeekday): + def __init__(self, weekday, n=None): + super(BasicNWeekday, self).__init__(weekday) + self.n = n + + MO_Basic = BasicWeekday(0) + + self.assertNotEqual(MO, MO_Basic) + self.assertNotEqual(MO(1), MO_Basic) + + TU_BasicN = BasicNWeekday(1) + + self.assertEqual(TU, TU_BasicN) + self.assertNotEqual(TU(3), TU_BasicN) + + WE_Basic3 = BasicNWeekday(2, 3) + self.assertEqual(WE(3), WE_Basic3) + self.assertNotEqual(WE(2), WE_Basic3) + + def testWeekdayReprNoN(self): + no_n_reprs = ('MO', 'TU', 'WE', 'TH', 'FR', 'SA', 'SU') + no_n_wdays = (MO, TU, WE, TH, FR, SA, SU) + + for repstr, wday in zip(no_n_reprs, no_n_wdays): + self.assertEqual(repr(wday), repstr) + + def testWeekdayReprWithN(self): + with_n_reprs = ('WE(+1)', 'TH(-2)', 'SU(+3)') + with_n_wdays = (WE(1), TH(-2), SU(+3)) + + for repstr, wday in zip(with_n_reprs, with_n_wdays): + self.assertEqual(repr(wday), repstr) diff --git a/ext/dateutil/test/test_tz.py b/ext/dateutil/test/test_tz.py new file mode 100644 index 0000000000..54dfb1bd0c --- /dev/null +++ 
b/ext/dateutil/test/test_tz.py @@ -0,0 +1,2603 @@ +# -*- coding: utf-8 -*- +from __future__ import unicode_literals +from ._common import PicklableMixin +from ._common import TZEnvContext, TZWinContext +from ._common import WarningTestMixin +from ._common import ComparesEqual + +from datetime import datetime, timedelta +from datetime import time as dt_time +from datetime import tzinfo +from six import BytesIO, StringIO +import unittest + +import sys +import base64 +import copy + +from functools import partial + +IS_WIN = sys.platform.startswith('win') + +import pytest + +# dateutil imports +from dateutil.relativedelta import relativedelta, SU, TH +from dateutil.parser import parse +from dateutil import tz as tz +from dateutil import zoneinfo + +try: + from dateutil import tzwin +except ImportError as e: + if IS_WIN: + raise e + else: + pass + +MISSING_TARBALL = ("This test fails if you don't have the dateutil " + "timezone file installed. Please read the README") + +TZFILE_EST5EDT = b""" +VFppZgAAAAAAAAAAAAAAAAAAAAAAAAAEAAAABAAAAAAAAADrAAAABAAAABCeph5wn7rrYKCGAHCh +ms1gomXicKOD6eCkaq5wpTWnYKZTyvCnFYlgqDOs8Kj+peCqE47wqt6H4KvzcPCsvmngrdNS8K6e +S+CvszTwsH4t4LGcUXCyZ0pgs3wzcLRHLGC1XBVwticOYLc793C4BvBguRvZcLnm0mC7BPXwu8a0 +YLzk1/C9r9DgvsS58L+PsuDApJvwwW+U4MKEffDDT3bgxGRf8MUvWODGTXxwxw864MgtXnDI+Fdg +yg1AcMrYOWDLiPBw0iP0cNJg++DTdeTw1EDd4NVVxvDWIL/g1zWo8NgAoeDZFYrw2eCD4Nr+p3Db +wGXg3N6JcN2pgmDevmtw34lkYOCeTXDhaUZg4n4vcONJKGDkXhFw5Vcu4OZHLfDnNxDg6CcP8OkW +8uDqBvHw6vbU4Ovm0/Ds1rbg7ca18O6/02Dvr9Jw8J+1YPGPtHDyf5dg82+WcPRfeWD1T3hw9j9b +YPcvWnD4KHfg+Q88cPoIWeD6+Fjw++g74PzYOvD9yB3g/rgc8P+n/+AAl/7wAYfh4AJ34PADcP5g +BGD9cAVQ4GAGQN9wBzDCYAeNGXAJEKRgCa2U8ArwhmAL4IVwDNmi4A3AZ3AOuYTgD6mD8BCZZuAR +iWXwEnlI4BNpR/AUWSrgFUkp8BY5DOAXKQvwGCIpYBkI7fAaAgtgGvIKcBvh7WAc0exwHcHPYB6x +znAfobFgIHYA8CGBk2AiVeLwI2qv4CQ1xPAlSpHgJhWm8Ccqc+An/sNwKQpV4CnepXAq6jfgK76H +cCzTVGAtnmlwLrM2YC9+S3AwkxhgMWdn8DJy+mAzR0nwNFLcYDUnK/A2Mr5gNwcN8Dgb2uA45u/w 
+Ofu84DrG0fA7257gPK/ucD27gOA+j9BwP5ti4EBvsnBBhH9gQk+UcENkYWBEL3ZwRURDYEYPWHBH +JCVgR/h08EkEB2BJ2FbwSuPpYEu4OPBMzQXgTZga8E6s5+BPd/zwUIzJ4FFhGXBSbKvgU0D7cFRM +jeBVIN1wVixv4FcAv3BYFYxgWOChcFn1bmBawINwW9VQYFypn/BdtTJgXomB8F+VFGBgaWPwYX4w +4GJJRfBjXhLgZCkn8GU99OBmEkRwZx3W4GfyJnBo/bjgadIIcGrdmuBrsepwbMa3YG2RzHBupplg +b3GucHCGe2BxWsrwcmZdYHM6rPB0Rj9gdRqO8HYvW+B2+nDweA894HjaUvB57x/gero08HvPAeB8 +o1Fwfa7j4H6DM3B/jsXgAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQAB +AAEAAQABAgMBAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQAB +AAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEA +AQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQAB +AAEAAQABAAEAAQABAAEAAQABAAEAAf//x8ABAP//ubAABP//x8ABCP//x8ABDEVEVABFU1QARVdU +AEVQVAAAAAABAAAAAQ== +""" + +EUROPE_HELSINKI = b""" +VFppZgAAAAAAAAAAAAAAAAAAAAAAAAAFAAAABQAAAAAAAAB1AAAABQAAAA2kc28Yy85RYMy/hdAV +I+uQFhPckBcDzZAX876QGOOvkBnToJAaw5GQG7y9EBysrhAdnJ8QHoyQEB98gRAgbHIQIVxjECJM +VBAjPEUQJCw2ECUcJxAmDBgQJwVDkCf1NJAo5SWQKdUWkCrFB5ArtPiQLKTpkC2U2pAuhMuQL3S8 +kDBkrZAxXdkQMnK0EDM9uxA0UpYQNR2dEDYyeBA2/X8QOBuUkDjdYRA5+3aQOr1DEDvbWJA8pl+Q +Pbs6kD6GQZA/mxyQQGYjkEGEORBCRgWQQ2QbEEQl55BFQ/0QRgXJkEcj3xBH7uYQSQPBEEnOyBBK +46MQS66qEEzMv5BNjowQTqyhkE9ubhBQjIOQUVeKkFJsZZBTN2yQVExHkFUXTpBWLCmQVvcwkFgV +RhBY1xKQWfUoEFq29JBb1QoQXKAREF207BBef/MQX5TOEGBf1RBhfeqQYj+3EGNdzJBkH5kQZT2u +kGYItZBnHZCQZ+iXkGj9cpBpyHmQat1UkGuoW5BsxnEQbYg9kG6mUxBvaB+QcIY1EHFRPBByZhcQ +czEeEHRF+RB1EQAQdi8VkHbw4hB4DveQeNDEEHnu2ZB6sKYQe867kHyZwpB9rp2QfnmkkH+Of5AC +AQIDBAMEAwQDBAMEAwQDBAMEAwQDBAMEAwQDBAMEAwQDBAMEAwQDBAMEAwQDBAMEAwQDBAMEAwQD +BAMEAwQDBAMEAwQDBAMEAwQDBAMEAwQDBAMEAwQDBAMEAwQDBAMEAwQDBAMEAwQDBAMEAwQDBAME +AwQAABdoAAAAACowAQQAABwgAAkAACowAQQAABwgAAlITVQARUVTVABFRVQAAAAAAQEAAAABAQ== +""" + +NEW_YORK = b""" +VFppZgAAAAAAAAAAAAAAAAAAAAAAAAAEAAAABAAAABcAAADrAAAABAAAABCeph5wn7rrYKCGAHCh +ms1gomXicKOD6eCkaq5wpTWnYKZTyvCnFYlgqDOs8Kj+peCqE47wqt6H4KvzcPCsvmngrdNS8K6e 
+S+CvszTwsH4t4LGcUXCyZ0pgs3wzcLRHLGC1XBVwticOYLc793C4BvBguRvZcLnm0mC7BPXwu8a0 +YLzk1/C9r9DgvsS58L+PsuDApJvwwW+U4MKEffDDT3bgxGRf8MUvWODGTXxwxw864MgtXnDI+Fdg +yg1AcMrYOWDLiPBw0iP0cNJg++DTdeTw1EDd4NVVxvDWIL/g1zWo8NgAoeDZFYrw2eCD4Nr+p3Db +wGXg3N6JcN2pgmDevmtw34lkYOCeTXDhaUZg4n4vcONJKGDkXhFw5Vcu4OZHLfDnNxDg6CcP8OkW +8uDqBvHw6vbU4Ovm0/Ds1rbg7ca18O6/02Dvr9Jw8J+1YPGPtHDyf5dg82+WcPRfeWD1T3hw9j9b +YPcvWnD4KHfg+Q88cPoIWeD6+Fjw++g74PzYOvD9yB3g/rgc8P+n/+AAl/7wAYfh4AJ34PADcP5g +BGD9cAVQ4GEGQN9yBzDCYgeNGXMJEKRjCa2U9ArwhmQL4IV1DNmi5Q3AZ3YOuYTmD6mD9xCZZucR +iWX4EnlI6BNpR/kUWSrpFUkp+RY5DOoXKQv6GCIpaxkI7fsaAgtsGvIKfBvh7Wwc0ex8HcHPbR6x +zn0fobFtIHYA/SGBk20iVeL+I2qv7iQ1xP4lSpHuJhWm/ycqc+8n/sOAKQpV8CnepYAq6jfxK76H +gSzTVHItnmmCLrM2cy9+S4MwkxhzMWdoBDJy+nQzR0oENFLcdTUnLAU2Mr51NwcOBjgb2vY45vAG +Ofu89jrG0gY72572PK/uhj27gPY+j9CGP5ti9kBvsoZBhH92Qk+UhkNkYXZEL3aHRURDd0XzqQdH +LV/3R9OLB0kNQfdJs20HSu0j90uciYdM1kB3TXxrh062IndPXE2HUJYEd1E8L4dSdeZ3UxwRh1RV +yHdU+/OHVjWqd1blEAdYHsb3WMTyB1n+qPdapNQHW96K91yEtgddvmz3XmSYB1+eTvdgTbSHYYdr +d2ItlodjZ013ZA14h2VHL3dl7VqHZycRd2fNPIdpBvN3aa0eh2rm1XdrljsHbM/x9212HQdur9P3 +b1X/B3CPtfdxNeEHcm+X93MVwwd0T3n3dP7fh3Y4lnd23sGHeBh4d3i+o4d5+Fp3ep6Fh3vYPHd8 +fmeHfbged35eSYd/mAB3AAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQAB +AAEAAQABAgMBAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQAB +AAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEA +AQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQAB +AAEAAQABAAEAAQABAAEAAQABAAEAAf//x8ABAP//ubAABP//x8ABCP//x8ABDEVEVABFU1QARVdU +AEVQVAAEslgAAAAAAQWk7AEAAAACB4YfggAAAAMJZ1MDAAAABAtIhoQAAAAFDSsLhQAAAAYPDD8G +AAAABxDtcocAAAAIEs6mCAAAAAkVn8qJAAAACheA/goAAAALGWIxiwAAAAwdJeoMAAAADSHa5Q0A +AAAOJZ6djgAAAA8nf9EPAAAAECpQ9ZAAAAARLDIpEQAAABIuE1ySAAAAEzDnJBMAAAAUM7hIlAAA +ABU2jBAVAAAAFkO3G5YAAAAXAAAAAQAAAAE= +""" + +TZICAL_EST5EDT = """ +BEGIN:VTIMEZONE +TZID:US-Eastern +LAST-MODIFIED:19870101T000000Z +TZURL:http://zones.stds_r_us.net/tz/US-Eastern +BEGIN:STANDARD 
+DTSTART:19671029T020000 +RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10 +TZOFFSETFROM:-0400 +TZOFFSETTO:-0500 +TZNAME:EST +END:STANDARD +BEGIN:DAYLIGHT +DTSTART:19870405T020000 +RRULE:FREQ=YEARLY;BYDAY=1SU;BYMONTH=4 +TZOFFSETFROM:-0500 +TZOFFSETTO:-0400 +TZNAME:EDT +END:DAYLIGHT +END:VTIMEZONE +""" + +TZICAL_PST8PDT = """ +BEGIN:VTIMEZONE +TZID:US-Pacific +LAST-MODIFIED:19870101T000000Z +BEGIN:STANDARD +DTSTART:19671029T020000 +RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10 +TZOFFSETFROM:-0700 +TZOFFSETTO:-0800 +TZNAME:PST +END:STANDARD +BEGIN:DAYLIGHT +DTSTART:19870405T020000 +RRULE:FREQ=YEARLY;BYDAY=1SU;BYMONTH=4 +TZOFFSETFROM:-0800 +TZOFFSETTO:-0700 +TZNAME:PDT +END:DAYLIGHT +END:VTIMEZONE +""" + +EST_TUPLE = ('EST', timedelta(hours=-5), timedelta(hours=0)) +EDT_TUPLE = ('EDT', timedelta(hours=-4), timedelta(hours=1)) + + +### +# Helper functions +def get_timezone_tuple(dt): + """Retrieve a (tzname, utcoffset, dst) tuple for a given DST""" + return dt.tzname(), dt.utcoffset(), dt.dst() + + +### +# Mix-ins +class context_passthrough(object): + def __init__(*args, **kwargs): + pass + + def __enter__(*args, **kwargs): + pass + + def __exit__(*args, **kwargs): + pass + + +class TzFoldMixin(object): + """ Mix-in class for testing ambiguous times """ + def gettz(self, tzname): + raise NotImplementedError + + def _get_tzname(self, tzname): + return tzname + + def _gettz_context(self, tzname): + return context_passthrough() + + def testFoldPositiveUTCOffset(self): + # Test that we can resolve ambiguous times + tzname = self._get_tzname('Australia/Sydney') + + with self._gettz_context(tzname): + SYD = self.gettz(tzname) + + t0_u = datetime(2012, 3, 31, 15, 30, tzinfo=tz.tzutc()) # AEST + t1_u = datetime(2012, 3, 31, 16, 30, tzinfo=tz.tzutc()) # AEDT + + t0_syd0 = t0_u.astimezone(SYD) + t1_syd1 = t1_u.astimezone(SYD) + + self.assertEqual(t0_syd0.replace(tzinfo=None), + datetime(2012, 4, 1, 2, 30)) + + self.assertEqual(t1_syd1.replace(tzinfo=None), + datetime(2012, 4, 1, 2, 30)) + + 
self.assertEqual(t0_syd0.utcoffset(), timedelta(hours=11)) + self.assertEqual(t1_syd1.utcoffset(), timedelta(hours=10)) + + def testGapPositiveUTCOffset(self): + # Test that we don't have a problem around gaps. + tzname = self._get_tzname('Australia/Sydney') + + with self._gettz_context(tzname): + SYD = self.gettz(tzname) + + t0_u = datetime(2012, 10, 6, 15, 30, tzinfo=tz.tzutc()) # AEST + t1_u = datetime(2012, 10, 6, 16, 30, tzinfo=tz.tzutc()) # AEDT + + t0 = t0_u.astimezone(SYD) + t1 = t1_u.astimezone(SYD) + + self.assertEqual(t0.replace(tzinfo=None), + datetime(2012, 10, 7, 1, 30)) + + self.assertEqual(t1.replace(tzinfo=None), + datetime(2012, 10, 7, 3, 30)) + + self.assertEqual(t0.utcoffset(), timedelta(hours=10)) + self.assertEqual(t1.utcoffset(), timedelta(hours=11)) + + def testFoldNegativeUTCOffset(self): + # Test that we can resolve ambiguous times + tzname = self._get_tzname('America/Toronto') + + with self._gettz_context(tzname): + TOR = self.gettz(tzname) + + t0_u = datetime(2011, 11, 6, 5, 30, tzinfo=tz.tzutc()) + t1_u = datetime(2011, 11, 6, 6, 30, tzinfo=tz.tzutc()) + + t0_tor = t0_u.astimezone(TOR) + t1_tor = t1_u.astimezone(TOR) + + self.assertEqual(t0_tor.replace(tzinfo=None), + datetime(2011, 11, 6, 1, 30)) + + self.assertEqual(t1_tor.replace(tzinfo=None), + datetime(2011, 11, 6, 1, 30)) + + self.assertNotEqual(t0_tor.tzname(), t1_tor.tzname()) + self.assertEqual(t0_tor.utcoffset(), timedelta(hours=-4.0)) + self.assertEqual(t1_tor.utcoffset(), timedelta(hours=-5.0)) + + def testGapNegativeUTCOffset(self): + # Test that we don't have a problem around gaps. 
+ tzname = self._get_tzname('America/Toronto') + + with self._gettz_context(tzname): + TOR = self.gettz(tzname) + + t0_u = datetime(2011, 3, 13, 6, 30, tzinfo=tz.tzutc()) + t1_u = datetime(2011, 3, 13, 7, 30, tzinfo=tz.tzutc()) + + t0 = t0_u.astimezone(TOR) + t1 = t1_u.astimezone(TOR) + + self.assertEqual(t0.replace(tzinfo=None), + datetime(2011, 3, 13, 1, 30)) + + self.assertEqual(t1.replace(tzinfo=None), + datetime(2011, 3, 13, 3, 30)) + + self.assertNotEqual(t0, t1) + self.assertEqual(t0.utcoffset(), timedelta(hours=-5.0)) + self.assertEqual(t1.utcoffset(), timedelta(hours=-4.0)) + + def testFoldLondon(self): + tzname = self._get_tzname('Europe/London') + + with self._gettz_context(tzname): + LON = self.gettz(tzname) + UTC = tz.tzutc() + + t0_u = datetime(2013, 10, 27, 0, 30, tzinfo=UTC) # BST + t1_u = datetime(2013, 10, 27, 1, 30, tzinfo=UTC) # GMT + + t0 = t0_u.astimezone(LON) + t1 = t1_u.astimezone(LON) + + self.assertEqual(t0.replace(tzinfo=None), + datetime(2013, 10, 27, 1, 30)) + + self.assertEqual(t1.replace(tzinfo=None), + datetime(2013, 10, 27, 1, 30)) + + self.assertEqual(t0.utcoffset(), timedelta(hours=1)) + self.assertEqual(t1.utcoffset(), timedelta(hours=0)) + + def testFoldIndependence(self): + tzname = self._get_tzname('America/New_York') + + with self._gettz_context(tzname): + NYC = self.gettz(tzname) + UTC = tz.tzutc() + hour = timedelta(hours=1) + + # Firmly 2015-11-01 0:30 EDT-4 + pre_dst = datetime(2015, 11, 1, 0, 30, tzinfo=NYC) + + # Ambiguous between 2015-11-01 1:30 EDT-4 and 2015-11-01 1:30 EST-5 + in_dst = pre_dst + hour + in_dst_tzname_0 = in_dst.tzname() # Stash the tzname - EDT + + # Doing the arithmetic in UTC creates a date that is unambiguously + # 2015-11-01 1:30 EDT-5 + in_dst_via_utc = (pre_dst.astimezone(UTC) + 2*hour).astimezone(NYC) + + # Make sure the dates are actually ambiguous + self.assertEqual(in_dst, in_dst_via_utc) + + # Make sure we got the right folding behavior + self.assertNotEqual(in_dst_via_utc.tzname(), 
in_dst_tzname_0) + + # Now check to make sure in_dst's tzname hasn't changed + self.assertEqual(in_dst_tzname_0, in_dst.tzname()) + + def testInZoneFoldEquality(self): + # Two datetimes in the same zone are considered to be equal if their + # wall times are equal, even if they have different absolute times. + + tzname = self._get_tzname('America/New_York') + + with self._gettz_context(tzname): + NYC = self.gettz(tzname) + UTC = tz.tzutc() + + dt0 = datetime(2011, 11, 6, 1, 30, tzinfo=NYC) + dt1 = tz.enfold(dt0, fold=1) + + # Make sure these actually represent different times + self.assertNotEqual(dt0.astimezone(UTC), dt1.astimezone(UTC)) + + # Test that they compare equal + self.assertEqual(dt0, dt1) + + def _test_ambiguous_time(self, dt, tzid, ambiguous): + # This is a test to check that the individual is_ambiguous values + # on the _tzinfo subclasses work. + tzname = self._get_tzname(tzid) + + with self._gettz_context(tzname): + tzi = self.gettz(tzname) + + self.assertEqual(tz.datetime_ambiguous(dt, tz=tzi), ambiguous) + + def testAmbiguousNegativeUTCOffset(self): + self._test_ambiguous_time(datetime(2015, 11, 1, 1, 30), + 'America/New_York', True) + + def testAmbiguousPositiveUTCOffset(self): + self._test_ambiguous_time(datetime(2012, 4, 1, 2, 30), + 'Australia/Sydney', True) + + def testUnambiguousNegativeUTCOffset(self): + self._test_ambiguous_time(datetime(2015, 11, 1, 2, 30), + 'America/New_York', False) + + def testUnambiguousPositiveUTCOffset(self): + self._test_ambiguous_time(datetime(2012, 4, 1, 3, 30), + 'Australia/Sydney', False) + + def testUnambiguousGapNegativeUTCOffset(self): + # Imaginary time + self._test_ambiguous_time(datetime(2011, 3, 13, 2, 30), + 'America/New_York', False) + + def testUnambiguousGapPositiveUTCOffset(self): + # Imaginary time + self._test_ambiguous_time(datetime(2012, 10, 7, 2, 30), + 'Australia/Sydney', False) + + def _test_imaginary_time(self, dt, tzid, exists): + tzname = self._get_tzname(tzid) + with 
self._gettz_context(tzname): + tzi = self.gettz(tzname) + + self.assertEqual(tz.datetime_exists(dt, tz=tzi), exists) + + def testImaginaryNegativeUTCOffset(self): + self._test_imaginary_time(datetime(2011, 3, 13, 2, 30), + 'America/New_York', False) + + def testNotImaginaryNegativeUTCOffset(self): + self._test_imaginary_time(datetime(2011, 3, 13, 1, 30), + 'America/New_York', True) + + def testImaginaryPositiveUTCOffset(self): + self._test_imaginary_time(datetime(2012, 10, 7, 2, 30), + 'Australia/Sydney', False) + + def testNotImaginaryPositiveUTCOffset(self): + self._test_imaginary_time(datetime(2012, 10, 7, 1, 30), + 'Australia/Sydney', True) + + def testNotImaginaryFoldNegativeUTCOffset(self): + self._test_imaginary_time(datetime(2015, 11, 1, 1, 30), + 'America/New_York', True) + + def testNotImaginaryFoldPositiveUTCOffset(self): + self._test_imaginary_time(datetime(2012, 4, 1, 3, 30), + 'Australia/Sydney', True) + + @unittest.skip("Known failure in Python 3.6.") + def testEqualAmbiguousComparison(self): + tzname = self._get_tzname('Australia/Sydney') + + with self._gettz_context(tzname): + SYD0 = self.gettz(tzname) + SYD1 = self.gettz(tzname) + + t0_u = datetime(2012, 3, 31, 14, 30, tzinfo=tz.tzutc()) # AEST + + t0_syd0 = t0_u.astimezone(SYD0) + t0_syd1 = t0_u.astimezone(SYD1) + + # This is considered an "inter-zone comparison" because it's an + # ambiguous datetime. 
+ self.assertEqual(t0_syd0, t0_syd1) + + +class TzWinFoldMixin(object): + def get_args(self, tzname): + return (tzname, ) + + class context(object): + def __init__(*args, **kwargs): + pass + + def __enter__(*args, **kwargs): + pass + + def __exit__(*args, **kwargs): + pass + + def get_utc_transitions(self, tzi, year, gap): + dston, dstoff = tzi.transitions(year) + if gap: + t_n = dston - timedelta(minutes=30) + + t0_u = t_n.replace(tzinfo=tzi).astimezone(tz.tzutc()) + t1_u = t0_u + timedelta(hours=1) + else: + # Get 1 hour before the first ambiguous date + t_n = dstoff - timedelta(minutes=30) + + t0_u = t_n.replace(tzinfo=tzi).astimezone(tz.tzutc()) + t_n += timedelta(hours=1) # Naive ambiguous date + t0_u = t0_u + timedelta(hours=1) # First ambiguous date + t1_u = t0_u + timedelta(hours=1) # Second ambiguous date + + return t_n, t0_u, t1_u + + def testFoldPositiveUTCOffset(self): + # Test that we can resolve ambiguous times + tzname = 'AUS Eastern Standard Time' + args = self.get_args(tzname) + + with self.context(tzname): + # Calling fromutc() alters the tzfile object + SYD = self.tzclass(*args) + + # Get the transition time in UTC from the object, because + # Windows doesn't store historical info + t_n, t0_u, t1_u = self.get_utc_transitions(SYD, 2012, False) + + # Using fresh tzfiles + t0_syd = t0_u.astimezone(SYD) + t1_syd = t1_u.astimezone(SYD) + + self.assertEqual(t0_syd.replace(tzinfo=None), t_n) + + self.assertEqual(t1_syd.replace(tzinfo=None), t_n) + + self.assertEqual(t0_syd.utcoffset(), timedelta(hours=11)) + self.assertEqual(t1_syd.utcoffset(), timedelta(hours=10)) + self.assertNotEqual(t0_syd.tzname(), t1_syd.tzname()) + + def testGapPositiveUTCOffset(self): + # Test that we don't have a problem around gaps. 
+ tzname = 'AUS Eastern Standard Time' + args = self.get_args(tzname) + + with self.context(tzname): + SYD = self.tzclass(*args) + + t_n, t0_u, t1_u = self.get_utc_transitions(SYD, 2012, True) + + t0 = t0_u.astimezone(SYD) + t1 = t1_u.astimezone(SYD) + + self.assertEqual(t0.replace(tzinfo=None), t_n) + + self.assertEqual(t1.replace(tzinfo=None), t_n + timedelta(hours=2)) + + self.assertEqual(t0.utcoffset(), timedelta(hours=10)) + self.assertEqual(t1.utcoffset(), timedelta(hours=11)) + + def testFoldNegativeUTCOffset(self): + # Test that we can resolve ambiguous times + tzname = 'Eastern Standard Time' + args = self.get_args(tzname) + + with self.context(tzname): + TOR = self.tzclass(*args) + + t_n, t0_u, t1_u = self.get_utc_transitions(TOR, 2011, False) + + t0_tor = t0_u.astimezone(TOR) + t1_tor = t1_u.astimezone(TOR) + + self.assertEqual(t0_tor.replace(tzinfo=None), t_n) + self.assertEqual(t1_tor.replace(tzinfo=None), t_n) + + self.assertNotEqual(t0_tor.tzname(), t1_tor.tzname()) + self.assertEqual(t0_tor.utcoffset(), timedelta(hours=-4.0)) + self.assertEqual(t1_tor.utcoffset(), timedelta(hours=-5.0)) + + def testGapNegativeUTCOffset(self): + # Test that we don't have a problem around gaps. 
+ tzname = 'Eastern Standard Time' + args = self.get_args(tzname) + + with self.context(tzname): + TOR = self.tzclass(*args) + + t_n, t0_u, t1_u = self.get_utc_transitions(TOR, 2011, True) + + t0 = t0_u.astimezone(TOR) + t1 = t1_u.astimezone(TOR) + + self.assertEqual(t0.replace(tzinfo=None), + t_n) + + self.assertEqual(t1.replace(tzinfo=None), + t_n + timedelta(hours=2)) + + self.assertNotEqual(t0.tzname(), t1.tzname()) + self.assertEqual(t0.utcoffset(), timedelta(hours=-5.0)) + self.assertEqual(t1.utcoffset(), timedelta(hours=-4.0)) + + def testFoldIndependence(self): + tzname = 'Eastern Standard Time' + args = self.get_args(tzname) + + with self.context(tzname): + NYC = self.tzclass(*args) + UTC = tz.tzutc() + hour = timedelta(hours=1) + + # Firmly 2015-11-01 0:30 EDT-4 + t_n, t0_u, t1_u = self.get_utc_transitions(NYC, 2015, False) + + pre_dst = (t_n - hour).replace(tzinfo=NYC) + + # Currently, there's no way around the fact that this resolves to an + # ambiguous date, which defaults to EST. I'm not hard-coding in the + # answer, though, because the preferred behavior would be that this + # results in a time on the EDT side. + + # Ambiguous between 2015-11-01 1:30 EDT-4 and 2015-11-01 1:30 EST-5 + in_dst = pre_dst + hour + in_dst_tzname_0 = in_dst.tzname() # Stash the tzname - EDT + + # Doing the arithmetic in UTC creates a date that is unambiguously + # 2015-11-01 1:30 EDT-5 + in_dst_via_utc = (pre_dst.astimezone(UTC) + 2*hour).astimezone(NYC) + + # Make sure we got the right folding behavior + self.assertNotEqual(in_dst_via_utc.tzname(), in_dst_tzname_0) + + # Now check to make sure in_dst's tzname hasn't changed + self.assertEqual(in_dst_tzname_0, in_dst.tzname()) + + def testInZoneFoldEquality(self): + # Two datetimes in the same zone are considered to be equal if their + # wall times are equal, even if they have different absolute times. 
+ tzname = 'Eastern Standard Time' + args = self.get_args(tzname) + + with self.context(tzname): + NYC = self.tzclass(*args) + UTC = tz.tzutc() + + t_n, t0_u, t1_u = self.get_utc_transitions(NYC, 2011, False) + + dt0 = t_n.replace(tzinfo=NYC) + dt1 = tz.enfold(dt0, fold=1) + + # Make sure these actually represent different times + self.assertNotEqual(dt0.astimezone(UTC), dt1.astimezone(UTC)) + + # Test that they compare equal + self.assertEqual(dt0, dt1) + +### +# Test Cases +class TzUTCTest(unittest.TestCase): + def testSingleton(self): + UTC_0 = tz.tzutc() + UTC_1 = tz.tzutc() + + self.assertIs(UTC_0, UTC_1) + + def testOffset(self): + ct = datetime(2009, 4, 1, 12, 11, 13, tzinfo=tz.tzutc()) + + self.assertEqual(ct.utcoffset(), timedelta(seconds=0)) + + def testDst(self): + ct = datetime(2009, 4, 1, 12, 11, 13, tzinfo=tz.tzutc()) + + self.assertEqual(ct.dst(), timedelta(seconds=0)) + + def testTzName(self): + ct = datetime(2009, 4, 1, 12, 11, 13, tzinfo=tz.tzutc()) + self.assertEqual(ct.tzname(), 'UTC') + + def testEquality(self): + UTC0 = tz.tzutc() + UTC1 = tz.tzutc() + + self.assertEqual(UTC0, UTC1) + + def testInequality(self): + UTC = tz.tzutc() + UTCp4 = tz.tzoffset('UTC+4', 14400) + + self.assertNotEqual(UTC, UTCp4) + + def testInequalityInteger(self): + self.assertFalse(tz.tzutc() == 7) + self.assertNotEqual(tz.tzutc(), 7) + + def testInequalityUnsupported(self): + self.assertEqual(tz.tzutc(), ComparesEqual) + + def testRepr(self): + UTC = tz.tzutc() + self.assertEqual(repr(UTC), 'tzutc()') + + def testTimeOnlyUTC(self): + # https://github.com/dateutil/dateutil/issues/132 + # tzutc doesn't care + tz_utc = tz.tzutc() + self.assertEqual(dt_time(13, 20, tzinfo=tz_utc).utcoffset(), + timedelta(0)) + + def testAmbiguity(self): + # Pick an arbitrary datetime, this should always return False. 
+ dt = datetime(2011, 9, 1, 2, 30, tzinfo=tz.tzutc()) + + self.assertFalse(tz.datetime_ambiguous(dt)) + + +@pytest.mark.tzoffset +class TzOffsetTest(unittest.TestCase): + def testTimedeltaOffset(self): + est = tz.tzoffset('EST', timedelta(hours=-5)) + est_s = tz.tzoffset('EST', -18000) + + self.assertEqual(est, est_s) + + def testTzNameNone(self): + gmt5 = tz.tzoffset(None, -18000) # -5:00 + self.assertIs(datetime(2003, 10, 26, 0, 0, tzinfo=gmt5).tzname(), + None) + + def testTimeOnlyOffset(self): + # tzoffset doesn't care + tz_offset = tz.tzoffset('+3', 3600) + self.assertEqual(dt_time(13, 20, tzinfo=tz_offset).utcoffset(), + timedelta(seconds=3600)) + + def testTzOffsetRepr(self): + tname = 'EST' + tzo = tz.tzoffset(tname, -5 * 3600) + self.assertEqual(repr(tzo), "tzoffset(" + repr(tname) + ", -18000)") + + def testEquality(self): + utc = tz.tzoffset('UTC', 0) + gmt = tz.tzoffset('GMT', 0) + + self.assertEqual(utc, gmt) + + def testUTCEquality(self): + utc = tz.tzutc() + o_utc = tz.tzoffset('UTC', 0) + + self.assertEqual(utc, o_utc) + self.assertEqual(o_utc, utc) + + def testInequalityInvalid(self): + tzo = tz.tzoffset('-3', -3 * 3600) + self.assertFalse(tzo == -3) + self.assertNotEqual(tzo, -3) + + def testInequalityUnsupported(self): + tzo = tz.tzoffset('-5', -5 * 3600) + + self.assertTrue(tzo == ComparesEqual) + self.assertFalse(tzo != ComparesEqual) + self.assertEqual(tzo, ComparesEqual) + + def testAmbiguity(self): + # Pick an arbitrary datetime, this should always return False. 
+ dt = datetime(2011, 9, 1, 2, 30, tzinfo=tz.tzoffset("EST", -5 * 3600)) + + self.assertFalse(tz.datetime_ambiguous(dt)) + + def testTzOffsetInstance(self): + tz1 = tz.tzoffset.instance('EST', timedelta(hours=-5)) + tz2 = tz.tzoffset.instance('EST', timedelta(hours=-5)) + + assert tz1 is not tz2 + + def testTzOffsetSingletonDifferent(self): + tz1 = tz.tzoffset('EST', timedelta(hours=-5)) + tz2 = tz.tzoffset('EST', -18000) + + assert tz1 is tz2 + +@pytest.mark.tzoffset +@pytest.mark.parametrize('args', [ + ('UTC', 0), + ('EST', -18000), + ('EST', timedelta(hours=-5)), + (None, timedelta(hours=3)), +]) +def test_tzoffset_singleton(args): + tz1 = tz.tzoffset(*args) + tz2 = tz.tzoffset(*args) + + assert tz1 is tz2 + +@pytest.mark.tzlocal +class TzLocalTest(unittest.TestCase): + def testEquality(self): + tz1 = tz.tzlocal() + tz2 = tz.tzlocal() + + # Explicitly calling == and != here to ensure the operators work + self.assertTrue(tz1 == tz2) + self.assertFalse(tz1 != tz2) + + def testInequalityFixedOffset(self): + tzl = tz.tzlocal() + tzos = tz.tzoffset('LST', tzl._std_offset.total_seconds()) + tzod = tz.tzoffset('LDT', tzl._std_offset.total_seconds()) + + self.assertFalse(tzl == tzos) + self.assertFalse(tzl == tzod) + self.assertTrue(tzl != tzos) + self.assertTrue(tzl != tzod) + + def testInequalityInvalid(self): + tzl = tz.tzlocal() + + self.assertTrue(tzl != 1) + self.assertFalse(tzl == 1) + + # TODO: Use some sort of universal local mocking so that it's clear + # that we're expecting tzlocal to *not* be Pacific/Kiritimati + LINT = tz.gettz('Pacific/Kiritimati') + self.assertTrue(tzl != LINT) + self.assertFalse(tzl == LINT) + + def testInequalityUnsupported(self): + tzl = tz.tzlocal() + + self.assertTrue(tzl == ComparesEqual) + self.assertFalse(tzl != ComparesEqual) + + def testRepr(self): + tzl = tz.tzlocal() + + self.assertEqual(repr(tzl), 'tzlocal()') + + +@pytest.mark.parametrize('args,kwargs', [ + (('EST', -18000), {}), + (('EST', timedelta(hours=-5)), {}), + 
(('EST',), {'offset': -18000}), + (('EST',), {'offset': timedelta(hours=-5)}), + (tuple(), {'name': 'EST', 'offset': -18000}) +]) +def test_tzoffset_is(args, kwargs): + tz_ref = tz.tzoffset('EST', -18000) + assert tz.tzoffset(*args, **kwargs) is tz_ref + + +def test_tzoffset_is_not(): + assert tz.tzoffset('EDT', -14400) is not tz.tzoffset('EST', -18000) + + +@pytest.mark.tzlocal +@unittest.skipIf(IS_WIN, "requires Unix") +@unittest.skipUnless(TZEnvContext.tz_change_allowed(), + TZEnvContext.tz_change_disallowed_message()) +class TzLocalNixTest(unittest.TestCase, TzFoldMixin): + # This is a set of tests for `tzlocal()` on *nix systems + + # POSIX string indicating change to summer time on the 2nd Sunday in March + # at 2AM, and ending the 1st Sunday in November at 2AM. (valid >= 2007) + TZ_EST = 'EST+5EDT,M3.2.0/2,M11.1.0/2' + + # POSIX string for AEST/AEDT (valid >= 2008) + TZ_AEST = 'AEST-10AEDT,M10.1.0/2,M4.1.0/3' + + # POSIX string for BST/GMT + TZ_LON = 'GMT0BST,M3.5.0,M10.5.0' + + # POSIX string for UTC + UTC = 'UTC' + + def gettz(self, tzname): + # Actual time zone changes are handled by the _gettz_context function + return tz.tzlocal() + + def _gettz_context(self, tzname): + tzname_map = {'Australia/Sydney': self.TZ_AEST, + 'America/Toronto': self.TZ_EST, + 'America/New_York': self.TZ_EST, + 'Europe/London': self.TZ_LON} + + return TZEnvContext(tzname_map.get(tzname, tzname)) + + def _testTzFunc(self, tzval, func, std_val, dst_val): + """ + This generates tests about how the behavior of a function ``func`` + changes between STD and DST (e.g. utcoffset, tzname, dst). 
+ + It assume that DST starts the 2nd Sunday in March and ends the 1st + Sunday in November + """ + with TZEnvContext(tzval): + dt1 = datetime(2015, 2, 1, 12, 0, tzinfo=tz.tzlocal()) # STD + dt2 = datetime(2015, 5, 1, 12, 0, tzinfo=tz.tzlocal()) # DST + + self.assertEqual(func(dt1), std_val) + self.assertEqual(func(dt2), dst_val) + + def _testTzName(self, tzval, std_name, dst_name): + func = datetime.tzname + + self._testTzFunc(tzval, func, std_name, dst_name) + + def testTzNameDST(self): + # Test tzname in a zone with DST + self._testTzName(self.TZ_EST, 'EST', 'EDT') + + def testTzNameUTC(self): + # Test tzname in a zone without DST + self._testTzName(self.UTC, 'UTC', 'UTC') + + def _testOffset(self, tzval, std_off, dst_off): + func = datetime.utcoffset + + self._testTzFunc(tzval, func, std_off, dst_off) + + def testOffsetDST(self): + self._testOffset(self.TZ_EST, timedelta(hours=-5), timedelta(hours=-4)) + + def testOffsetUTC(self): + self._testOffset(self.UTC, timedelta(0), timedelta(0)) + + def _testDST(self, tzval, dst_dst): + func = datetime.dst + std_dst = timedelta(0) + + self._testTzFunc(tzval, func, std_dst, dst_dst) + + def testDSTDST(self): + self._testDST(self.TZ_EST, timedelta(hours=1)) + + def testDSTUTC(self): + self._testDST(self.UTC, timedelta(0)) + + def testTimeOnlyOffsetLocalUTC(self): + with TZEnvContext(self.UTC): + self.assertEqual(dt_time(13, 20, tzinfo=tz.tzlocal()).utcoffset(), + timedelta(0)) + + def testTimeOnlyOffsetLocalDST(self): + with TZEnvContext(self.TZ_EST): + self.assertIs(dt_time(13, 20, tzinfo=tz.tzlocal()).utcoffset(), + None) + + def testTimeOnlyDSTLocalUTC(self): + with TZEnvContext(self.UTC): + self.assertEqual(dt_time(13, 20, tzinfo=tz.tzlocal()).dst(), + timedelta(0)) + + def testTimeOnlyDSTLocalDST(self): + with TZEnvContext(self.TZ_EST): + self.assertIs(dt_time(13, 20, tzinfo=tz.tzlocal()).dst(), + None) + + def testUTCEquality(self): + with TZEnvContext(self.UTC): + assert tz.tzlocal() == tz.tzutc() + + +# TODO: 
Maybe a better hack than this? +def mark_tzlocal_nix(f): + marks = [ + pytest.mark.tzlocal, + pytest.mark.skipif(IS_WIN, reason='requires Unix'), + pytest.mark.skipif(not TZEnvContext.tz_change_allowed, + reason=TZEnvContext.tz_change_disallowed_message()) + ] + + for mark in reversed(marks): + f = mark(f) + + return f + + +@mark_tzlocal_nix +@pytest.mark.parametrize('tzvar', ['UTC', 'GMT0', 'UTC0']) +def test_tzlocal_utc_equal(tzvar): + with TZEnvContext(tzvar): + assert tz.tzlocal() == tz.UTC + + +@mark_tzlocal_nix +@pytest.mark.parametrize('tzvar', [ + 'Europe/London', 'America/New_York', + 'GMT0BST', 'EST5EDT']) +def test_tzlocal_utc_unequal(tzvar): + with TZEnvContext(tzvar): + assert tz.tzlocal() != tz.UTC + + +@mark_tzlocal_nix +def test_tzlocal_local_time_trim_colon(): + with TZEnvContext(':/etc/localtime'): + assert tz.gettz() is not None + + +@mark_tzlocal_nix +@pytest.mark.parametrize('tzvar, tzoff', [ + ('EST5', tz.tzoffset('EST', -18000)), + ('GMT', tz.tzoffset('GMT', 0)), + ('YAKT-9', tz.tzoffset('YAKT', timedelta(hours=9))), + ('JST-9', tz.tzoffset('JST', timedelta(hours=9))), +]) +def test_tzlocal_offset_equal(tzvar, tzoff): + with TZEnvContext(tzvar): + # Including both to test both __eq__ and __ne__ + assert tz.tzlocal() == tzoff + assert not (tz.tzlocal() != tzoff) + + +@mark_tzlocal_nix +@pytest.mark.parametrize('tzvar, tzoff', [ + ('EST5EDT', tz.tzoffset('EST', -18000)), + ('GMT0BST', tz.tzoffset('GMT', 0)), + ('EST5', tz.tzoffset('EST', -14400)), + ('YAKT-9', tz.tzoffset('JST', timedelta(hours=9))), + ('JST-9', tz.tzoffset('YAKT', timedelta(hours=9))), +]) +def test_tzlocal_offset_unequal(tzvar, tzoff): + with TZEnvContext(tzvar): + # Including both to test both __eq__ and __ne__ + assert tz.tzlocal() != tzoff + assert not (tz.tzlocal() == tzoff) + + +@pytest.mark.gettz +class GettzTest(unittest.TestCase, TzFoldMixin): + gettz = staticmethod(tz.gettz) + + def testGettz(self): + # bug 892569 + str(self.gettz('UTC')) + + def 
testGetTzEquality(self): + self.assertEqual(self.gettz('UTC'), self.gettz('UTC')) + + def testTimeOnlyGettz(self): + # gettz returns None + tz_get = self.gettz('Europe/Minsk') + self.assertIs(dt_time(13, 20, tzinfo=tz_get).utcoffset(), None) + + def testTimeOnlyGettzDST(self): + # gettz returns None + tz_get = self.gettz('Europe/Minsk') + self.assertIs(dt_time(13, 20, tzinfo=tz_get).dst(), None) + + def testTimeOnlyGettzTzName(self): + tz_get = self.gettz('Europe/Minsk') + self.assertIs(dt_time(13, 20, tzinfo=tz_get).tzname(), None) + + def testTimeOnlyFormatZ(self): + tz_get = self.gettz('Europe/Minsk') + t = dt_time(13, 20, tzinfo=tz_get) + + self.assertEqual(t.strftime('%H%M%Z'), '1320') + + def testPortugalDST(self): + # In 1996, Portugal changed from CET to WET + PORTUGAL = self.gettz('Portugal') + + t_cet = datetime(1996, 3, 31, 1, 59, tzinfo=PORTUGAL) + + self.assertEqual(t_cet.tzname(), 'CET') + self.assertEqual(t_cet.utcoffset(), timedelta(hours=1)) + self.assertEqual(t_cet.dst(), timedelta(0)) + + t_west = datetime(1996, 3, 31, 2, 1, tzinfo=PORTUGAL) + + self.assertEqual(t_west.tzname(), 'WEST') + self.assertEqual(t_west.utcoffset(), timedelta(hours=1)) + self.assertEqual(t_west.dst(), timedelta(hours=1)) + + def testGettzCacheTzFile(self): + NYC1 = tz.gettz('America/New_York') + NYC2 = tz.gettz('America/New_York') + + assert NYC1 is NYC2 + + def testGettzCacheTzLocal(self): + local1 = tz.gettz() + local2 = tz.gettz() + + assert local1 is not local2 + +@pytest.mark.gettz +@pytest.mark.xfail(IS_WIN, reason='zoneinfo separately cached') +def test_gettz_cache_clear(): + NYC1 = tz.gettz('America/New_York') + tz.gettz.cache_clear() + + NYC2 = tz.gettz('America/New_York') + + assert NYC1 is not NYC2 + + +class ZoneInfoGettzTest(GettzTest, WarningTestMixin): + def gettz(self, name): + zoneinfo_file = zoneinfo.get_zonefile_instance() + return zoneinfo_file.get(name) + + def testZoneInfoFileStart1(self): + tz = self.gettz("EST5EDT") + 
self.assertEqual(datetime(2003, 4, 6, 1, 59, tzinfo=tz).tzname(), "EST", + MISSING_TARBALL) + self.assertEqual(datetime(2003, 4, 6, 2, 00, tzinfo=tz).tzname(), "EDT") + + def testZoneInfoFileEnd1(self): + tzc = self.gettz("EST5EDT") + self.assertEqual(datetime(2003, 10, 26, 0, 59, tzinfo=tzc).tzname(), + "EDT", MISSING_TARBALL) + + end_est = tz.enfold(datetime(2003, 10, 26, 1, 00, tzinfo=tzc), fold=1) + self.assertEqual(end_est.tzname(), "EST") + + def testZoneInfoOffsetSignal(self): + utc = self.gettz("UTC") + nyc = self.gettz("America/New_York") + self.assertNotEqual(utc, None, MISSING_TARBALL) + self.assertNotEqual(nyc, None) + t0 = datetime(2007, 11, 4, 0, 30, tzinfo=nyc) + t1 = t0.astimezone(utc) + t2 = t1.astimezone(nyc) + self.assertEqual(t0, t2) + self.assertEqual(nyc.dst(t0), timedelta(hours=1)) + + def testZoneInfoCopy(self): + # copy.copy() called on a ZoneInfo file was returning the same instance + CHI = self.gettz('America/Chicago') + CHI_COPY = copy.copy(CHI) + + self.assertIsNot(CHI, CHI_COPY) + self.assertEqual(CHI, CHI_COPY) + + def testZoneInfoDeepCopy(self): + CHI = self.gettz('America/Chicago') + CHI_COPY = copy.deepcopy(CHI) + + self.assertIsNot(CHI, CHI_COPY) + self.assertEqual(CHI, CHI_COPY) + + def testZoneInfoInstanceCaching(self): + zif_0 = zoneinfo.get_zonefile_instance() + zif_1 = zoneinfo.get_zonefile_instance() + + self.assertIs(zif_0, zif_1) + + def testZoneInfoNewInstance(self): + zif_0 = zoneinfo.get_zonefile_instance() + zif_1 = zoneinfo.get_zonefile_instance(new_instance=True) + zif_2 = zoneinfo.get_zonefile_instance() + + self.assertIsNot(zif_0, zif_1) + self.assertIs(zif_1, zif_2) + + def testZoneInfoDeprecated(self): + with self.assertWarns(DeprecationWarning): + zoneinfo.gettz('US/Eastern') + + def testZoneInfoMetadataDeprecated(self): + with self.assertWarns(DeprecationWarning): + zoneinfo.gettz_db_metadata() + + +class TZRangeTest(unittest.TestCase, TzFoldMixin): + TZ_EST = tz.tzrange('EST', timedelta(hours=-5), + 'EDT', 
timedelta(hours=-4), + start=relativedelta(month=3, day=1, hour=2, + weekday=SU(+2)), + end=relativedelta(month=11, day=1, hour=1, + weekday=SU(+1))) + + TZ_AEST = tz.tzrange('AEST', timedelta(hours=10), + 'AEDT', timedelta(hours=11), + start=relativedelta(month=10, day=1, hour=2, + weekday=SU(+1)), + end=relativedelta(month=4, day=1, hour=2, + weekday=SU(+1))) + + TZ_LON = tz.tzrange('GMT', timedelta(hours=0), + 'BST', timedelta(hours=1), + start=relativedelta(month=3, day=31, weekday=SU(-1), + hours=2), + end=relativedelta(month=10, day=31, weekday=SU(-1), + hours=1)) + # POSIX string for UTC + UTC = 'UTC' + + def gettz(self, tzname): + tzname_map = {'Australia/Sydney': self.TZ_AEST, + 'America/Toronto': self.TZ_EST, + 'America/New_York': self.TZ_EST, + 'Europe/London': self.TZ_LON} + + return tzname_map[tzname] + + def testRangeCmp1(self): + self.assertEqual(tz.tzstr("EST5EDT"), + tz.tzrange("EST", -18000, "EDT", -14400, + relativedelta(hours=+2, + month=4, day=1, + weekday=SU(+1)), + relativedelta(hours=+1, + month=10, day=31, + weekday=SU(-1)))) + + def testRangeCmp2(self): + self.assertEqual(tz.tzstr("EST5EDT"), + tz.tzrange("EST", -18000, "EDT")) + + def testRangeOffsets(self): + TZR = tz.tzrange('EST', -18000, 'EDT', -14400, + start=relativedelta(hours=2, month=4, day=1, + weekday=SU(+2)), + end=relativedelta(hours=1, month=10, day=31, + weekday=SU(-1))) + + dt_std = datetime(2014, 4, 11, 12, 0, tzinfo=TZR) # STD + dt_dst = datetime(2016, 4, 11, 12, 0, tzinfo=TZR) # DST + + dst_zero = timedelta(0) + dst_hour = timedelta(hours=1) + + std_offset = timedelta(hours=-5) + dst_offset = timedelta(hours=-4) + + # Check dst() + self.assertEqual(dt_std.dst(), dst_zero) + self.assertEqual(dt_dst.dst(), dst_hour) + + # Check utcoffset() + self.assertEqual(dt_std.utcoffset(), std_offset) + self.assertEqual(dt_dst.utcoffset(), dst_offset) + + # Check tzname + self.assertEqual(dt_std.tzname(), 'EST') + self.assertEqual(dt_dst.tzname(), 'EDT') + + def 
testTimeOnlyRangeFixed(self): + # This is a fixed-offset zone, so tzrange allows this + tz_range = tz.tzrange('dflt', stdoffset=timedelta(hours=-3)) + self.assertEqual(dt_time(13, 20, tzinfo=tz_range).utcoffset(), + timedelta(hours=-3)) + + def testTimeOnlyRange(self): + # tzrange returns None because this zone has DST + tz_range = tz.tzrange('EST', timedelta(hours=-5), + 'EDT', timedelta(hours=-4)) + self.assertIs(dt_time(13, 20, tzinfo=tz_range).utcoffset(), None) + + def testBrokenIsDstHandling(self): + # tzrange._isdst() was using a date() rather than a datetime(). + # Issue reported by Lennart Regebro. + dt = datetime(2007, 8, 6, 4, 10, tzinfo=tz.tzutc()) + self.assertEqual(dt.astimezone(tz=tz.gettz("GMT+2")), + datetime(2007, 8, 6, 6, 10, tzinfo=tz.tzstr("GMT+2"))) + + def testRangeTimeDelta(self): + # Test that tzrange can be specified with a timedelta instead of an int. + EST5EDT_td = tz.tzrange('EST', timedelta(hours=-5), + 'EDT', timedelta(hours=-4)) + + EST5EDT_sec = tz.tzrange('EST', -18000, + 'EDT', -14400) + + self.assertEqual(EST5EDT_td, EST5EDT_sec) + + def testRangeEquality(self): + TZR1 = tz.tzrange('EST', -18000, 'EDT', -14400) + + # Standard abbreviation different + TZR2 = tz.tzrange('ET', -18000, 'EDT', -14400) + self.assertNotEqual(TZR1, TZR2) + + # DST abbreviation different + TZR3 = tz.tzrange('EST', -18000, 'EMT', -14400) + self.assertNotEqual(TZR1, TZR3) + + # STD offset different + TZR4 = tz.tzrange('EST', -14000, 'EDT', -14400) + self.assertNotEqual(TZR1, TZR4) + + # DST offset different + TZR5 = tz.tzrange('EST', -18000, 'EDT', -18000) + self.assertNotEqual(TZR1, TZR5) + + # Start delta different + TZR6 = tz.tzrange('EST', -18000, 'EDT', -14400, + start=relativedelta(hours=+1, month=3, + day=1, weekday=SU(+2))) + self.assertNotEqual(TZR1, TZR6) + + # End delta different + TZR7 = tz.tzrange('EST', -18000, 'EDT', -14400, + end=relativedelta(hours=+1, month=11, + day=1, weekday=SU(+2))) + self.assertNotEqual(TZR1, TZR7) + + def 
testRangeInequalityUnsupported(self): + TZR = tz.tzrange('EST', -18000, 'EDT', -14400) + + self.assertFalse(TZR == 4) + self.assertTrue(TZR == ComparesEqual) + self.assertFalse(TZR != ComparesEqual) + + +@pytest.mark.tzstr +class TZStrTest(unittest.TestCase, TzFoldMixin): + # POSIX string indicating change to summer time on the 2nd Sunday in March + # at 2AM, and ending the 1st Sunday in November at 2AM. (valid >= 2007) + TZ_EST = 'EST+5EDT,M3.2.0/2,M11.1.0/2' + + # POSIX string for AEST/AEDT (valid >= 2008) + TZ_AEST = 'AEST-10AEDT,M10.1.0/2,M4.1.0/3' + + # POSIX string for GMT/BST + TZ_LON = 'GMT0BST,M3.5.0,M10.5.0' + + def gettz(self, tzname): + # Actual time zone changes are handled by the _gettz_context function + tzname_map = {'Australia/Sydney': self.TZ_AEST, + 'America/Toronto': self.TZ_EST, + 'America/New_York': self.TZ_EST, + 'Europe/London': self.TZ_LON} + + return tz.tzstr(tzname_map[tzname]) + + def testStrStr(self): + # Test that tz.tzstr() won't throw an error if given a str instead + # of a unicode literal. 
+ self.assertEqual(datetime(2003, 4, 6, 1, 59, + tzinfo=tz.tzstr(str("EST5EDT"))).tzname(), "EST") + self.assertEqual(datetime(2003, 4, 6, 2, 00, + tzinfo=tz.tzstr(str("EST5EDT"))).tzname(), "EDT") + + def testStrInequality(self): + TZS1 = tz.tzstr('EST5EDT4') + + # Standard abbreviation different + TZS2 = tz.tzstr('ET5EDT4') + self.assertNotEqual(TZS1, TZS2) + + # DST abbreviation different + TZS3 = tz.tzstr('EST5EMT') + self.assertNotEqual(TZS1, TZS3) + + # STD offset different + TZS4 = tz.tzstr('EST4EDT4') + self.assertNotEqual(TZS1, TZS4) + + # DST offset different + TZS5 = tz.tzstr('EST5EDT3') + self.assertNotEqual(TZS1, TZS5) + + def testStrInequalityStartEnd(self): + TZS1 = tz.tzstr('EST5EDT4') + + # Start delta different + TZS2 = tz.tzstr('EST5EDT4,M4.2.0/02:00:00,M10-5-0/02:00') + self.assertNotEqual(TZS1, TZS2) + + # End delta different + TZS3 = tz.tzstr('EST5EDT4,M4.2.0/02:00:00,M11-5-0/02:00') + self.assertNotEqual(TZS1, TZS3) + + def testPosixOffset(self): + TZ1 = tz.tzstr('UTC-3') + self.assertEqual(datetime(2015, 1, 1, tzinfo=TZ1).utcoffset(), + timedelta(hours=-3)) + + TZ2 = tz.tzstr('UTC-3', posix_offset=True) + self.assertEqual(datetime(2015, 1, 1, tzinfo=TZ2).utcoffset(), + timedelta(hours=+3)) + + def testStrInequalityUnsupported(self): + TZS = tz.tzstr('EST5EDT') + + self.assertFalse(TZS == 4) + self.assertTrue(TZS == ComparesEqual) + self.assertFalse(TZS != ComparesEqual) + + def testTzStrRepr(self): + TZS1 = tz.tzstr('EST5EDT4') + TZS2 = tz.tzstr('EST') + + self.assertEqual(repr(TZS1), "tzstr(" + repr('EST5EDT4') + ")") + self.assertEqual(repr(TZS2), "tzstr(" + repr('EST') + ")") + + def testTzStrFailure(self): + with self.assertRaises(ValueError): + tz.tzstr('InvalidString;439999') + + def testTzStrSingleton(self): + tz1 = tz.tzstr('EST5EDT') + tz2 = tz.tzstr('CST4CST') + tz3 = tz.tzstr('EST5EDT') + + self.assertIsNot(tz1, tz2) + self.assertIs(tz1, tz3) + + def testTzStrSingletonPosix(self): + tz_t1 = tz.tzstr('GMT+3', posix_offset=True) + 
tz_f1 = tz.tzstr('GMT+3', posix_offset=False) + + tz_t2 = tz.tzstr('GMT+3', posix_offset=True) + tz_f2 = tz.tzstr('GMT+3', posix_offset=False) + + self.assertIs(tz_t1, tz_t2) + self.assertIsNot(tz_t1, tz_f1) + + self.assertIs(tz_f1, tz_f2) + + def testTzStrInstance(self): + tz1 = tz.tzstr('EST5EDT') + tz2 = tz.tzstr.instance('EST5EDT') + tz3 = tz.tzstr.instance('EST5EDT') + + assert tz1 is not tz2 + assert tz2 is not tz3 + + # Ensure that these still are all the same zone + assert tz1 == tz2 == tz3 + +@pytest.mark.tzstr +@pytest.mark.parametrize('tz_str,expected', [ + # From https://www.gnu.org/software/libc/manual/html_node/TZ-Variable.html + ('', tz.tzrange(None)), # TODO: Should change this so tz.tzrange('') works + ('EST+5EDT,M3.2.0/2,M11.1.0/12', + tz.tzrange('EST', -18000, 'EDT', -14400, + start=relativedelta(month=3, day=1, weekday=SU(2), hours=2), + end=relativedelta(month=11, day=1, weekday=SU(1), hours=11))), + ('WART4WARST,J1/0,J365/25', # This is DST all year, Western Argentina Summer Time + tz.tzrange('WART', timedelta(hours=-4), 'WARST', + start=relativedelta(month=1, day=1, hours=0), + end=relativedelta(month=12, day=31, days=1))), + ('IST-2IDT,M3.4.4/26,M10.5.0', # Israel Standard / Daylight Time + tz.tzrange('IST', timedelta(hours=2), 'IDT', + start=relativedelta(month=3, day=1, weekday=TH(4), days=1, hours=2), + end=relativedelta(month=10, day=31, weekday=SU(-1), hours=1))), + ('WGT3WGST,M3.5.0/2,M10.5.0/1', + tz.tzrange('WGT', timedelta(hours=-3), 'WGST', + start=relativedelta(month=3, day=31, weekday=SU(-1), hours=2), + end=relativedelta(month=10, day=31, weekday=SU(-1), hours=0))), + + # Different offset specifications + ('WGT0300WGST', + tz.tzrange('WGT', timedelta(hours=-3), 'WGST')), + ('WGT03:00WGST', + tz.tzrange('WGT', timedelta(hours=-3), 'WGST')), + ('AEST-1100AEDT', + tz.tzrange('AEST', timedelta(hours=11), 'AEDT')), + ('AEST-11:00AEDT', + tz.tzrange('AEST', timedelta(hours=11), 'AEDT')), + + # Different time formats + 
('EST5EDT,M3.2.0/4:00,M11.1.0/3:00', + tz.tzrange('EST', timedelta(hours=-5), 'EDT', + start=relativedelta(month=3, day=1, weekday=SU(2), hours=4), + end=relativedelta(month=11, day=1, weekday=SU(1), hours=2))), + ('EST5EDT,M3.2.0/04:00,M11.1.0/03:00', + tz.tzrange('EST', timedelta(hours=-5), 'EDT', + start=relativedelta(month=3, day=1, weekday=SU(2), hours=4), + end=relativedelta(month=11, day=1, weekday=SU(1), hours=2))), + ('EST5EDT,M3.2.0/0400,M11.1.0/0300', + tz.tzrange('EST', timedelta(hours=-5), 'EDT', + start=relativedelta(month=3, day=1, weekday=SU(2), hours=4), + end=relativedelta(month=11, day=1, weekday=SU(1), hours=2))), +]) +def test_valid_GNU_tzstr(tz_str, expected): + tzi = tz.tzstr(tz_str) + + assert tzi == expected + + +@pytest.mark.tzstr +@pytest.mark.parametrize('tz_str, expected', [ + ('EST5EDT,5,4,0,7200,11,3,0,7200', + tz.tzrange('EST', timedelta(hours=-5), 'EDT', + start=relativedelta(month=5, day=1, weekday=SU(+4), hours=+2), + end=relativedelta(month=11, day=1, weekday=SU(+3), hours=+1))), + ('EST5EDT,5,-4,0,7200,11,3,0,7200', + tz.tzrange('EST', timedelta(hours=-5), 'EDT', + start=relativedelta(hours=+2, month=5, day=31, weekday=SU(-4)), + end=relativedelta(hours=+1, month=11, day=1, weekday=SU(+3)))), + ('EST5EDT,5,4,0,7200,11,-3,0,7200', + tz.tzrange('EST', timedelta(hours=-5), 'EDT', + start=relativedelta(hours=+2, month=5, day=1, weekday=SU(+4)), + end=relativedelta(hours=+1, month=11, day=31, weekday=SU(-3)))), + ('EST5EDT,5,4,0,7200,11,-3,0,7200,3600', + tz.tzrange('EST', timedelta(hours=-5), 'EDT', + start=relativedelta(hours=+2, month=5, day=1, weekday=SU(+4)), + end=relativedelta(hours=+1, month=11, day=31, weekday=SU(-3)))), + ('EST5EDT,5,4,0,7200,11,-3,0,7200,3600', + tz.tzrange('EST', timedelta(hours=-5), 'EDT', + start=relativedelta(hours=+2, month=5, day=1, weekday=SU(+4)), + end=relativedelta(hours=+1, month=11, day=31, weekday=SU(-3)))), + ('EST5EDT,5,4,0,7200,11,-3,0,7200,-3600', + tz.tzrange('EST', timedelta(hours=-5), 
'EDT', timedelta(hours=-6), + start=relativedelta(hours=+2, month=5, day=1, weekday=SU(+4)), + end=relativedelta(hours=+3, month=11, day=31, weekday=SU(-3)))), + ('EST5EDT,5,4,0,7200,11,-3,0,7200,+7200', + tz.tzrange('EST', timedelta(hours=-5), 'EDT', timedelta(hours=-3), + start=relativedelta(hours=+2, month=5, day=1, weekday=SU(+4)), + end=relativedelta(hours=0, month=11, day=31, weekday=SU(-3)))), + ('EST5EDT,5,4,0,7200,11,-3,0,7200,+3600', + tz.tzrange('EST', timedelta(hours=-5), 'EDT', + start=relativedelta(hours=+2, month=5, day=1, weekday=SU(+4)), + end=relativedelta(hours=+1, month=11, day=31, weekday=SU(-3)))), +]) +def test_valid_dateutil_format(tz_str, expected): + # This tests the dateutil-specific format that is used widely in the tests + # and examples. It is unclear where this format originated from. + with pytest.warns(tz.DeprecatedTzFormatWarning): + tzi = tz.tzstr.instance(tz_str) + + assert tzi == expected + + +@pytest.mark.tzstr +@pytest.mark.parametrize('tz_str', [ + 'hdfiughdfuig,dfughdfuigpu87ñ::', + ',dfughdfuigpu87ñ::', + '-1:WART4WARST,J1,J365/25', + 'WART4WARST,J1,J365/-25', + 'IST-2IDT,M3.4.-1/26,M10.5.0', + 'IST-2IDT,M3,2000,1/26,M10,5,0' +]) +def test_invalid_GNU_tzstr(tz_str): + with pytest.raises(ValueError): + tz.tzstr(tz_str) + + +# Different representations of the same default rule set +DEFAULT_TZSTR_RULES_EQUIV_2003 = [ + 'EST5EDT', + 'EST5EDT4,M4.1.0/02:00:00,M10-5-0/02:00', + 'EST5EDT4,95/02:00:00,298/02:00', + 'EST5EDT4,J96/02:00:00,J299/02:00', + 'EST5EDT4,J96/02:00:00,J299/02' +] + + +@pytest.mark.tzstr +@pytest.mark.parametrize('tz_str', DEFAULT_TZSTR_RULES_EQUIV_2003) +def test_tzstr_default_start(tz_str): + tzi = tz.tzstr(tz_str) + dt_std = datetime(2003, 4, 6, 1, 59, tzinfo=tzi) + dt_dst = datetime(2003, 4, 6, 2, 00, tzinfo=tzi) + + assert get_timezone_tuple(dt_std) == EST_TUPLE + assert get_timezone_tuple(dt_dst) == EDT_TUPLE + + +@pytest.mark.tzstr +@pytest.mark.parametrize('tz_str', DEFAULT_TZSTR_RULES_EQUIV_2003) 
+def test_tzstr_default_end(tz_str): + tzi = tz.tzstr(tz_str) + dt_dst = datetime(2003, 10, 26, 0, 59, tzinfo=tzi) + dt_dst_ambig = datetime(2003, 10, 26, 1, 00, tzinfo=tzi) + dt_std_ambig = tz.enfold(dt_dst_ambig, fold=1) + dt_std = datetime(2003, 10, 26, 2, 00, tzinfo=tzi) + + assert get_timezone_tuple(dt_dst) == EDT_TUPLE + assert get_timezone_tuple(dt_dst_ambig) == EDT_TUPLE + assert get_timezone_tuple(dt_std_ambig) == EST_TUPLE + assert get_timezone_tuple(dt_std) == EST_TUPLE + + +@pytest.mark.tzstr +@pytest.mark.parametrize('tzstr_1', ['EST5EDT', + 'EST5EDT4,M4.1.0/02:00:00,M10-5-0/02:00']) +@pytest.mark.parametrize('tzstr_2', ['EST5EDT', + 'EST5EDT4,M4.1.0/02:00:00,M10-5-0/02:00']) +def test_tzstr_default_cmp(tzstr_1, tzstr_2): + tz1 = tz.tzstr(tzstr_1) + tz2 = tz.tzstr(tzstr_2) + + assert tz1 == tz2 + +class TZICalTest(unittest.TestCase, TzFoldMixin): + def _gettz_str_tuple(self, tzname): + TZ_EST = ( + 'BEGIN:VTIMEZONE', + 'TZID:US-Eastern', + 'BEGIN:STANDARD', + 'DTSTART:19971029T020000', + 'RRULE:FREQ=YEARLY;BYDAY=+1SU;BYMONTH=11', + 'TZOFFSETFROM:-0400', + 'TZOFFSETTO:-0500', + 'TZNAME:EST', + 'END:STANDARD', + 'BEGIN:DAYLIGHT', + 'DTSTART:19980301T020000', + 'RRULE:FREQ=YEARLY;BYDAY=+2SU;BYMONTH=03', + 'TZOFFSETFROM:-0500', + 'TZOFFSETTO:-0400', + 'TZNAME:EDT', + 'END:DAYLIGHT', + 'END:VTIMEZONE' + ) + + TZ_PST = ( + 'BEGIN:VTIMEZONE', + 'TZID:US-Pacific', + 'BEGIN:STANDARD', + 'DTSTART:19971029T020000', + 'RRULE:FREQ=YEARLY;BYDAY=+1SU;BYMONTH=11', + 'TZOFFSETFROM:-0700', + 'TZOFFSETTO:-0800', + 'TZNAME:PST', + 'END:STANDARD', + 'BEGIN:DAYLIGHT', + 'DTSTART:19980301T020000', + 'RRULE:FREQ=YEARLY;BYDAY=+2SU;BYMONTH=03', + 'TZOFFSETFROM:-0800', + 'TZOFFSETTO:-0700', + 'TZNAME:PDT', + 'END:DAYLIGHT', + 'END:VTIMEZONE' + ) + + TZ_AEST = ( + 'BEGIN:VTIMEZONE', + 'TZID:Australia-Sydney', + 'BEGIN:STANDARD', + 'DTSTART:19980301T030000', + 'RRULE:FREQ=YEARLY;BYDAY=+1SU;BYMONTH=04', + 'TZOFFSETFROM:+1100', + 'TZOFFSETTO:+1000', + 'TZNAME:AEST', + 
'END:STANDARD', + 'BEGIN:DAYLIGHT', + 'DTSTART:19971029T020000', + 'RRULE:FREQ=YEARLY;BYDAY=+1SU;BYMONTH=10', + 'TZOFFSETFROM:+1000', + 'TZOFFSETTO:+1100', + 'TZNAME:AEDT', + 'END:DAYLIGHT', + 'END:VTIMEZONE' + ) + + TZ_LON = ( + 'BEGIN:VTIMEZONE', + 'TZID:Europe-London', + 'BEGIN:STANDARD', + 'DTSTART:19810301T030000', + 'RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10;BYHOUR=02', + 'TZOFFSETFROM:+0100', + 'TZOFFSETTO:+0000', + 'TZNAME:GMT', + 'END:STANDARD', + 'BEGIN:DAYLIGHT', + 'DTSTART:19961001T030000', + 'RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=03;BYHOUR=01', + 'TZOFFSETFROM:+0000', + 'TZOFFSETTO:+0100', + 'TZNAME:BST', + 'END:DAYLIGHT', + 'END:VTIMEZONE' + ) + + tzname_map = {'Australia/Sydney': TZ_AEST, + 'America/Toronto': TZ_EST, + 'America/New_York': TZ_EST, + 'America/Los_Angeles': TZ_PST, + 'Europe/London': TZ_LON} + + return tzname_map[tzname] + + def _gettz_str(self, tzname): + return '\n'.join(self._gettz_str_tuple(tzname)) + + def _tzstr_dtstart_with_params(self, tzname, param_str): + # Adds parameters to the DTSTART values of a given tzstr + tz_str_tuple = self._gettz_str_tuple(tzname) + + out_tz = [] + for line in tz_str_tuple: + if line.startswith('DTSTART'): + name, value = line.split(':', 1) + line = name + ';' + param_str + ':' + value + + out_tz.append(line) + + return '\n'.join(out_tz) + + def gettz(self, tzname): + tz_str = self._gettz_str(tzname) + + tzc = tz.tzical(StringIO(tz_str)).get() + + return tzc + + def testRepr(self): + instr = StringIO(TZICAL_PST8PDT) + instr.name = 'StringIO(PST8PDT)' + tzc = tz.tzical(instr) + + self.assertEqual(repr(tzc), "tzical(" + repr(instr.name) + ")") + + # Test performance + def _test_us_zone(self, tzc, func, values, start): + if start: + dt1 = datetime(2003, 3, 9, 1, 59) + dt2 = datetime(2003, 3, 9, 2, 00) + fold = [0, 0] + else: + dt1 = datetime(2003, 11, 2, 0, 59) + dt2 = datetime(2003, 11, 2, 1, 00) + fold = [0, 1] + + dts = (tz.enfold(dt.replace(tzinfo=tzc), fold=f) + for dt, f in zip((dt1, dt2), fold)) + + 
for value, dt in zip(values, dts): + self.assertEqual(func(dt), value) + + def _test_multi_zones(self, tzstrs, tzids, func, values, start): + tzic = tz.tzical(StringIO('\n'.join(tzstrs))) + for tzid, vals in zip(tzids, values): + tzc = tzic.get(tzid) + + self._test_us_zone(tzc, func, vals, start) + + def _prepare_EST(self): + tz_str = self._gettz_str('America/New_York') + return tz.tzical(StringIO(tz_str)).get() + + def _testEST(self, start, test_type, tzc=None): + if tzc is None: + tzc = self._prepare_EST() + + argdict = { + 'name': (datetime.tzname, ('EST', 'EDT')), + 'offset': (datetime.utcoffset, (timedelta(hours=-5), + timedelta(hours=-4))), + 'dst': (datetime.dst, (timedelta(hours=0), + timedelta(hours=1))) + } + + func, values = argdict[test_type] + + if not start: + values = reversed(values) + + self._test_us_zone(tzc, func, values, start=start) + + def testESTStartName(self): + self._testEST(start=True, test_type='name') + + def testESTEndName(self): + self._testEST(start=False, test_type='name') + + def testESTStartOffset(self): + self._testEST(start=True, test_type='offset') + + def testESTEndOffset(self): + self._testEST(start=False, test_type='offset') + + def testESTStartDST(self): + self._testEST(start=True, test_type='dst') + + def testESTEndDST(self): + self._testEST(start=False, test_type='dst') + + def testESTValueDatetime(self): + # Violating one-test-per-test rule because we're not set up to do + # parameterized tests and the manual proliferation is getting a bit + # out of hand. 
+ tz_str = self._tzstr_dtstart_with_params('America/New_York', + 'VALUE=DATE-TIME') + + tzc = tz.tzical(StringIO(tz_str)).get() + + for start in (True, False): + for test_type in ('name', 'offset', 'dst'): + self._testEST(start=start, test_type=test_type, tzc=tzc) + + def _testMultizone(self, start, test_type): + tzstrs = (self._gettz_str('America/New_York'), + self._gettz_str('America/Los_Angeles')) + tzids = ('US-Eastern', 'US-Pacific') + + argdict = { + 'name': (datetime.tzname, (('EST', 'EDT'), + ('PST', 'PDT'))), + 'offset': (datetime.utcoffset, ((timedelta(hours=-5), + timedelta(hours=-4)), + (timedelta(hours=-8), + timedelta(hours=-7)))), + 'dst': (datetime.dst, ((timedelta(hours=0), + timedelta(hours=1)), + (timedelta(hours=0), + timedelta(hours=1)))) + } + + func, values = argdict[test_type] + + if not start: + values = map(reversed, values) + + self._test_multi_zones(tzstrs, tzids, func, values, start) + + def testMultiZoneStartName(self): + self._testMultizone(start=True, test_type='name') + + def testMultiZoneEndName(self): + self._testMultizone(start=False, test_type='name') + + def testMultiZoneStartOffset(self): + self._testMultizone(start=True, test_type='offset') + + def testMultiZoneEndOffset(self): + self._testMultizone(start=False, test_type='offset') + + def testMultiZoneStartDST(self): + self._testMultizone(start=True, test_type='dst') + + def testMultiZoneEndDST(self): + self._testMultizone(start=False, test_type='dst') + + def testMultiZoneKeys(self): + est_str = self._gettz_str('America/New_York') + pst_str = self._gettz_str('America/Los_Angeles') + tzic = tz.tzical(StringIO('\n'.join((est_str, pst_str)))) + + # Sort keys because they are in a random order, being dictionary keys + keys = sorted(tzic.keys()) + + self.assertEqual(keys, ['US-Eastern', 'US-Pacific']) + + # Test error conditions + def testEmptyString(self): + with self.assertRaises(ValueError): + tz.tzical(StringIO("")) + + def testMultiZoneGet(self): + tzic = 
tz.tzical(StringIO(TZICAL_EST5EDT + TZICAL_PST8PDT)) + + with self.assertRaises(ValueError): + tzic.get() + + def testDtstartDate(self): + tz_str = self._tzstr_dtstart_with_params('America/New_York', + 'VALUE=DATE') + with self.assertRaises(ValueError): + tz.tzical(StringIO(tz_str)) + + def testDtstartTzid(self): + tz_str = self._tzstr_dtstart_with_params('America/New_York', + 'TZID=UTC') + with self.assertRaises(ValueError): + tz.tzical(StringIO(tz_str)) + + def testDtstartBadParam(self): + tz_str = self._tzstr_dtstart_with_params('America/New_York', + 'FOO=BAR') + with self.assertRaises(ValueError): + tz.tzical(StringIO(tz_str)) + + # Test Parsing + def testGap(self): + tzic = tz.tzical(StringIO('\n'.join((TZICAL_EST5EDT, TZICAL_PST8PDT)))) + + keys = sorted(tzic.keys()) + self.assertEqual(keys, ['US-Eastern', 'US-Pacific']) + + +class TZTest(unittest.TestCase): + def testFileStart1(self): + tzc = tz.tzfile(BytesIO(base64.b64decode(TZFILE_EST5EDT))) + self.assertEqual(datetime(2003, 4, 6, 1, 59, tzinfo=tzc).tzname(), "EST") + self.assertEqual(datetime(2003, 4, 6, 2, 00, tzinfo=tzc).tzname(), "EDT") + + def testFileEnd1(self): + tzc = tz.tzfile(BytesIO(base64.b64decode(TZFILE_EST5EDT))) + self.assertEqual(datetime(2003, 10, 26, 0, 59, tzinfo=tzc).tzname(), + "EDT") + end_est = tz.enfold(datetime(2003, 10, 26, 1, 00, tzinfo=tzc)) + self.assertEqual(end_est.tzname(), "EST") + + def testFileLastTransition(self): + # After the last transition, it goes to standard time in perpetuity + tzc = tz.tzfile(BytesIO(base64.b64decode(TZFILE_EST5EDT))) + self.assertEqual(datetime(2037, 10, 25, 0, 59, tzinfo=tzc).tzname(), + "EDT") + + last_date = tz.enfold(datetime(2037, 10, 25, 1, 00, tzinfo=tzc), fold=1) + self.assertEqual(last_date.tzname(), + "EST") + + self.assertEqual(datetime(2038, 5, 25, 12, 0, tzinfo=tzc).tzname(), + "EST") + + def testInvalidFile(self): + # Should throw a ValueError if an invalid file is passed + with self.assertRaises(ValueError): + 
tz.tzfile(BytesIO(b'BadFile')) + + def testFilestreamWithNameRepr(self): + # If fileobj is a filestream with a "name" attribute this name should + # be reflected in the tz object's repr + fileobj = BytesIO(base64.b64decode(TZFILE_EST5EDT)) + fileobj.name = 'foo' + tzc = tz.tzfile(fileobj) + self.assertEqual(repr(tzc), 'tzfile(' + repr('foo') + ')') + + def testRoundNonFullMinutes(self): + # This timezone has an offset of 5992 seconds in 1900-01-01. + tzc = tz.tzfile(BytesIO(base64.b64decode(EUROPE_HELSINKI))) + self.assertEqual(str(datetime(1900, 1, 1, 0, 0, tzinfo=tzc)), + "1900-01-01 00:00:00+01:40") + + def testLeapCountDecodesProperly(self): + # This timezone has leapcnt, and failed to decode until + # Eugene Oden notified about the issue. + + # As leap information is currently unused (and unstored) by tzfile() we + # can only indirectly test this: Take advantage of tzfile() not closing + # the input file if handed in as an opened file and assert that the + # full file content has been read by tzfile(). Note: For this test to + # work NEW_YORK must be in TZif version 1 format i.e. no more data + # after TZif v1 header + data has been read + fileobj = BytesIO(base64.b64decode(NEW_YORK)) + tz.tzfile(fileobj) + # we expect no remaining file content now, i.e. 
zero-length; if there's + # still data we haven't read the file format correctly + remaining_tzfile_content = fileobj.read() + self.assertEqual(len(remaining_tzfile_content), 0) + + def testIsStd(self): + # NEW_YORK tzfile contains this isstd information: + isstd_expected = (0, 0, 0, 1) + tzc = tz.tzfile(BytesIO(base64.b64decode(NEW_YORK))) + # gather the actual information as parsed by the tzfile class + isstd = [] + for ttinfo in tzc._ttinfo_list: + # ttinfo objects contain boolean values + isstd.append(int(ttinfo.isstd)) + # ttinfo list may contain more entries than isstd file content + isstd = tuple(isstd[:len(isstd_expected)]) + self.assertEqual( + isstd_expected, isstd, + "isstd UTC/local indicators parsed: %s != tzfile contents: %s" + % (isstd, isstd_expected)) + + def testGMTHasNoDaylight(self): + # tz.tzstr("GMT+2") improperly considered daylight saving time. + # Issue reported by Lennart Regebro. + dt = datetime(2007, 8, 6, 4, 10) + self.assertEqual(tz.gettz("GMT+2").dst(dt), timedelta(0)) + + def testGMTOffset(self): + # GMT and UTC offsets have inverted signal when compared to the + # usual TZ variable handling. 
+ dt = datetime(2007, 8, 6, 4, 10, tzinfo=tz.tzutc()) + self.assertEqual(dt.astimezone(tz=tz.tzstr("GMT+2")), + datetime(2007, 8, 6, 6, 10, tzinfo=tz.tzstr("GMT+2"))) + self.assertEqual(dt.astimezone(tz=tz.gettz("UTC-2")), + datetime(2007, 8, 6, 2, 10, tzinfo=tz.tzstr("UTC-2"))) + + @unittest.skipIf(IS_WIN, "requires Unix") + @unittest.skipUnless(TZEnvContext.tz_change_allowed(), + TZEnvContext.tz_change_disallowed_message()) + def testTZSetDoesntCorrupt(self): + # if we start in non-UTC then tzset UTC make sure parse doesn't get + # confused + with TZEnvContext('UTC'): + # this should parse to UTC timezone not the original timezone + dt = parse('2014-07-20T12:34:56+00:00') + self.assertEqual(str(dt), '2014-07-20 12:34:56+00:00') + + +@unittest.skipUnless(IS_WIN, "Requires Windows") +class TzWinTest(unittest.TestCase, TzWinFoldMixin): + def setUp(self): + self.tzclass = tzwin.tzwin + + def testTzResLoadName(self): + # This may not work right on non-US locales. + tzr = tzwin.tzres() + self.assertEqual(tzr.load_name(112), "Eastern Standard Time") + + def testTzResNameFromString(self): + tzr = tzwin.tzres() + self.assertEqual(tzr.name_from_string('@tzres.dll,-221'), + 'Alaskan Daylight Time') + + self.assertEqual(tzr.name_from_string('Samoa Daylight Time'), + 'Samoa Daylight Time') + + with self.assertRaises(ValueError): + tzr.name_from_string('@tzres.dll,100') + + def testIsdstZoneWithNoDaylightSaving(self): + tz = tzwin.tzwin("UTC") + dt = parse("2013-03-06 19:08:15") + self.assertFalse(tz._isdst(dt)) + + def testOffset(self): + tz = tzwin.tzwin("Cape Verde Standard Time") + self.assertEqual(tz.utcoffset(datetime(1995, 5, 21, 12, 9, 13)), + timedelta(-1, 82800)) + + def testTzwinName(self): + # https://github.com/dateutil/dateutil/issues/143 + tw = tz.tzwin('Eastern Standard Time') + + # Cover the transitions for at least two years. 
+ ESTs = 'Eastern Standard Time' + EDTs = 'Eastern Daylight Time' + transition_dates = [(datetime(2015, 3, 8, 0, 59), ESTs), + (datetime(2015, 3, 8, 3, 1), EDTs), + (datetime(2015, 11, 1, 0, 59), EDTs), + (datetime(2015, 11, 1, 3, 1), ESTs), + (datetime(2016, 3, 13, 0, 59), ESTs), + (datetime(2016, 3, 13, 3, 1), EDTs), + (datetime(2016, 11, 6, 0, 59), EDTs), + (datetime(2016, 11, 6, 3, 1), ESTs)] + + for t_date, expected in transition_dates: + self.assertEqual(t_date.replace(tzinfo=tw).tzname(), expected) + + def testTzwinRepr(self): + tw = tz.tzwin('Yakutsk Standard Time') + self.assertEqual(repr(tw), 'tzwin(' + + repr('Yakutsk Standard Time') + ')') + + def testTzWinEquality(self): + # https://github.com/dateutil/dateutil/issues/151 + tzwin_names = ('Eastern Standard Time', + 'West Pacific Standard Time', + 'Yakutsk Standard Time', + 'Iran Standard Time', + 'UTC') + + for tzwin_name in tzwin_names: + # Get two different instances to compare + tw1 = tz.tzwin(tzwin_name) + tw2 = tz.tzwin(tzwin_name) + + self.assertEqual(tw1, tw2) + + def testTzWinInequality(self): + # https://github.com/dateutil/dateutil/issues/151 + # Note these last two currently differ only in their name. + tzwin_names = (('Eastern Standard Time', 'Yakutsk Standard Time'), + ('Greenwich Standard Time', 'GMT Standard Time'), + ('GMT Standard Time', 'UTC'), + ('E. 
South America Standard Time', + 'Argentina Standard Time')) + + for tzwn1, tzwn2 in tzwin_names: + # Get two different instances to compare + tw1 = tz.tzwin(tzwn1) + tw2 = tz.tzwin(tzwn2) + + self.assertNotEqual(tw1, tw2) + + def testTzWinEqualityInvalid(self): + # Compare to objects that do not implement comparison with this + # (should default to False) + UTC = tz.tzutc() + EST = tz.tzwin('Eastern Standard Time') + + self.assertFalse(EST == UTC) + self.assertFalse(EST == 1) + self.assertFalse(UTC == EST) + + self.assertTrue(EST != UTC) + self.assertTrue(EST != 1) + + def testTzWinInequalityUnsupported(self): + # Compare it to an object that is promiscuous about equality, but for + # which tzwin does not implement an equality operator. + EST = tz.tzwin('Eastern Standard Time') + self.assertTrue(EST == ComparesEqual) + self.assertFalse(EST != ComparesEqual) + + def testTzwinTimeOnlyDST(self): + # For zones with DST, .dst() should return None + tw_est = tz.tzwin('Eastern Standard Time') + self.assertIs(dt_time(14, 10, tzinfo=tw_est).dst(), None) + + # This zone has no DST, so .dst() can return 0 + tw_sast = tz.tzwin('South Africa Standard Time') + self.assertEqual(dt_time(14, 10, tzinfo=tw_sast).dst(), + timedelta(0)) + + def testTzwinTimeOnlyUTCOffset(self): + # For zones with DST, .utcoffset() should return None + tw_est = tz.tzwin('Eastern Standard Time') + self.assertIs(dt_time(14, 10, tzinfo=tw_est).utcoffset(), None) + + # This zone has no DST, so .utcoffset() returns standard offset + tw_sast = tz.tzwin('South Africa Standard Time') + self.assertEqual(dt_time(14, 10, tzinfo=tw_sast).utcoffset(), + timedelta(hours=2)) + + def testTzwinTimeOnlyTZName(self): + # For zones with DST, the name defaults to standard time + tw_est = tz.tzwin('Eastern Standard Time') + self.assertEqual(dt_time(14, 10, tzinfo=tw_est).tzname(), + 'Eastern Standard Time') + + # For zones with no DST, this should work normally. 
+ tw_sast = tz.tzwin('South Africa Standard Time') + self.assertEqual(dt_time(14, 10, tzinfo=tw_sast).tzname(), + 'South Africa Standard Time') + + +@unittest.skipUnless(IS_WIN, "Requires Windows") +@unittest.skipUnless(TZWinContext.tz_change_allowed(), + TZWinContext.tz_change_disallowed_message()) +class TzWinLocalTest(unittest.TestCase, TzWinFoldMixin): + + def setUp(self): + self.tzclass = tzwin.tzwinlocal + self.context = TZWinContext + + def get_args(self, tzname): + return () + + def testLocal(self): + # Not sure how to pin a local time zone, so for now we're just going + # to run this and make sure it doesn't raise an error + # See Github Issue #135: https://github.com/dateutil/dateutil/issues/135 + datetime.now(tzwin.tzwinlocal()) + + def testTzwinLocalUTCOffset(self): + with TZWinContext('Eastern Standard Time'): + tzwl = tzwin.tzwinlocal() + self.assertEqual(datetime(2014, 3, 11, tzinfo=tzwl).utcoffset(), + timedelta(hours=-4)) + + def testTzwinLocalName(self): + # https://github.com/dateutil/dateutil/issues/143 + ESTs = 'Eastern Standard Time' + EDTs = 'Eastern Daylight Time' + transition_dates = [(datetime(2015, 3, 8, 0, 59), ESTs), + (datetime(2015, 3, 8, 3, 1), EDTs), + (datetime(2015, 11, 1, 0, 59), EDTs), + (datetime(2015, 11, 1, 3, 1), ESTs), + (datetime(2016, 3, 13, 0, 59), ESTs), + (datetime(2016, 3, 13, 3, 1), EDTs), + (datetime(2016, 11, 6, 0, 59), EDTs), + (datetime(2016, 11, 6, 3, 1), ESTs)] + + with TZWinContext('Eastern Standard Time'): + tw = tz.tzwinlocal() + + for t_date, expected in transition_dates: + self.assertEqual(t_date.replace(tzinfo=tw).tzname(), expected) + + def testTzWinLocalRepr(self): + tw = tz.tzwinlocal() + self.assertEqual(repr(tw), 'tzwinlocal()') + + def testTzwinLocalRepr(self): + # https://github.com/dateutil/dateutil/issues/143 + with TZWinContext('Eastern Standard Time'): + tw = tz.tzwinlocal() + + self.assertEqual(str(tw), 'tzwinlocal(' + + repr('Eastern Standard Time') + ')') + + with TZWinContext('Pacific 
Standard Time'): + tw = tz.tzwinlocal() + + self.assertEqual(str(tw), 'tzwinlocal(' + + repr('Pacific Standard Time') + ')') + + def testTzwinLocalEquality(self): + tw_est = tz.tzwin('Eastern Standard Time') + tw_pst = tz.tzwin('Pacific Standard Time') + + with TZWinContext('Eastern Standard Time'): + twl1 = tz.tzwinlocal() + twl2 = tz.tzwinlocal() + + self.assertEqual(twl1, twl2) + self.assertEqual(twl1, tw_est) + self.assertNotEqual(twl1, tw_pst) + + with TZWinContext('Pacific Standard Time'): + twl1 = tz.tzwinlocal() + twl2 = tz.tzwinlocal() + tw = tz.tzwin('Pacific Standard Time') + + self.assertEqual(twl1, twl2) + self.assertEqual(twl1, tw) + self.assertEqual(twl1, tw_pst) + self.assertNotEqual(twl1, tw_est) + + def testTzwinLocalTimeOnlyDST(self): + # For zones with DST, .dst() should return None + with TZWinContext('Eastern Standard Time'): + twl = tz.tzwinlocal() + self.assertIs(dt_time(14, 10, tzinfo=twl).dst(), None) + + # This zone has no DST, so .dst() can return 0 + with TZWinContext('South Africa Standard Time'): + twl = tz.tzwinlocal() + self.assertEqual(dt_time(14, 10, tzinfo=twl).dst(), timedelta(0)) + + def testTzwinLocalTimeOnlyUTCOffset(self): + # For zones with DST, .utcoffset() should return None + with TZWinContext('Eastern Standard Time'): + twl = tz.tzwinlocal() + self.assertIs(dt_time(14, 10, tzinfo=twl).utcoffset(), None) + + # This zone has no DST, so .utcoffset() returns standard offset + with TZWinContext('South Africa Standard Time'): + twl = tz.tzwinlocal() + self.assertEqual(dt_time(14, 10, tzinfo=twl).utcoffset(), + timedelta(hours=2)) + + def testTzwinLocalTimeOnlyTZName(self): + # For zones with DST, the name defaults to standard time + with TZWinContext('Eastern Standard Time'): + twl = tz.tzwinlocal() + self.assertEqual(dt_time(14, 10, tzinfo=twl).tzname(), + 'Eastern Standard Time') + + # For zones with no DST, this should work normally. 
+ with TZWinContext('South Africa Standard Time'): + twl = tz.tzwinlocal() + self.assertEqual(dt_time(14, 10, tzinfo=twl).tzname(), + 'South Africa Standard Time') + + +class TzPickleTest(PicklableMixin, unittest.TestCase): + _asfile = False + + def setUp(self): + self.assertPicklable = partial(self.assertPicklable, + asfile=self._asfile) + + def testPickleTzUTC(self): + self.assertPicklable(tz.tzutc(), singleton=True) + + def testPickleTzOffsetZero(self): + self.assertPicklable(tz.tzoffset('UTC', 0), singleton=True) + + def testPickleTzOffsetPos(self): + self.assertPicklable(tz.tzoffset('UTC+1', 3600), singleton=True) + + def testPickleTzOffsetNeg(self): + self.assertPicklable(tz.tzoffset('UTC-1', -3600), singleton=True) + + @pytest.mark.tzlocal + def testPickleTzLocal(self): + self.assertPicklable(tz.tzlocal()) + + def testPickleTzFileEST5EDT(self): + tzc = tz.tzfile(BytesIO(base64.b64decode(TZFILE_EST5EDT))) + self.assertPicklable(tzc) + + def testPickleTzFileEurope_Helsinki(self): + tzc = tz.tzfile(BytesIO(base64.b64decode(EUROPE_HELSINKI))) + self.assertPicklable(tzc) + + def testPickleTzFileNew_York(self): + tzc = tz.tzfile(BytesIO(base64.b64decode(NEW_YORK))) + self.assertPicklable(tzc) + + @unittest.skip("Known failure") + def testPickleTzICal(self): + tzc = tz.tzical(StringIO(TZICAL_EST5EDT)).get() + self.assertPicklable(tzc) + + def testPickleTzGettz(self): + self.assertPicklable(tz.gettz('America/New_York')) + + def testPickleZoneFileGettz(self): + zoneinfo_file = zoneinfo.get_zonefile_instance() + tzi = zoneinfo_file.get('America/New_York') + self.assertIsNot(tzi, None) + self.assertPicklable(tzi) + + +class TzPickleFileTest(TzPickleTest): + """ Run all the TzPickleTest tests, using a temporary file """ + _asfile = True + + +class DatetimeAmbiguousTest(unittest.TestCase): + """ Test the datetime_exists / datetime_ambiguous functions """ + + def testNoTzSpecified(self): + with self.assertRaises(ValueError): + tz.datetime_ambiguous(datetime(2016, 4, 1, 2, 
9)) + + def _get_no_support_tzinfo_class(self, dt_start, dt_end, dst_only=False): + # Generates a class of tzinfo with no support for is_ambiguous + # where dates between dt_start and dt_end are ambiguous. + + class FoldingTzInfo(tzinfo): + def utcoffset(self, dt): + if not dst_only: + dt_n = dt.replace(tzinfo=None) + + if dt_start <= dt_n < dt_end and getattr(dt_n, 'fold', 0): + return timedelta(hours=-1) + + return timedelta(hours=0) + + def dst(self, dt): + dt_n = dt.replace(tzinfo=None) + + if dt_start <= dt_n < dt_end and getattr(dt_n, 'fold', 0): + return timedelta(hours=1) + else: + return timedelta(0) + + return FoldingTzInfo + + def _get_no_support_tzinfo(self, dt_start, dt_end, dst_only=False): + return self._get_no_support_tzinfo_class(dt_start, dt_end, dst_only)() + + def testNoSupportAmbiguityFoldNaive(self): + dt_start = datetime(2018, 9, 1, 1, 0) + dt_end = datetime(2018, 9, 1, 2, 0) + + tzi = self._get_no_support_tzinfo(dt_start, dt_end) + + self.assertTrue(tz.datetime_ambiguous(datetime(2018, 9, 1, 1, 30), + tz=tzi)) + + def testNoSupportAmbiguityFoldAware(self): + dt_start = datetime(2018, 9, 1, 1, 0) + dt_end = datetime(2018, 9, 1, 2, 0) + + tzi = self._get_no_support_tzinfo(dt_start, dt_end) + + self.assertTrue(tz.datetime_ambiguous(datetime(2018, 9, 1, 1, 30, + tzinfo=tzi))) + + def testNoSupportAmbiguityUnambiguousNaive(self): + dt_start = datetime(2018, 9, 1, 1, 0) + dt_end = datetime(2018, 9, 1, 2, 0) + + tzi = self._get_no_support_tzinfo(dt_start, dt_end) + + self.assertFalse(tz.datetime_ambiguous(datetime(2018, 10, 1, 12, 30), + tz=tzi)) + + def testNoSupportAmbiguityUnambiguousAware(self): + dt_start = datetime(2018, 9, 1, 1, 0) + dt_end = datetime(2018, 9, 1, 2, 0) + + tzi = self._get_no_support_tzinfo(dt_start, dt_end) + + self.assertFalse(tz.datetime_ambiguous(datetime(2018, 10, 1, 12, 30, + tzinfo=tzi))) + + def testNoSupportAmbiguityFoldDSTOnly(self): + dt_start = datetime(2018, 9, 1, 1, 0) + dt_end = datetime(2018, 9, 1, 2, 0) + + 
tzi = self._get_no_support_tzinfo(dt_start, dt_end, dst_only=True) + + self.assertTrue(tz.datetime_ambiguous(datetime(2018, 9, 1, 1, 30), + tz=tzi)) + + def testNoSupportAmbiguityUnambiguousDSTOnly(self): + dt_start = datetime(2018, 9, 1, 1, 0) + dt_end = datetime(2018, 9, 1, 2, 0) + + tzi = self._get_no_support_tzinfo(dt_start, dt_end, dst_only=True) + + self.assertFalse(tz.datetime_ambiguous(datetime(2018, 10, 1, 12, 30), + tz=tzi)) + + def testSupportAmbiguityFoldNaive(self): + tzi = tz.gettz('US/Eastern') + + dt = datetime(2011, 11, 6, 1, 30) + + self.assertTrue(tz.datetime_ambiguous(dt, tz=tzi)) + + def testSupportAmbiguityFoldAware(self): + tzi = tz.gettz('US/Eastern') + + dt = datetime(2011, 11, 6, 1, 30, tzinfo=tzi) + + self.assertTrue(tz.datetime_ambiguous(dt)) + + def testSupportAmbiguityUnambiguousAware(self): + tzi = tz.gettz('US/Eastern') + + dt = datetime(2011, 11, 6, 4, 30) + + self.assertFalse(tz.datetime_ambiguous(dt, tz=tzi)) + + def testSupportAmbiguityUnambiguousNaive(self): + tzi = tz.gettz('US/Eastern') + + dt = datetime(2011, 11, 6, 4, 30, tzinfo=tzi) + + self.assertFalse(tz.datetime_ambiguous(dt)) + + def _get_ambig_error_tzinfo(self, dt_start, dt_end, dst_only=False): + cTzInfo = self._get_no_support_tzinfo_class(dt_start, dt_end, dst_only) + + # Takes the wrong number of arguments and raises an error anyway. 
+ class FoldTzInfoRaises(cTzInfo): + def is_ambiguous(self, dt, other_arg): + raise NotImplementedError('This is not implemented') + + return FoldTzInfoRaises() + + def testIncompatibleAmbiguityFoldNaive(self): + dt_start = datetime(2018, 9, 1, 1, 0) + dt_end = datetime(2018, 9, 1, 2, 0) + + tzi = self._get_ambig_error_tzinfo(dt_start, dt_end) + + self.assertTrue(tz.datetime_ambiguous(datetime(2018, 9, 1, 1, 30), + tz=tzi)) + + def testIncompatibleAmbiguityFoldAware(self): + dt_start = datetime(2018, 9, 1, 1, 0) + dt_end = datetime(2018, 9, 1, 2, 0) + + tzi = self._get_ambig_error_tzinfo(dt_start, dt_end) + + self.assertTrue(tz.datetime_ambiguous(datetime(2018, 9, 1, 1, 30, + tzinfo=tzi))) + + def testIncompatibleAmbiguityUnambiguousNaive(self): + dt_start = datetime(2018, 9, 1, 1, 0) + dt_end = datetime(2018, 9, 1, 2, 0) + + tzi = self._get_ambig_error_tzinfo(dt_start, dt_end) + + self.assertFalse(tz.datetime_ambiguous(datetime(2018, 10, 1, 12, 30), + tz=tzi)) + + def testIncompatibleAmbiguityUnambiguousAware(self): + dt_start = datetime(2018, 9, 1, 1, 0) + dt_end = datetime(2018, 9, 1, 2, 0) + + tzi = self._get_ambig_error_tzinfo(dt_start, dt_end) + + self.assertFalse(tz.datetime_ambiguous(datetime(2018, 10, 1, 12, 30, + tzinfo=tzi))) + + def testIncompatibleAmbiguityFoldDSTOnly(self): + dt_start = datetime(2018, 9, 1, 1, 0) + dt_end = datetime(2018, 9, 1, 2, 0) + + tzi = self._get_ambig_error_tzinfo(dt_start, dt_end, dst_only=True) + + self.assertTrue(tz.datetime_ambiguous(datetime(2018, 9, 1, 1, 30), + tz=tzi)) + + def testIncompatibleAmbiguityUnambiguousDSTOnly(self): + dt_start = datetime(2018, 9, 1, 1, 0) + dt_end = datetime(2018, 9, 1, 2, 0) + + tzi = self._get_ambig_error_tzinfo(dt_start, dt_end, dst_only=True) + + self.assertFalse(tz.datetime_ambiguous(datetime(2018, 10, 1, 12, 30), + tz=tzi)) + + def testSpecifiedTzOverridesAttached(self): + # If a tz is specified, the datetime will be treated as naive. 
+ + # This is not ambiguous in the local zone + dt = datetime(2011, 11, 6, 1, 30, tzinfo=tz.gettz('Australia/Sydney')) + + self.assertFalse(tz.datetime_ambiguous(dt)) + + tzi = tz.gettz('US/Eastern') + self.assertTrue(tz.datetime_ambiguous(dt, tz=tzi)) + + +class DatetimeExistsTest(unittest.TestCase): + def testNoTzSpecified(self): + with self.assertRaises(ValueError): + tz.datetime_exists(datetime(2016, 4, 1, 2, 9)) + + def testInGapNaive(self): + tzi = tz.gettz('Australia/Sydney') + + dt = datetime(2012, 10, 7, 2, 30) + + self.assertFalse(tz.datetime_exists(dt, tz=tzi)) + + def testInGapAware(self): + tzi = tz.gettz('Australia/Sydney') + + dt = datetime(2012, 10, 7, 2, 30, tzinfo=tzi) + + self.assertFalse(tz.datetime_exists(dt)) + + def testExistsNaive(self): + tzi = tz.gettz('Australia/Sydney') + + dt = datetime(2012, 10, 7, 10, 30) + + self.assertTrue(tz.datetime_exists(dt, tz=tzi)) + + def testExistsAware(self): + tzi = tz.gettz('Australia/Sydney') + + dt = datetime(2012, 10, 7, 10, 30, tzinfo=tzi) + + self.assertTrue(tz.datetime_exists(dt)) + + def testSpecifiedTzOverridesAttached(self): + EST = tz.gettz('US/Eastern') + AEST = tz.gettz('Australia/Sydney') + + dt = datetime(2012, 10, 7, 2, 30, tzinfo=EST) # This time exists + + self.assertFalse(tz.datetime_exists(dt, tz=AEST)) + + +class EnfoldTest(unittest.TestCase): + def testEnterFoldDefault(self): + dt = tz.enfold(datetime(2020, 1, 19, 3, 32)) + + self.assertEqual(dt.fold, 1) + + def testEnterFold(self): + dt = tz.enfold(datetime(2020, 1, 19, 3, 32), fold=1) + + self.assertEqual(dt.fold, 1) + + def testExitFold(self): + dt = tz.enfold(datetime(2020, 1, 19, 3, 32), fold=0) + + # Before Python 3.6, dt.fold won't exist if fold is 0. 
+ self.assertEqual(getattr(dt, 'fold', 0), 0) + + +@pytest.mark.tz_resolve_imaginary +class ImaginaryDateTest(unittest.TestCase): + def testCanberraForward(self): + tzi = tz.gettz('Australia/Canberra') + dt = datetime(2018, 10, 7, 2, 30, tzinfo=tzi) + dt_act = tz.resolve_imaginary(dt) + dt_exp = datetime(2018, 10, 7, 3, 30, tzinfo=tzi) + self.assertEqual(dt_act, dt_exp) + + def testLondonForward(self): + tzi = tz.gettz('Europe/London') + dt = datetime(2018, 3, 25, 1, 30, tzinfo=tzi) + dt_act = tz.resolve_imaginary(dt) + dt_exp = datetime(2018, 3, 25, 2, 30, tzinfo=tzi) + self.assertEqual(dt_act, dt_exp) + + def testKeivForward(self): + tzi = tz.gettz('Europe/Kiev') + dt = datetime(2018, 3, 25, 3, 30, tzinfo=tzi) + dt_act = tz.resolve_imaginary(dt) + dt_exp = datetime(2018, 3, 25, 4, 30, tzinfo=tzi) + self.assertEqual(dt_act, dt_exp) + + +@pytest.mark.tz_resolve_imaginary +@pytest.mark.parametrize('dt', [ + datetime(2017, 11, 5, 1, 30, tzinfo=tz.gettz('America/New_York')), + datetime(2018, 10, 28, 1, 30, tzinfo=tz.gettz('Europe/London')), + datetime(2017, 4, 2, 2, 30, tzinfo=tz.gettz('Australia/Sydney')), +]) +def test_resolve_imaginary_ambiguous(dt): + assert tz.resolve_imaginary(dt) is dt + + dt_f = tz.enfold(dt) + assert dt is not dt_f + assert tz.resolve_imaginary(dt_f) is dt_f + + +@pytest.mark.tz_resolve_imaginary +@pytest.mark.parametrize('dt', [ + datetime(2017, 6, 2, 12, 30, tzinfo=tz.gettz('America/New_York')), + datetime(2018, 4, 2, 9, 30, tzinfo=tz.gettz('Europe/London')), + datetime(2017, 2, 2, 16, 30, tzinfo=tz.gettz('Australia/Sydney')), + datetime(2017, 12, 2, 12, 30, tzinfo=tz.gettz('America/New_York')), + datetime(2018, 12, 2, 9, 30, tzinfo=tz.gettz('Europe/London')), + datetime(2017, 6, 2, 16, 30, tzinfo=tz.gettz('Australia/Sydney')), + datetime(2025, 9, 25, 1, 17, tzinfo=tz.tzutc()), + datetime(2025, 9, 25, 1, 17, tzinfo=tz.tzoffset('EST', -18000)), + datetime(2019, 3, 4, tzinfo=None) +]) +def test_resolve_imaginary_existing(dt): + assert 
tz.resolve_imaginary(dt) is dt + + +def __get_kiritimati_resolve_imaginary_test(): + # In the 2018d release of the IANA database, the Kiritimati "imaginary day" + # data was corrected, so if the system zoneinfo is older than 2018d, the + # Kiritimati test will fail. + + tzi = tz.gettz('Pacific/Kiritimati') + new_version = False + if not tz.datetime_exists(datetime(1995, 1, 1, 12, 30), tzi): + zif = zoneinfo.get_zonefile_instance() + if zif.metadata is not None: + new_version = zif.metadata['tzversion'] >= '2018d' + + if new_version: + tzi = zif.get('Pacific/Kiritimati') + else: + new_version = True + + if new_version: + dates = (datetime(1994, 12, 31, 12, 30), datetime(1995, 1, 1, 12, 30)) + else: + dates = (datetime(1995, 1, 1, 12, 30), datetime(1995, 1, 2, 12, 30)) + + return (tzi, ) + dates + + +@pytest.mark.tz_resolve_imaginary +@pytest.mark.parametrize('tzi, dt, dt_exp', [ + (tz.gettz('Europe/London'), + datetime(2018, 3, 25, 1, 30), datetime(2018, 3, 25, 2, 30)), + (tz.gettz('America/New_York'), + datetime(2017, 3, 12, 2, 30), datetime(2017, 3, 12, 3, 30)), + (tz.gettz('Australia/Sydney'), + datetime(2014, 10, 5, 2, 0), datetime(2014, 10, 5, 3, 0)), + __get_kiritimati_resolve_imaginary_test(), +]) +def test_resolve_imaginary(tzi, dt, dt_exp): + dt = dt.replace(tzinfo=tzi) + dt_exp = dt_exp.replace(tzinfo=tzi) + + dt_r = tz.resolve_imaginary(dt) + assert dt_r == dt_exp + assert dt_r.tzname() == dt_exp.tzname() + assert dt_r.utcoffset() == dt_exp.utcoffset() + + +@pytest.mark.xfail +@pytest.mark.tz_resolve_imaginary +def test_resolve_imaginary_monrovia(): + # See GH #582 - When that is resolved, move this into test_resolve_imaginary + tzi = tz.gettz('Africa/Monrovia') + dt = datetime(1972, 1, 7, hour=0, minute=30, second=0, tzinfo=tzi) + dt_exp = datetime(1972, 1, 7, hour=1, minute=14, second=30, tzinfo=tzi) + + dt_r = tz.resolve_imaginary(dt) + assert dt_r == dt_exp + assert dt_r.tzname() == dt_exp.tzname() + assert dt_r.utcoffset() == dt_exp.utcoffset() diff 
--git a/ext/dateutil/test/test_utils.py b/ext/dateutil/test/test_utils.py new file mode 100644 index 0000000000..fcdec1a5a4 --- /dev/null +++ b/ext/dateutil/test/test_utils.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +from __future__ import unicode_literals +from datetime import timedelta, datetime + +import unittest + +from dateutil import tz +from dateutil import utils +from dateutil.utils import within_delta + +from freezegun import freeze_time + +UTC = tz.tzutc() +NYC = tz.gettz("America/New_York") + + +class UtilsTest(unittest.TestCase): + @freeze_time(datetime(2014, 12, 15, 1, 21, 33, 4003)) + def testToday(self): + self.assertEqual(utils.today(), datetime(2014, 12, 15, 0, 0, 0)) + + @freeze_time(datetime(2014, 12, 15, 12), tz_offset=5) + def testTodayTzInfo(self): + self.assertEqual(utils.today(NYC), + datetime(2014, 12, 15, 0, 0, 0, tzinfo=NYC)) + + @freeze_time(datetime(2014, 12, 15, 23), tz_offset=5) + def testTodayTzInfoDifferentDay(self): + self.assertEqual(utils.today(UTC), + datetime(2014, 12, 16, 0, 0, 0, tzinfo=UTC)) + + def testDefaultTZInfoNaive(self): + dt = datetime(2014, 9, 14, 9, 30) + self.assertIs(utils.default_tzinfo(dt, NYC).tzinfo, + NYC) + + def testDefaultTZInfoAware(self): + dt = datetime(2014, 9, 14, 9, 30, tzinfo=UTC) + self.assertIs(utils.default_tzinfo(dt, NYC).tzinfo, + UTC) + + def testWithinDelta(self): + d1 = datetime(2016, 1, 1, 12, 14, 1, 9) + d2 = d1.replace(microsecond=15) + + self.assertTrue(within_delta(d1, d2, timedelta(seconds=1))) + self.assertFalse(within_delta(d1, d2, timedelta(microseconds=1))) + + def testWithinDeltaWithNegativeDelta(self): + d1 = datetime(2016, 1, 1) + d2 = datetime(2015, 12, 31) + + self.assertTrue(within_delta(d2, d1, timedelta(days=-1))) diff --git a/ext/dateutil/zoneinfo/dateutil-zoneinfo.tar.gz b/ext/dateutil/zoneinfo/dateutil-zoneinfo.tar.gz index e86b54fe28..6e8c05efd4 100644 Binary files a/ext/dateutil/zoneinfo/dateutil-zoneinfo.tar.gz and b/ext/dateutil/zoneinfo/dateutil-zoneinfo.tar.gz 
differ diff --git a/ext/readme.md b/ext/readme.md index add7dfaa68..bd948bf75a 100644 --- a/ext/readme.md +++ b/ext/readme.md @@ -21,11 +21,12 @@ :: | `dirtyjson` | [1.0.7](https://pypi.org/project/dirtyjson/1.0.7/) | **`medusa`** | ext | - :: | `diskcache` | [2.9.0](https://pypi.org/project/diskcache/2.9.0/) | `imdbpie` | ext | - :: | `dogpile.cache` | [0.6.7](https://pypi.org/project/dogpile.cache/0.6.7/) | **`medusa`**, `subliminal` | ext | - +:: | `enum34` | [1.1.6](https://pypi.org/project/enum34/1.1.6/) | **`medusa`** | **ext2** | Module: `enum`
Backport from Python 3.4 :: | `enzyme` | pymedusa/[665cf69](https://github.com/pymedusa/enzyme/tree/665cf6948aab1c249dcc99bd9624a81d17b3302a) | `knowit`, `subliminal` | ext | - :: | `feedparser` | [2b11c80](https://github.com/kurtmckee/feedparser/tree/2b11c8028321ed43cbaf313f83b0c94820143d66) | **`medusa`** | ext | Requires `sgmllib3k` on Python 3 -:: | **`future`**
`_dummy_thread`
`_markupbase`
`_thread`
`builtins`
`copyreg`
`html`
`http`
`libfuturize`
`libpasteurize`
`past`
`queue`
`reprlib`
`socketserver`
`tkinter`
`winreg`
`xmlrpc` | [0.16.0](https://pypi.org/project/future/0.16.0/) | **`medusa`**, `python-twitter`, ????? | **ext2** | - +:: | **`future`**
`_dummy_thread`
`_markupbase`
`_thread`
`builtins`
`copyreg`
`html`
`http`
`libfuturize`
`libpasteurize`
`past`
`queue`
`reprlib`
`socketserver`
`tkinter`
`winreg`
`xmlrpc` | [0.17.1](https://pypi.org/project/future/0.17.1/) | **`medusa`**, `python-twitter`, ????? | **ext2** | - :: | `futures` | [3.2.0](https://pypi.org/project/futures/3.2.0/) | **`medusa`**, `subliminal`, `tornado` | **ext2** | Module: `concurrent.futures`
Markers: `python_version >= '2.6' and python_version < '3'` -:: | `PyGithub` | [1.43.2](https://pypi.org/project/PyGithub/1.43.2/) | **`medusa`** | **ext2 ext3** | Module: `github`
**Removed tests** +:: | `PyGithub` | [1.43.3](https://pypi.org/project/PyGithub/1.43.3/) | **`medusa`** | **ext2 ext3** | Module: `github`
**Removed tests** :: | `gntp` | [1.0.3](https://pypi.org/project/gntp/1.0.3/) | **`medusa`** | ext | - :: | `guessit` | [3.0.3](https://pypi.org/project/guessit/3.0.3/) | **`medusa`**, `subliminal` | ext | - :: | `html5lib` | [1.0.1](https://pypi.org/project/html5lib/1.0.1/) | **`medusa`** (via `beautifulsoup4`) | ext | - @@ -45,24 +46,24 @@ :: | profilehooks.py | [1.10.0](https://pypi.org/project/profilehooks/1.10.0/) | **`medusa`** | ext | - :: | `pyjsparser` | [2.5.2](https://pypi.org/project/pyjsparser/2.5.2/) | `Js2Py` | ext | - :: | `pysrt` | [1.1.0](https://pypi.org/project/pysrt/1.1.0/) | `subliminal` | ext | - -:: | `python-dateutil` | [2.7.3](https://pypi.org/project/python-dateutil/2.7.3/) | **`medusa`**, `tvdbapiv2`, `guessit`, `imdbpie` | ext | Module: `dateutil` -:: | `python-twitter` | [3.4.2](https://pypi.org/project/python-twitter/3.4.2/) | **`medusa`** | ext | Module: `twitter` +:: | `python-dateutil` | [2.7.5](https://pypi.org/project/python-dateutil/2.7.5/) | **`medusa`**, `tvdbapiv2`, `guessit`, `imdbpie` | ext | Module: `dateutil` +:: | `python-twitter` | [3.5](https://pypi.org/project/python-twitter/3.5/) | **`medusa`** | ext | Module: `twitter` :: | `pytz` | [2018.4](https://pypi.org/project/pytz/2018.4/) | `subliminal`, `tzlocal` | ext | - :: | rarfile.py | [3.0](https://pypi.org/project/rarfile/3.0/) | **`medusa`**, `subliminal` | ext | - :: | `rebulk` | [1.0.0](https://pypi.org/project/rebulk/1.0.0/) | **`medusa`**, `guessit` | ext | - -:: | `requests` | [2.20.0](https://pypi.org/project/requests/2.20.0/) | **`medusa`**, `adba`, `pytvmaze`, `simpleanidb`, `tmdbsimple`, `traktor`, `tvdbapiv2`, `boto`, `rtorrent`, `CacheControl`, `cloudflare-scrape`, `subliminal`, `PyGithub`, `python-twitter` | ext | - +:: | `requests` | [2.20.1](https://pypi.org/project/requests/2.20.1/) | **`medusa`**, `adba`, `pytvmaze`, `simpleanidb`, `tmdbsimple`, `traktor`, `tvdbapiv2`, `boto`, `rtorrent`, `CacheControl`, `cloudflare-scrape`, `subliminal`, `PyGithub`, 
`python-twitter` | ext | - :: | `requests-oauthlib` | [1.0.0](https://pypi.org/project/requests-oauthlib/1.0.0/) | **`medusa`**, `python-twitter` | ext | Module: `requests_oauthlib` :: | sgmllib3k | [1.0.0](https://pypi.org/project/sgmllib3k/1.0.0/) | `feedparser` | **ext3** | File: `sgmllib.py` :: | singledispatch.py
`singledispatch_helpers.py` | [3.4.0.3](https://pypi.org/project/singledispatch/3.4.0.3/) | `tornado` | ext | Markers: `python_version < '3.4'` :: | six.py | [1.11.0](https://pypi.org/project/six/1.11.0/) | **`medusa`**, `tvdbapiv2`, `configobj`, `python-dateutil`, `guessit`, `html5lib`, `imdbpie`, `Js2Py`, `knowit`, `rebulk`, `subliminal`, `validators` | ext | - -:: | `stevedore` | [1.29.0](https://pypi.org/project/stevedore/1.29.0/) | `subliminal` | ext | - +:: | `stevedore` | [1.30.0](https://pypi.org/project/stevedore/1.30.0/) | `subliminal` | ext | - :: | `subliminal` | pymedusa/[78687f4](https://github.com/pymedusa/subliminal/tree/78687f45d23b1bc47fae0a5493be0198dc1fd5b5) | **`medusa`** | ext | - :: | `tornado` | [5.1.1](https://pypi.org/project/tornado/5.1.1/) | **`medusa`**, `tornroutes` | ext | - :: | `tornroutes` | [0.5.1](https://pypi.org/project/tornroutes/0.5.1/) | **`medusa`** | ext | - :: | `trans` | [2.1.0](https://pypi.org/project/trans/2.1.0/) | `imdbpie` | ext | - :: | `tzlocal` | [1.5.1](https://pypi.org/project/tzlocal/1.5.1/) | `Js2Py` | ext | - :: | `urllib3` | [1.23](https://pypi.org/project/urllib3/1.23/) | `requests`, `CacheControl` | ext | - -:: | `validators` | [0.12.2](https://pypi.org/project/validators/0.12.2/) | **`medusa`** | ext | - +:: | `validators` | [0.12.3](https://pypi.org/project/validators/0.12.3/) | **`medusa`** | ext | - :: | `webencodings` | [0.5.1](https://pypi.org/project/webencodings/0.5.1/) | `html5lib` | ext | - :: | `wrapt` | [1.10.11](https://pypi.org/project/wrapt/1.10.11/) | `deprecated` | ext | - :: | `PyYAML` | [3.13](https://pypi.org/project/PyYAML/3.13/) | `knowit` | **ext2 ext3** | Module: `yaml` diff --git a/ext/requests/__version__.py b/ext/requests/__version__.py index be8a45fe0e..803773a0fd 100644 --- a/ext/requests/__version__.py +++ b/ext/requests/__version__.py @@ -5,8 +5,8 @@ __title__ = 'requests' __description__ = 'Python HTTP for Humans.' 
__url__ = 'http://python-requests.org' -__version__ = '2.20.0' -__build__ = 0x022000 +__version__ = '2.20.1' +__build__ = 0x022001 __author__ = 'Kenneth Reitz' __author_email__ = 'me@kennethreitz.org' __license__ = 'Apache 2.0' diff --git a/ext/requests/sessions.py b/ext/requests/sessions.py index a448bd83f2..d73d700fa6 100644 --- a/ext/requests/sessions.py +++ b/ext/requests/sessions.py @@ -19,7 +19,7 @@ from .models import Request, PreparedRequest, DEFAULT_REDIRECT_LIMIT from .hooks import default_hooks, dispatch_hook from ._internal_utils import to_native_string -from .utils import to_key_val_list, default_headers +from .utils import to_key_val_list, default_headers, DEFAULT_PORTS from .exceptions import ( TooManyRedirects, InvalidSchema, ChunkedEncodingError, ContentDecodingError) @@ -128,8 +128,17 @@ def should_strip_auth(self, old_url, new_url): if (old_parsed.scheme == 'http' and old_parsed.port in (80, None) and new_parsed.scheme == 'https' and new_parsed.port in (443, None)): return False + + # Handle default port usage corresponding to scheme. 
+ changed_port = old_parsed.port != new_parsed.port + changed_scheme = old_parsed.scheme != new_parsed.scheme + default_port = (DEFAULT_PORTS.get(old_parsed.scheme, None), None) + if (not changed_scheme and old_parsed.port in default_port + and new_parsed.port in default_port): + return False + # Standard case: root URI must match - return old_parsed.port != new_parsed.port or old_parsed.scheme != new_parsed.scheme + return changed_port or changed_scheme def resolve_redirects(self, resp, req, stream=False, timeout=None, verify=True, cert=None, proxies=None, yield_requests=False, **adapter_kwargs): diff --git a/ext/requests/utils.py b/ext/requests/utils.py index 0ce7fe115c..8170a8d2c4 100644 --- a/ext/requests/utils.py +++ b/ext/requests/utils.py @@ -38,6 +38,8 @@ DEFAULT_CA_BUNDLE_PATH = certs.where() +DEFAULT_PORTS = {'http': 80, 'https': 443} + if sys.platform == 'win32': # provide a proxy_bypass version on Windows without DNS lookups @@ -264,7 +266,7 @@ def from_key_val_list(value): >>> from_key_val_list([('key', 'val')]) OrderedDict([('key', 'val')]) >>> from_key_val_list('string') - ValueError: need more than 1 value to unpack + ValueError: cannot encode objects that are not 2-tuples >>> from_key_val_list({'key': 'val'}) OrderedDict([('key', 'val')]) diff --git a/ext/stevedore/example/setup.py b/ext/stevedore/example/setup.py index 4289971f9a..702e6d4da7 100644 --- a/ext/stevedore/example/setup.py +++ b/ext/stevedore/example/setup.py @@ -17,7 +17,7 @@ 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.4', + 'Programming Language :: Python :: 3.5', 'Intended Audience :: Developers', 'Environment :: Console', ], diff --git a/ext/stevedore/example2/setup.py b/ext/stevedore/example2/setup.py index 17fe130040..bd23838ac1 100644 --- a/ext/stevedore/example2/setup.py +++ b/ext/stevedore/example2/setup.py @@ -17,7 +17,7 @@ 'Programming Language :: Python :: 2', 
'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.4', + 'Programming Language :: Python :: 3.5', 'Intended Audience :: Developers', 'Environment :: Console', ], diff --git a/ext/stevedore/sphinxext.py b/ext/stevedore/sphinxext.py index 3c9b6ce7f9..8ca88bbb43 100644 --- a/ext/stevedore/sphinxext.py +++ b/ext/stevedore/sphinxext.py @@ -18,10 +18,13 @@ from docutils.parsers import rst from docutils.parsers.rst import directives from docutils.statemachine import ViewList +from sphinx.util import logging from sphinx.util.nodes import nested_parse_with_titles from stevedore import extension +LOG = logging.getLogger(__name__) + def _get_docstring(plugin): return inspect.getdoc(plugin) or '' @@ -72,16 +75,13 @@ class ListPluginsDirective(rst.Directive): has_content = True def run(self): - env = self.state.document.settings.env - app = env.app - namespace = ' '.join(self.content).strip() - app.info('documenting plugins from %r' % namespace) + LOG.info('documenting plugins from %r' % namespace) overline_style = self.options.get('overline-style', '') underline_style = self.options.get('underline-style', '=') def report_load_failure(mgr, ep, err): - app.warn(u'Failed to load %s: %s' % (ep.module_name, err)) + LOG.warning(u'Failed to load %s: %s' % (ep.module_name, err)) mgr = extension.ExtensionManager( namespace, @@ -111,5 +111,5 @@ def report_load_failure(mgr, ep, err): def setup(app): - app.info('loading stevedore.sphinxext') + LOG.info('loading stevedore.sphinxext') app.add_directive('list-plugins', ListPluginsDirective) diff --git a/ext/twitter/__init__.py b/ext/twitter/__init__.py index c6e433f8e7..b4836243a4 100644 --- a/ext/twitter/__init__.py +++ b/ext/twitter/__init__.py @@ -22,7 +22,7 @@ __email__ = 'python-twitter@googlegroups.com' __copyright__ = 'Copyright (c) 2007-2016 The Python-Twitter Developers' __license__ = 'Apache License 2.0' -__version__ = '3.4.2' +__version__ = '3.5' __url__ = 
'https://github.com/bear/python-twitter' __download_url__ = 'https://pypi.python.org/pypi/python-twitter' __description__ = 'A Python wrapper around the Twitter API' diff --git a/ext/twitter/api.py b/ext/twitter/api.py index ecdbbc31a5..a823fc3866 100644 --- a/ext/twitter/api.py +++ b/ext/twitter/api.py @@ -2,7 +2,7 @@ # # -# Copyright 2007-2016 The Python-Twitter Developers +# Copyright 2007-2016, 2018 The Python-Twitter Developers # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -292,6 +292,8 @@ def __init__(self, requests_log.setLevel(logging.DEBUG) requests_log.propagate = True + self._session = requests.Session() + @staticmethod def GetAppOnlyAuthToken(consumer_key, consumer_secret): """ @@ -2823,8 +2825,6 @@ def UsersLookup(self, if len(uids) > 100: raise TwitterError("No more than 100 users may be requested per request.") - print(parameters) - resp = self._RequestUrl(url, 'GET', data=parameters) data = self._ParseAndCheckTwitter(resp.content.decode('utf-8')) @@ -3007,30 +3007,48 @@ def PostDirectMessage(self, Args: text: The message text to be posted. user_id: - The ID of the user who should receive the direct message. [Optional] - screen_name: - The screen name of the user who should receive the direct message. [Optional] + The ID of the user who should receive the direct message. 
return_json (bool, optional): - If True JSON data will be returned, instead of twitter.User + If True JSON data will be returned, instead of twitter.DirectMessage Returns: A twitter.DirectMessage instance representing the message posted """ - url = '%s/direct_messages/new.json' % self.base_url - data = {'text': text} - if user_id: - data['user_id'] = user_id - elif screen_name: - data['screen_name'] = screen_name - else: - raise TwitterError({'message': "Specify at least one of user_id or screen_name."}) + url = '%s/direct_messages/events/new.json' % self.base_url + + # Hack to allow some sort of backwards compatibility with older versions + # part of the fix for Issue #587 + if user_id is None and screen_name is not None: + user_id = self.GetUser(screen_name=screen_name).id + + event = { + 'event': { + 'type': 'message_create', + 'message_create': { + 'target': { + 'recipient_id': user_id, + }, + 'message_data': { + 'text': text + } + } + } + } - resp = self._RequestUrl(url, 'POST', data=data) - data = self._ParseAndCheckTwitter(resp.content.decode('utf-8')) + resp = self._RequestUrl(url, 'POST', json=event) + data = resp.json() if return_json: return data else: - return DirectMessage.NewFromJsonDict(data) + dm = DirectMessage( + created_at=data['event']['created_timestamp'], + id=data['event']['id'], + recipient_id=data['event']['message_create']['target']['recipient_id'], + sender_id=data['event']['message_create']['sender_id'], + text=data['event']['message_create']['message_data']['text'], + ) + dm._json = data + return dm def DestroyDirectMessage(self, message_id, include_entities=True, return_json=False): """Destroys the direct message specified in the required ID parameter. 
@@ -4886,7 +4904,7 @@ def _ParseAndCheckTwitter(self, json_data): raise TwitterError({'message': "Exceeded connection limit for user"}) if "Error 401 Unauthorized" in json_data: raise TwitterError({'message': "Unauthorized"}) - raise TwitterError({'Unknown error: {0}'.format(json_data)}) + raise TwitterError({'Unknown error': '{0}'.format(json_data)}) self._CheckForTwitterError(data) return data @@ -4958,20 +4976,20 @@ def _RequestUrl(self, url, verb, data=None, json=None, enforce_auth=True): if data: if 'media_ids' in data: url = self._BuildUrl(url, extra_params={'media_ids': data['media_ids']}) - resp = requests.post(url, data=data, auth=self.__auth, timeout=self._timeout, proxies=self.proxies) + resp = self._session.post(url, data=data, auth=self.__auth, timeout=self._timeout, proxies=self.proxies) elif 'media' in data: - resp = requests.post(url, files=data, auth=self.__auth, timeout=self._timeout, proxies=self.proxies) + resp = self._session.post(url, files=data, auth=self.__auth, timeout=self._timeout, proxies=self.proxies) else: - resp = requests.post(url, data=data, auth=self.__auth, timeout=self._timeout, proxies=self.proxies) + resp = self._session.post(url, data=data, auth=self.__auth, timeout=self._timeout, proxies=self.proxies) elif json: - resp = requests.post(url, json=json, auth=self.__auth, timeout=self._timeout, proxies=self.proxies) + resp = self._session.post(url, json=json, auth=self.__auth, timeout=self._timeout, proxies=self.proxies) else: resp = 0 # POST request, but without data or json elif verb == 'GET': data['tweet_mode'] = self.tweet_mode url = self._BuildUrl(url, extra_params=data) - resp = requests.get(url, auth=self.__auth, timeout=self._timeout, proxies=self.proxies) + resp = self._session.get(url, auth=self.__auth, timeout=self._timeout, proxies=self.proxies) else: resp = 0 # if not a POST or GET request diff --git a/ext/twitter/debug.py b/ext/twitter/debug.py index 342bbadeac..4e7a5f2b7d 100644 --- a/ext/twitter/debug.py +++ 
b/ext/twitter/debug.py @@ -1,9 +1,8 @@ -import twitter -from twitter import TwitterError +from twitter import Api, TwitterError import requests -class Api(twitter.Api): +class Api(Api): def DebugEndpoint(self, verb=None, endpoint=None, data=None): """ Request a url and return raw data. For testing purposes only. @@ -18,8 +17,8 @@ def DebugEndpoint(self, verb=None, endpoint=None, data=None): Returns: data """ + url = "{0}{1}".format(self.base_url, endpoint) - print(url) if verb == 'POST': if 'media_ids' in data: @@ -36,7 +35,7 @@ def DebugEndpoint(self, verb=None, endpoint=None, data=None): raw_data = requests.post( url, files=data, - auth=self._Api__auth, + auth=self.__auth, timeout=self._timeout ) except requests.RequestException as e: @@ -47,7 +46,7 @@ def DebugEndpoint(self, verb=None, endpoint=None, data=None): raw_data = requests.post( url, data=data, - auth=self._Api__auth, + auth=self.__auth, timeout=self._timeout ) except requests.RequestException as e: @@ -58,7 +57,7 @@ def DebugEndpoint(self, verb=None, endpoint=None, data=None): try: raw_data = requests.get( url, - auth=self._Api__auth, + auth=self.__auth, timeout=self._timeout) except requests.RequestException as e: diff --git a/ext/twitter/models.py b/ext/twitter/models.py index a79515df26..e8974ebe54 100644 --- a/ext/twitter/models.py +++ b/ext/twitter/models.py @@ -35,10 +35,10 @@ def __hash__(self): raise TypeError('unhashable type: {} (no id attribute)' .format(type(self))) - def AsJsonString(self): + def AsJsonString(self, ensure_ascii=True): """ Returns the TwitterModel as a JSON string based on key/value pairs returned from the AsDict() method. """ - return json.dumps(self.AsDict(), sort_keys=True) + return json.dumps(self.AsDict(), ensure_ascii=ensure_ascii, sort_keys=True) def AsDict(self): """ Create a dictionary representation of the object. 
Please see inline @@ -185,21 +185,13 @@ def __init__(self, **kwargs): self.param_defaults = { 'created_at': None, 'id': None, - 'recipient': None, 'recipient_id': None, - 'recipient_screen_name': None, - 'sender': None, 'sender_id': None, - 'sender_screen_name': None, 'text': None, } for (param, default) in self.param_defaults.items(): setattr(self, param, kwargs.get(param, default)) - if 'sender' in kwargs: - self.sender = User.NewFromJsonDict(kwargs.get('sender', None)) - if 'recipient' in kwargs: - self.recipient = User.NewFromJsonDict(kwargs.get('recipient', None)) def __repr__(self): if self.text and len(self.text) > 140: @@ -208,7 +200,7 @@ def __repr__(self): text = self.text return "DirectMessage(ID={dm_id}, Sender={sender}, Created={time}, Text='{text!r}')".format( dm_id=self.id, - sender=self.sender_screen_name, + sender=self.sender_id, time=self.created_at, text=text) diff --git a/ext/validators/__init__.py b/ext/validators/__init__.py index 4f218b75b9..e4ccfd5311 100644 --- a/ext/validators/__init__.py +++ b/ext/validators/__init__.py @@ -14,4 +14,4 @@ from .utils import ValidationFailure, validator # noqa from .uuid import uuid # noqa -__version__ = '0.12.2' +__version__ = '0.12.3' diff --git a/ext/validators/i18n/fi.py b/ext/validators/i18n/fi.py index 357d61ddec..2e5eb57801 100644 --- a/ext/validators/i18n/fi.py +++ b/ext/validators/i18n/fi.py @@ -6,13 +6,13 @@ ssn_checkmarks = '0123456789ABCDEFHJKLMNPRSTUVWXY' ssn_pattern = re.compile( r"""^ - (?P([0-2]\d|3[01]) - (0\d|1[012]) + (?P(0[1-9]|[1-2]\d|3[01]) + (0[1-9]|1[012]) (\d{{2}})) [A+-] (?P(\d{{3}})) (?P[{checkmarks}])$""".format(checkmarks=ssn_checkmarks), - re.VERBOSE | re.IGNORECASE + re.VERBOSE ) @@ -52,7 +52,7 @@ def fi_business_id(business_id): @validator -def fi_ssn(ssn): +def fi_ssn(ssn, allow_temporal_ssn=True): """ Validate a Finnish Social Security Number. 
@@ -67,11 +67,16 @@ def fi_ssn(ssn): True >>> fi_ssn('101010-0102') - ValidationFailure(func=fi_ssn, args={'ssn': '101010-0102'}) + ValidationFailure(func=fi_ssn, args=...) .. versionadded:: 0.5 :param ssn: Social Security Number to validate + :param allow_temporal_ssn: + Whether to accept temporal SSN numbers. Temporal SSN numbers are the + ones where the serial is in the range [900-999]. By default temporal + SSN numbers are valid. + """ if not ssn: return False @@ -82,6 +87,8 @@ def fi_ssn(ssn): gd = result.groupdict() checksum = int(gd['date'] + gd['serial']) return ( + int(gd['serial']) >= 2 and + (allow_temporal_ssn or int(gd['serial']) <= 899) and ssn_checkmarks[checksum % len(ssn_checkmarks)] == - gd['checksum'].upper() + gd['checksum'] ) diff --git a/ext/validators/url.py b/ext/validators/url.py index 5ca4f34cc8..5f19e2d269 100644 --- a/ext/validators/url.py +++ b/ext/validators/url.py @@ -2,88 +2,88 @@ from .utils import validator -ip_middle_octet = u"(?:\.(?:1?\d{1,2}|2[0-4]\d|25[0-5]))" -ip_last_octet = u"(?:\.(?:[1-9]\d?|1\d\d|2[0-4]\d|25[0-4]))" +ip_middle_octet = r"(?:\.(?:1?\d{1,2}|2[0-4]\d|25[0-5]))" +ip_last_octet = r"(?:\.(?:[1-9]\d?|1\d\d|2[0-4]\d|25[0-4]))" -regex = re.compile( - u"^" +regex = re.compile( # noqa: W605 + r"^" # protocol identifier - u"(?:(?:https?|ftp)://)" + r"(?:(?:https?|ftp)://)" # user:pass authentication - u"(?:[-a-z\u00a1-\uffff0-9._~%!$&'()*+,;=:]+" - u"(?::[-a-z0-9._~%!$&'()*+,;=:]*)?@)?" - u"(?:" - u"(?P" + r"(?:[-a-z\u00a1-\uffff0-9._~%!$&'()*+,;=:]+" + r"(?::[-a-z0-9._~%!$&'()*+,;=:]*)?@)?" 
+ r"(?:" + r"(?P" # IP address exclusion # private & local networks - u"(?:(?:10|127)" + ip_middle_octet + u"{2}" + ip_last_octet + u")|" - u"(?:(?:169\.254|192\.168)" + ip_middle_octet + ip_last_octet + u")|" - u"(?:172\.(?:1[6-9]|2\d|3[0-1])" + ip_middle_octet + ip_last_octet + u"))" - u"|" + r"(?:(?:10|127)" + ip_middle_octet + r"{2}" + ip_last_octet + r")|" + r"(?:(?:169\.254|192\.168)" + ip_middle_octet + ip_last_octet + r")|" + r"(?:172\.(?:1[6-9]|2\d|3[0-1])" + ip_middle_octet + ip_last_octet + r"))" + r"|" # private & local hosts - u"(?P" - u"(?:localhost))" - u"|" + r"(?P" + r"(?:localhost))" + r"|" # IP address dotted notation octets # excludes loopback network 0.0.0.0 # excludes reserved space >= 224.0.0.0 # excludes network & broadcast addresses # (first & last IP address of each class) - u"(?P" - u"(?:[1-9]\d?|1\d\d|2[01]\d|22[0-3])" - u"" + ip_middle_octet + u"{2}" - u"" + ip_last_octet + u")" - u"|" + r"(?P" + r"(?:[1-9]\d?|1\d\d|2[01]\d|22[0-3])" + r"" + ip_middle_octet + r"{2}" + r"" + ip_last_octet + r")" + r"|" # IPv6 RegEx from https://stackoverflow.com/a/17871737 - u"\[(" + r"\[(" # 1:2:3:4:5:6:7:8 - u"([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|" + r"([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|" # 1:: 1:2:3:4:5:6:7:: - u"([0-9a-fA-F]{1,4}:){1,7}:|" + r"([0-9a-fA-F]{1,4}:){1,7}:|" # 1::8 1:2:3:4:5:6::8 1:2:3:4:5:6::8 - u"([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|" + r"([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|" # 1::7:8 1:2:3:4:5::7:8 1:2:3:4:5::8 - u"([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|" + r"([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|" # 1::6:7:8 1:2:3:4::6:7:8 1:2:3:4::8 - u"([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|" + r"([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|" # 1::5:6:7:8 1:2:3::5:6:7:8 1:2:3::8 - u"([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|" + r"([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|" # 1::4:5:6:7:8 1:2::4:5:6:7:8 1:2::8 - u"([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|" + 
r"([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|" # 1::3:4:5:6:7:8 1::3:4:5:6:7:8 1::8 - u"[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|" + r"[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|" # ::2:3:4:5:6:7:8 ::2:3:4:5:6:7:8 ::8 :: - u":((:[0-9a-fA-F]{1,4}){1,7}|:)|" + r":((:[0-9a-fA-F]{1,4}){1,7}|:)|" # fe80::7:8%eth0 fe80::7:8%1 # (link-local IPv6 addresses with zone index) - u"fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|" - u"::(ffff(:0{1,4}){0,1}:){0,1}" - u"((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}" + r"fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|" + r"::(ffff(:0{1,4}){0,1}:){0,1}" + r"((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}" # ::255.255.255.255 ::ffff:255.255.255.255 ::ffff:0:255.255.255.255 # (IPv4-mapped IPv6 addresses and IPv4-translated addresses) - u"(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|" - u"([0-9a-fA-F]{1,4}:){1,4}:" - u"((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}" + r"(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|" + r"([0-9a-fA-F]{1,4}:){1,4}:" + r"((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}" # 2001:db8:3:4::192.0.2.33 64:ff9b::192.0.2.33 # (IPv4-Embedded IPv6 Address) - u"(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])" - u")\]|" + r"(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])" + r")\]|" # host name u"(?:(?:[a-z\u00a1-\uffff0-9]-?)*[a-z\u00a1-\uffff0-9]+)" # domain name u"(?:\.(?:[a-z\u00a1-\uffff0-9]-?)*[a-z\u00a1-\uffff0-9]+)*" # TLD identifier u"(?:\.(?:[a-z\u00a1-\uffff]{2,}))" - u")" + r")" # port number - u"(?::\d{2,5})?" + r"(?::\d{2,5})?" # resource path u"(?:/[-a-z\u00a1-\uffff0-9._~%!$&'()*+,;=:@/]*)?" # query string - u"(?:\?\S*)?" + r"(?:\?\S*)?" # fragment - u"(?:#\S*)?" - u"$", + r"(?:#\S*)?" + r"$", re.UNICODE | re.IGNORECASE ) diff --git a/ext2/__init__.py b/ext2/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/ext2/enum/LICENSE b/ext2/enum/LICENSE new file mode 100644 index 0000000000..9003b8850e --- /dev/null +++ b/ext2/enum/LICENSE @@ -0,0 +1,32 @@ +Copyright (c) 2013, Ethan Furman. 
+All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: + + Redistributions of source code must retain the above + copyright notice, this list of conditions and the + following disclaimer. + + Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials + provided with the distribution. + + Neither the name Ethan Furman nor the names of any + contributors may be used to endorse or promote products + derived from this software without specific prior written + permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. diff --git a/ext2/enum/README b/ext2/enum/README new file mode 100644 index 0000000000..aa2333d8df --- /dev/null +++ b/ext2/enum/README @@ -0,0 +1,3 @@ +enum34 is the new Python stdlib enum module available in Python 3.4 +backported for previous versions of Python from 2.4 to 3.3. 
+tested on 2.6, 2.7, and 3.3+ diff --git a/ext2/enum/__init__.py b/ext2/enum/__init__.py new file mode 100644 index 0000000000..d6ffb3a40f --- /dev/null +++ b/ext2/enum/__init__.py @@ -0,0 +1,837 @@ +"""Python Enumerations""" + +import sys as _sys + +__all__ = ['Enum', 'IntEnum', 'unique'] + +version = 1, 1, 6 + +pyver = float('%s.%s' % _sys.version_info[:2]) + +try: + any +except NameError: + def any(iterable): + for element in iterable: + if element: + return True + return False + +try: + from collections import OrderedDict +except ImportError: + OrderedDict = None + +try: + basestring +except NameError: + # In Python 2 basestring is the ancestor of both str and unicode + # in Python 3 it's just str, but was missing in 3.1 + basestring = str + +try: + unicode +except NameError: + # In Python 3 unicode no longer exists (it's just str) + unicode = str + +class _RouteClassAttributeToGetattr(object): + """Route attribute access on a class to __getattr__. + + This is a descriptor, used to define attributes that act differently when + accessed through an instance and through a class. Instance access remains + normal, but access to an attribute through a class will be routed to the + class's __getattr__ method; this is done by raising AttributeError. 
+ + """ + def __init__(self, fget=None): + self.fget = fget + + def __get__(self, instance, ownerclass=None): + if instance is None: + raise AttributeError() + return self.fget(instance) + + def __set__(self, instance, value): + raise AttributeError("can't set attribute") + + def __delete__(self, instance): + raise AttributeError("can't delete attribute") + + +def _is_descriptor(obj): + """Returns True if obj is a descriptor, False otherwise.""" + return ( + hasattr(obj, '__get__') or + hasattr(obj, '__set__') or + hasattr(obj, '__delete__')) + + +def _is_dunder(name): + """Returns True if a __dunder__ name, False otherwise.""" + return (name[:2] == name[-2:] == '__' and + name[2:3] != '_' and + name[-3:-2] != '_' and + len(name) > 4) + + +def _is_sunder(name): + """Returns True if a _sunder_ name, False otherwise.""" + return (name[0] == name[-1] == '_' and + name[1:2] != '_' and + name[-2:-1] != '_' and + len(name) > 2) + + +def _make_class_unpicklable(cls): + """Make the given class un-picklable.""" + def _break_on_call_reduce(self, protocol=None): + raise TypeError('%r cannot be pickled' % self) + cls.__reduce_ex__ = _break_on_call_reduce + cls.__module__ = '' + + +class _EnumDict(dict): + """Track enum member order and ensure member names are not reused. + + EnumMeta will use the names found in self._member_names as the + enumeration member names. + + """ + def __init__(self): + super(_EnumDict, self).__init__() + self._member_names = [] + + def __setitem__(self, key, value): + """Changes anything not dundered or not a descriptor. + + If a descriptor is added with the same name as an enum member, the name + is removed from _member_names (this may leave a hole in the numerical + sequence of values). + + If an enum member name is used twice, an error is raised; duplicate + values are not checked for. + + Single underscore (sunder) names are reserved. 
+ + Note: in 3.x __order__ is simply discarded as a not necessary piece + leftover from 2.x + + """ + if pyver >= 3.0 and key in ('_order_', '__order__'): + return + elif key == '__order__': + key = '_order_' + if _is_sunder(key): + if key != '_order_': + raise ValueError('_names_ are reserved for future Enum use') + elif _is_dunder(key): + pass + elif key in self._member_names: + # descriptor overwriting an enum? + raise TypeError('Attempted to reuse key: %r' % key) + elif not _is_descriptor(value): + if key in self: + # enum overwriting a descriptor? + raise TypeError('Key already defined as: %r' % self[key]) + self._member_names.append(key) + super(_EnumDict, self).__setitem__(key, value) + + +# Dummy value for Enum as EnumMeta explicity checks for it, but of course until +# EnumMeta finishes running the first time the Enum class doesn't exist. This +# is also why there are checks in EnumMeta like `if Enum is not None` +Enum = None + + +class EnumMeta(type): + """Metaclass for Enum""" + @classmethod + def __prepare__(metacls, cls, bases): + return _EnumDict() + + def __new__(metacls, cls, bases, classdict): + # an Enum class is final once enumeration items have been defined; it + # cannot be mixed with other types (int, float, etc.) if it has an + # inherited __new__ unless a new __new__ is defined (or the resulting + # class will fail). 
+ if type(classdict) is dict: + original_dict = classdict + classdict = _EnumDict() + for k, v in original_dict.items(): + classdict[k] = v + + member_type, first_enum = metacls._get_mixins_(bases) + __new__, save_new, use_args = metacls._find_new_(classdict, member_type, + first_enum) + # save enum items into separate mapping so they don't get baked into + # the new class + members = dict((k, classdict[k]) for k in classdict._member_names) + for name in classdict._member_names: + del classdict[name] + + # py2 support for definition order + _order_ = classdict.get('_order_') + if _order_ is None: + if pyver < 3.0: + try: + _order_ = [name for (name, value) in sorted(members.items(), key=lambda item: item[1])] + except TypeError: + _order_ = [name for name in sorted(members.keys())] + else: + _order_ = classdict._member_names + else: + del classdict['_order_'] + if pyver < 3.0: + _order_ = _order_.replace(',', ' ').split() + aliases = [name for name in members if name not in _order_] + _order_ += aliases + + # check for illegal enum names (any others?) + invalid_names = set(members) & set(['mro']) + if invalid_names: + raise ValueError('Invalid enum member name(s): %s' % ( + ', '.join(invalid_names), )) + + # save attributes from super classes so we know if we can take + # the shortcut of storing members in the class dict + base_attributes = set([a for b in bases for a in b.__dict__]) + # create our new Enum type + enum_class = super(EnumMeta, metacls).__new__(metacls, cls, bases, classdict) + enum_class._member_names_ = [] # names in random order + if OrderedDict is not None: + enum_class._member_map_ = OrderedDict() + else: + enum_class._member_map_ = {} # name->value map + enum_class._member_type_ = member_type + + # Reverse value->name map for hashable values. 
+ enum_class._value2member_map_ = {} + + # instantiate them, checking for duplicates as we go + # we instantiate first instead of checking for duplicates first in case + # a custom __new__ is doing something funky with the values -- such as + # auto-numbering ;) + if __new__ is None: + __new__ = enum_class.__new__ + for member_name in _order_: + value = members[member_name] + if not isinstance(value, tuple): + args = (value, ) + else: + args = value + if member_type is tuple: # special case for tuple enums + args = (args, ) # wrap it one more time + if not use_args or not args: + enum_member = __new__(enum_class) + if not hasattr(enum_member, '_value_'): + enum_member._value_ = value + else: + enum_member = __new__(enum_class, *args) + if not hasattr(enum_member, '_value_'): + enum_member._value_ = member_type(*args) + value = enum_member._value_ + enum_member._name_ = member_name + enum_member.__objclass__ = enum_class + enum_member.__init__(*args) + # If another member with the same value was already defined, the + # new member becomes an alias to the existing one. + for name, canonical_member in enum_class._member_map_.items(): + if canonical_member.value == enum_member._value_: + enum_member = canonical_member + break + else: + # Aliases don't appear in member names (only in __members__). + enum_class._member_names_.append(member_name) + # performance boost for any member that would not shadow + # a DynamicClassAttribute (aka _RouteClassAttributeToGetattr) + if member_name not in base_attributes: + setattr(enum_class, member_name, enum_member) + # now add to _member_map_ + enum_class._member_map_[member_name] = enum_member + try: + # This may fail if value is not hashable. We can't add the value + # to the map, and by-value lookups for this value will be + # linear. 
+ enum_class._value2member_map_[value] = enum_member + except TypeError: + pass + + + # If a custom type is mixed into the Enum, and it does not know how + # to pickle itself, pickle.dumps will succeed but pickle.loads will + # fail. Rather than have the error show up later and possibly far + # from the source, sabotage the pickle protocol for this class so + # that pickle.dumps also fails. + # + # However, if the new class implements its own __reduce_ex__, do not + # sabotage -- it's on them to make sure it works correctly. We use + # __reduce_ex__ instead of any of the others as it is preferred by + # pickle over __reduce__, and it handles all pickle protocols. + unpicklable = False + if '__reduce_ex__' not in classdict: + if member_type is not object: + methods = ('__getnewargs_ex__', '__getnewargs__', + '__reduce_ex__', '__reduce__') + if not any(m in member_type.__dict__ for m in methods): + _make_class_unpicklable(enum_class) + unpicklable = True + + + # double check that repr and friends are not the mixin's or various + # things break (such as pickle) + for name in ('__repr__', '__str__', '__format__', '__reduce_ex__'): + class_method = getattr(enum_class, name) + obj_method = getattr(member_type, name, None) + enum_method = getattr(first_enum, name, None) + if name not in classdict and class_method is not enum_method: + if name == '__reduce_ex__' and unpicklable: + continue + setattr(enum_class, name, enum_method) + + # method resolution and int's are not playing nice + # Python's less than 2.6 use __cmp__ + + if pyver < 2.6: + + if issubclass(enum_class, int): + setattr(enum_class, '__cmp__', getattr(int, '__cmp__')) + + elif pyver < 3.0: + + if issubclass(enum_class, int): + for method in ( + '__le__', + '__lt__', + '__gt__', + '__ge__', + '__eq__', + '__ne__', + '__hash__', + ): + setattr(enum_class, method, getattr(int, method)) + + # replace any other __new__ with our own (as long as Enum is not None, + # anyway) -- again, this is to support pickle + 
if Enum is not None: + # if the user defined their own __new__, save it before it gets + # clobbered in case they subclass later + if save_new: + setattr(enum_class, '__member_new__', enum_class.__dict__['__new__']) + setattr(enum_class, '__new__', Enum.__dict__['__new__']) + return enum_class + + def __bool__(cls): + """ + classes/types should always be True. + """ + return True + + def __call__(cls, value, names=None, module=None, type=None, start=1): + """Either returns an existing member, or creates a new enum class. + + This method is used both when an enum class is given a value to match + to an enumeration member (i.e. Color(3)) and for the functional API + (i.e. Color = Enum('Color', names='red green blue')). + + When used for the functional API: `module`, if set, will be stored in + the new class' __module__ attribute; `type`, if set, will be mixed in + as the first base class. + + Note: if `module` is not set this routine will attempt to discover the + calling module by walking the frame stack; if this is unsuccessful + the resulting class will not be pickleable. + + """ + if names is None: # simple value lookup + return cls.__new__(cls, value) + # otherwise, functional API: we're creating a new Enum type + return cls._create_(value, names, module=module, type=type, start=start) + + def __contains__(cls, member): + return isinstance(member, cls) and member.name in cls._member_map_ + + def __delattr__(cls, attr): + # nicer error message when someone tries to delete an attribute + # (see issue19025). + if attr in cls._member_map_: + raise AttributeError( + "%s: cannot delete Enum member." % cls.__name__) + super(EnumMeta, cls).__delattr__(attr) + + def __dir__(self): + return (['__class__', '__doc__', '__members__', '__module__'] + + self._member_names_) + + @property + def __members__(cls): + """Returns a mapping of member name->value. + + This mapping lists all enum members, including aliases. Note that this + is a copy of the internal mapping. 
+ + """ + return cls._member_map_.copy() + + def __getattr__(cls, name): + """Return the enum member matching `name` + + We use __getattr__ instead of descriptors or inserting into the enum + class' __dict__ in order to support `name` and `value` being both + properties for enum members (which live in the class' __dict__) and + enum members themselves. + + """ + if _is_dunder(name): + raise AttributeError(name) + try: + return cls._member_map_[name] + except KeyError: + raise AttributeError(name) + + def __getitem__(cls, name): + return cls._member_map_[name] + + def __iter__(cls): + return (cls._member_map_[name] for name in cls._member_names_) + + def __reversed__(cls): + return (cls._member_map_[name] for name in reversed(cls._member_names_)) + + def __len__(cls): + return len(cls._member_names_) + + __nonzero__ = __bool__ + + def __repr__(cls): + return "" % cls.__name__ + + def __setattr__(cls, name, value): + """Block attempts to reassign Enum members. + + A simple assignment to the class namespace only changes one of the + several possible ways to get an Enum member from the Enum class, + resulting in an inconsistent Enumeration. + + """ + member_map = cls.__dict__.get('_member_map_', {}) + if name in member_map: + raise AttributeError('Cannot reassign members.') + super(EnumMeta, cls).__setattr__(name, value) + + def _create_(cls, class_name, names=None, module=None, type=None, start=1): + """Convenience method to create a new Enum class. + + `names` can be: + + * A string containing member names, separated either with spaces or + commas. Values are auto-numbered from 1. + * An iterable of member names. Values are auto-numbered from 1. + * An iterable of (member name, value) pairs. + * A mapping of member name -> value. 
+ + """ + if pyver < 3.0: + # if class_name is unicode, attempt a conversion to ASCII + if isinstance(class_name, unicode): + try: + class_name = class_name.encode('ascii') + except UnicodeEncodeError: + raise TypeError('%r is not representable in ASCII' % class_name) + metacls = cls.__class__ + if type is None: + bases = (cls, ) + else: + bases = (type, cls) + classdict = metacls.__prepare__(class_name, bases) + _order_ = [] + + # special processing needed for names? + if isinstance(names, basestring): + names = names.replace(',', ' ').split() + if isinstance(names, (tuple, list)) and isinstance(names[0], basestring): + names = [(e, i+start) for (i, e) in enumerate(names)] + + # Here, names is either an iterable of (name, value) or a mapping. + item = None # in case names is empty + for item in names: + if isinstance(item, basestring): + member_name, member_value = item, names[item] + else: + member_name, member_value = item + classdict[member_name] = member_value + _order_.append(member_name) + # only set _order_ in classdict if name/value was not from a mapping + if not isinstance(item, basestring): + classdict['_order_'] = ' '.join(_order_) + enum_class = metacls.__new__(metacls, class_name, bases, classdict) + + # TODO: replace the frame hack if a blessed way to know the calling + # module is ever developed + if module is None: + try: + module = _sys._getframe(2).f_globals['__name__'] + except (AttributeError, ValueError): + pass + if module is None: + _make_class_unpicklable(enum_class) + else: + enum_class.__module__ = module + + return enum_class + + @staticmethod + def _get_mixins_(bases): + """Returns the type for creating enum members, and the first inherited + enum class. 
+ + bases: the tuple of bases that was given to __new__ + + """ + if not bases or Enum is None: + return object, Enum + + + # double check that we are not subclassing a class with existing + # enumeration members; while we're at it, see if any other data + # type has been mixed in so we can use the correct __new__ + member_type = first_enum = None + for base in bases: + if (base is not Enum and + issubclass(base, Enum) and + base._member_names_): + raise TypeError("Cannot extend enumerations") + # base is now the last base in bases + if not issubclass(base, Enum): + raise TypeError("new enumerations must be created as " + "`ClassName([mixin_type,] enum_type)`") + + # get correct mix-in type (either mix-in type of Enum subclass, or + # first base if last base is Enum) + if not issubclass(bases[0], Enum): + member_type = bases[0] # first data type + first_enum = bases[-1] # enum type + else: + for base in bases[0].__mro__: + # most common: (IntEnum, int, Enum, object) + # possible: (, , + # , , + # ) + if issubclass(base, Enum): + if first_enum is None: + first_enum = base + else: + if member_type is None: + member_type = base + + return member_type, first_enum + + if pyver < 3.0: + @staticmethod + def _find_new_(classdict, member_type, first_enum): + """Returns the __new__ to be used for creating the enum members. 
+ + classdict: the class dictionary given to __new__ + member_type: the data type whose __new__ will be used by default + first_enum: enumeration to check for an overriding __new__ + + """ + # now find the correct __new__, checking to see of one was defined + # by the user; also check earlier enum classes in case a __new__ was + # saved as __member_new__ + __new__ = classdict.get('__new__', None) + if __new__: + return None, True, True # __new__, save_new, use_args + + N__new__ = getattr(None, '__new__') + O__new__ = getattr(object, '__new__') + if Enum is None: + E__new__ = N__new__ + else: + E__new__ = Enum.__dict__['__new__'] + # check all possibles for __member_new__ before falling back to + # __new__ + for method in ('__member_new__', '__new__'): + for possible in (member_type, first_enum): + try: + target = possible.__dict__[method] + except (AttributeError, KeyError): + target = getattr(possible, method, None) + if target not in [ + None, + N__new__, + O__new__, + E__new__, + ]: + if method == '__member_new__': + classdict['__new__'] = target + return None, False, True + if isinstance(target, staticmethod): + target = target.__get__(member_type) + __new__ = target + break + if __new__ is not None: + break + else: + __new__ = object.__new__ + + # if a non-object.__new__ is used then whatever value/tuple was + # assigned to the enum member name will be passed to __new__ and to the + # new enum member's __init__ + if __new__ is object.__new__: + use_args = False + else: + use_args = True + + return __new__, False, use_args + else: + @staticmethod + def _find_new_(classdict, member_type, first_enum): + """Returns the __new__ to be used for creating the enum members. 
+ + classdict: the class dictionary given to __new__ + member_type: the data type whose __new__ will be used by default + first_enum: enumeration to check for an overriding __new__ + + """ + # now find the correct __new__, checking to see of one was defined + # by the user; also check earlier enum classes in case a __new__ was + # saved as __member_new__ + __new__ = classdict.get('__new__', None) + + # should __new__ be saved as __member_new__ later? + save_new = __new__ is not None + + if __new__ is None: + # check all possibles for __member_new__ before falling back to + # __new__ + for method in ('__member_new__', '__new__'): + for possible in (member_type, first_enum): + target = getattr(possible, method, None) + if target not in ( + None, + None.__new__, + object.__new__, + Enum.__new__, + ): + __new__ = target + break + if __new__ is not None: + break + else: + __new__ = object.__new__ + + # if a non-object.__new__ is used then whatever value/tuple was + # assigned to the enum member name will be passed to __new__ and to the + # new enum member's __init__ + if __new__ is object.__new__: + use_args = False + else: + use_args = True + + return __new__, save_new, use_args + + +######################################################## +# In order to support Python 2 and 3 with a single +# codebase we have to create the Enum methods separately +# and then use the `type(name, bases, dict)` method to +# create the class. +######################################################## +temp_enum_dict = {} +temp_enum_dict['__doc__'] = "Generic enumeration.\n\n Derive from this class to define new enumerations.\n\n" + +def __new__(cls, value): + # all enum instances are actually created during class construction + # without calling this method; this method is called by the metaclass' + # __call__ (i.e. 
Color(3) ), and by pickle + if type(value) is cls: + # For lookups like Color(Color.red) + value = value.value + #return value + # by-value search for a matching enum member + # see if it's in the reverse mapping (for hashable values) + try: + if value in cls._value2member_map_: + return cls._value2member_map_[value] + except TypeError: + # not there, now do long search -- O(n) behavior + for member in cls._member_map_.values(): + if member.value == value: + return member + raise ValueError("%s is not a valid %s" % (value, cls.__name__)) +temp_enum_dict['__new__'] = __new__ +del __new__ + +def __repr__(self): + return "<%s.%s: %r>" % ( + self.__class__.__name__, self._name_, self._value_) +temp_enum_dict['__repr__'] = __repr__ +del __repr__ + +def __str__(self): + return "%s.%s" % (self.__class__.__name__, self._name_) +temp_enum_dict['__str__'] = __str__ +del __str__ + +if pyver >= 3.0: + def __dir__(self): + added_behavior = [ + m + for cls in self.__class__.mro() + for m in cls.__dict__ + if m[0] != '_' and m not in self._member_map_ + ] + return (['__class__', '__doc__', '__module__', ] + added_behavior) + temp_enum_dict['__dir__'] = __dir__ + del __dir__ + +def __format__(self, format_spec): + # mixed-in Enums should use the mixed-in type's __format__, otherwise + # we can get strange results with the Enum name showing up instead of + # the value + + # pure Enum branch + if self._member_type_ is object: + cls = str + val = str(self) + # mix-in branch + else: + cls = self._member_type_ + val = self.value + return cls.__format__(val, format_spec) +temp_enum_dict['__format__'] = __format__ +del __format__ + + +#################################### +# Python's less than 2.6 use __cmp__ + +if pyver < 2.6: + + def __cmp__(self, other): + if type(other) is self.__class__: + if self is other: + return 0 + return -1 + return NotImplemented + raise TypeError("unorderable types: %s() and %s()" % (self.__class__.__name__, other.__class__.__name__)) + 
temp_enum_dict['__cmp__'] = __cmp__ + del __cmp__ + +else: + + def __le__(self, other): + raise TypeError("unorderable types: %s() <= %s()" % (self.__class__.__name__, other.__class__.__name__)) + temp_enum_dict['__le__'] = __le__ + del __le__ + + def __lt__(self, other): + raise TypeError("unorderable types: %s() < %s()" % (self.__class__.__name__, other.__class__.__name__)) + temp_enum_dict['__lt__'] = __lt__ + del __lt__ + + def __ge__(self, other): + raise TypeError("unorderable types: %s() >= %s()" % (self.__class__.__name__, other.__class__.__name__)) + temp_enum_dict['__ge__'] = __ge__ + del __ge__ + + def __gt__(self, other): + raise TypeError("unorderable types: %s() > %s()" % (self.__class__.__name__, other.__class__.__name__)) + temp_enum_dict['__gt__'] = __gt__ + del __gt__ + + +def __eq__(self, other): + if type(other) is self.__class__: + return self is other + return NotImplemented +temp_enum_dict['__eq__'] = __eq__ +del __eq__ + +def __ne__(self, other): + if type(other) is self.__class__: + return self is not other + return NotImplemented +temp_enum_dict['__ne__'] = __ne__ +del __ne__ + +def __hash__(self): + return hash(self._name_) +temp_enum_dict['__hash__'] = __hash__ +del __hash__ + +def __reduce_ex__(self, proto): + return self.__class__, (self._value_, ) +temp_enum_dict['__reduce_ex__'] = __reduce_ex__ +del __reduce_ex__ + +# _RouteClassAttributeToGetattr is used to provide access to the `name` +# and `value` properties of enum members while keeping some measure of +# protection from modification, while still allowing for an enumeration +# to have members named `name` and `value`. This works because enumeration +# members are not set directly on the enum class -- __getattr__ is +# used to look them up. 
+ +@_RouteClassAttributeToGetattr +def name(self): + return self._name_ +temp_enum_dict['name'] = name +del name + +@_RouteClassAttributeToGetattr +def value(self): + return self._value_ +temp_enum_dict['value'] = value +del value + +@classmethod +def _convert(cls, name, module, filter, source=None): + """ + Create a new Enum subclass that replaces a collection of global constants + """ + # convert all constants from source (or module) that pass filter() to + # a new Enum called name, and export the enum and its members back to + # module; + # also, replace the __reduce_ex__ method so unpickling works in + # previous Python versions + module_globals = vars(_sys.modules[module]) + if source: + source = vars(source) + else: + source = module_globals + members = dict((name, value) for name, value in source.items() if filter(name)) + cls = cls(name, members, module=module) + cls.__reduce_ex__ = _reduce_ex_by_name + module_globals.update(cls.__members__) + module_globals[name] = cls + return cls +temp_enum_dict['_convert'] = _convert +del _convert + +Enum = EnumMeta('Enum', (object, ), temp_enum_dict) +del temp_enum_dict + +# Enum has now been created +########################### + +class IntEnum(int, Enum): + """Enum where members are also (and must be) ints""" + +def _reduce_ex_by_name(self, proto): + return self.name + +def unique(enumeration): + """Class decorator that ensures only unique members exist in an enumeration.""" + duplicates = [] + for name, member in enumeration.__members__.items(): + if name != member.name: + duplicates.append((name, member.name)) + if duplicates: + duplicate_names = ', '.join( + ["%s -> %s" % (alias, name) for (alias, name) in duplicates] + ) + raise ValueError('duplicate names found in %r: %s' % + (enumeration, duplicate_names) + ) + return enumeration diff --git a/ext2/future/__init__.py b/ext2/future/__init__.py index 8139aa33a9..f7a6fbebb1 100644 --- a/ext2/future/__init__.py +++ b/ext2/future/__init__.py @@ -76,7 +76,7 @@ 
Licensing --------- -Copyright 2013-2016 Python Charmers Pty Ltd, Australia. +Copyright 2013-2018 Python Charmers Pty Ltd, Australia. The software is distributed under an MIT licence. See LICENSE.txt. """ @@ -84,10 +84,10 @@ __title__ = 'future' __author__ = 'Ed Schofield' __license__ = 'MIT' -__copyright__ = 'Copyright 2013-2016 Python Charmers Pty Ltd' +__copyright__ = 'Copyright 2013-2018 Python Charmers Pty Ltd' __ver_major__ = 0 -__ver_minor__ = 16 -__ver_patch__ = 0 +__ver_minor__ = 17 +__ver_patch__ = 1 __ver_sub__ = '' __version__ = "%d.%d.%d%s" % (__ver_major__, __ver_minor__, __ver_patch__, __ver_sub__) diff --git a/ext2/future/backports/html/__init__.py b/ext2/future/backports/html/__init__.py index 837afce1ba..58e133fd4b 100644 --- a/ext2/future/backports/html/__init__.py +++ b/ext2/future/backports/html/__init__.py @@ -25,4 +25,3 @@ def escape(s, quote=True): if quote: return s.translate(_escape_map_full) return s.translate(_escape_map) - diff --git a/ext2/future/backports/html/entities.py b/ext2/future/backports/html/entities.py index 6798187c6a..5c73f6923a 100644 --- a/ext2/future/backports/html/entities.py +++ b/ext2/future/backports/html/entities.py @@ -2512,4 +2512,3 @@ entitydefs[name] = chr(codepoint) del name, codepoint - diff --git a/ext2/future/backports/html/parser.py b/ext2/future/backports/html/parser.py index 7b8cdba613..fb652636d4 100644 --- a/ext2/future/backports/html/parser.py +++ b/ext2/future/backports/html/parser.py @@ -534,4 +534,3 @@ def replaceEntities(s): return re.sub(r"&(#?[xX]?(?:[0-9a-fA-F]+;|\w{1,32};?))", replaceEntities, s) - diff --git a/ext2/future/backports/misc.py b/ext2/future/backports/misc.py index 31e713ae38..ef75207885 100644 --- a/ext2/future/backports/misc.py +++ b/ext2/future/backports/misc.py @@ -779,7 +779,7 @@ def __bool__(self): # Py2 compatibility: __nonzero__ = __bool__ - + @recursive_repr() def __repr__(self): return '{0.__class__.__name__}({1})'.format( @@ -817,7 +817,7 @@ def __delitem__(self, key): 
try: del self.maps[0][key] except KeyError: - raise KeyError('Key not found in the first mapping: {!r}'.format(key)) + raise KeyError('Key not found in the first mapping: {0!r}'.format(key)) def popitem(self): 'Remove and return an item pair from maps[0]. Raise KeyError is maps[0] is empty.' @@ -831,7 +831,7 @@ def pop(self, key, *args): try: return self.maps[0].pop(key, *args) except KeyError: - raise KeyError('Key not found in the first mapping: {!r}'.format(key)) + raise KeyError('Key not found in the first mapping: {0!r}'.format(key)) def clear(self): 'Clear maps[0], leaving maps[1:] intact.' diff --git a/ext2/future/backports/test/support.py b/ext2/future/backports/test/support.py index b59c4ff73b..1999e208fe 100644 --- a/ext2/future/backports/test/support.py +++ b/ext2/future/backports/test/support.py @@ -666,7 +666,7 @@ def _is_ipv6_enabled(): # # First try printable and common characters to have a readable filename. # # For each character, the encoding list are just example of encodings able # # to encode the character (the list is not exhaustive). -# +# # # U+00E6 (Latin Small Letter Ae): cp1252, iso-8859-1 # '\u00E6', # # U+0130 (Latin Capital Letter I With Dot Above): cp1254, iso8859_3 @@ -685,11 +685,11 @@ def _is_ipv6_enabled(): # '\u062A', # # U+0E01 (Thai Character Ko Kai): cp874 # '\u0E01', -# +# # # Then try more "special" characters. "special" because they may be # # interpreted or displayed differently depending on the exact locale # # encoding and the font. 
-# +# # # U+00A0 (No-Break Space) # '\u00A0', # # U+20AC (Euro Sign) @@ -702,7 +702,7 @@ def _is_ipv6_enabled(): # else: # FS_NONASCII = character # break -# +# # # TESTFN_UNICODE is a non-ascii filename # TESTFN_UNICODE = TESTFN + "-\xe0\xf2\u0258\u0141\u011f" # if sys.platform == 'darwin': @@ -712,7 +712,7 @@ def _is_ipv6_enabled(): # import unicodedata # TESTFN_UNICODE = unicodedata.normalize('NFD', TESTFN_UNICODE) # TESTFN_ENCODING = sys.getfilesystemencoding() -# +# # # TESTFN_UNENCODABLE is a filename (str type) that should *not* be able to be # # encoded by the filesystem encoding (in strict mode). It can be None if we # # cannot generate such filename. @@ -745,7 +745,7 @@ def _is_ipv6_enabled(): # # File system encoding (eg. ISO-8859-* encodings) can encode # # the byte 0xff. Skip some unicode filename tests. # pass -# +# # # TESTFN_UNDECODABLE is a filename (bytes type) that should *not* be able to be # # decoded from the filesystem encoding (in strict mode). It can be None if we # # cannot generate such filename (ex: the latin1 encoding can decode any byte @@ -775,7 +775,7 @@ def _is_ipv6_enabled(): # except UnicodeDecodeError: # TESTFN_UNDECODABLE = os.fsencode(TESTFN) + name # break -# +# # if FS_NONASCII: # TESTFN_NONASCII = TESTFN + '-' + FS_NONASCII # else: @@ -1667,15 +1667,15 @@ def case_pred(test): # We don't have sysconfig on Py2.6: # #======================================================================= # # Check for the presence of docstrings. -# +# # HAVE_DOCSTRINGS = (check_impl_detail(cpython=False) or # sys.platform == 'win32' or # sysconfig.get_config_var('WITH_DOC_STRINGS')) -# +# # requires_docstrings = unittest.skipUnless(HAVE_DOCSTRINGS, # "test requires docstrings") -# -# +# +# # #======================================================================= # doctest driver. 
diff --git a/ext2/future/backports/urllib/parse.py b/ext2/future/backports/urllib/parse.py index ada2f8bb4c..04e52d4925 100644 --- a/ext2/future/backports/urllib/parse.py +++ b/ext2/future/backports/urllib/parse.py @@ -727,14 +727,14 @@ def quote_from_bytes(bs, safe='/'): return str('') ### For Python-Future: bs = bytes(bs) - ### + ### if isinstance(safe, str): # Normalize 'safe' by converting to bytes and removing non-ASCII chars safe = str(safe).encode('ascii', 'ignore') else: ### For Python-Future: safe = bytes(safe) - ### + ### safe = bytes([c for c in safe if c < 128]) if not bs.rstrip(_ALWAYS_SAFE_BYTES + safe): return bs.decode() diff --git a/ext2/future/builtins/__init__.py b/ext2/future/builtins/__init__.py index 94011f9795..216465a155 100644 --- a/ext2/future/builtins/__init__.py +++ b/ext2/future/builtins/__init__.py @@ -38,9 +38,9 @@ if not utils.PY3: # We only import names that shadow the builtins on Py2. No other namespace # pollution on Py2. - + # Only shadow builtins on Py2; no new names - __all__ = ['filter', 'map', 'zip', + __all__ = ['filter', 'map', 'zip', 'ascii', 'chr', 'hex', 'input', 'next', 'oct', 'open', 'pow', 'round', 'super', 'bytes', 'dict', 'int', 'list', 'object', 'range', 'str', diff --git a/ext2/future/builtins/iterators.py b/ext2/future/builtins/iterators.py index b82f29f2a0..dff651e0f4 100644 --- a/ext2/future/builtins/iterators.py +++ b/ext2/future/builtins/iterators.py @@ -7,7 +7,7 @@ for i in range(10**15): pass - + for (a, b) in zip(range(10**15), range(-10**15, 0)): pass @@ -50,4 +50,3 @@ range = builtins.range zip = builtins.zip __all__ = [] - diff --git a/ext2/future/builtins/newnext.py b/ext2/future/builtins/newnext.py index 9364023aa2..097638ac11 100644 --- a/ext2/future/builtins/newnext.py +++ b/ext2/future/builtins/newnext.py @@ -43,7 +43,7 @@ def newnext(iterator, default=_SENTINEL): """ next(iterator[, default]) - + Return the next item from the iterator. 
If default is given and the iterator is exhausted, it is returned instead of raising StopIteration. """ @@ -68,4 +68,3 @@ def newnext(iterator, default=_SENTINEL): __all__ = ['newnext'] - diff --git a/ext2/future/builtins/newround.py b/ext2/future/builtins/newround.py index f59b35b325..3943ebb6e3 100644 --- a/ext2/future/builtins/newround.py +++ b/ext2/future/builtins/newround.py @@ -1,7 +1,7 @@ """ ``python-future``: pure Python implementation of Python 3 round(). """ - + from future.utils import PYPY, PY26, bind_method # Use the decimal module for simplicity of implementation (and @@ -12,13 +12,13 @@ def newround(number, ndigits=None): """ See Python 3 documentation: uses Banker's Rounding. - + Delegates to the __round__ method if for some reason this exists. - + If not, rounds a number to a given precision in decimal digits (default 0 digits). This returns an int when called with one argument, otherwise the same type as the number. ndigits may be negative. - + See the test_round method in future/tests/test_builtins.py for examples. """ @@ -28,7 +28,7 @@ def newround(number, ndigits=None): ndigits = 0 if hasattr(number, '__round__'): return number.__round__(ndigits) - + if ndigits < 0: raise NotImplementedError('negative ndigits not supported yet') exponent = Decimal('10') ** (-ndigits) @@ -48,7 +48,7 @@ def newround(number, ndigits=None): return int(d) else: return float(d) - + ### From Python 2.7's decimal.py. Only needed to support Py2.6: diff --git a/ext2/future/builtins/newsuper.py b/ext2/future/builtins/newsuper.py index a787f4bb9c..5d3402bd2f 100644 --- a/ext2/future/builtins/newsuper.py +++ b/ext2/future/builtins/newsuper.py @@ -24,7 +24,7 @@ def append(self, item): "Of course, you can still explicitly pass in the arguments if you want to do something strange. Sometimes you really do want that, e.g. to skip over some classes in the method resolution order. - + "How does it work? 
By inspecting the calling frame to determine the function object being executed and the object on which it's being called, and then walking the object's __mro__ chain to find out where @@ -51,14 +51,14 @@ def newsuper(typ=_SENTINEL, type_or_obj=_SENTINEL, framedepth=1): # Infer the correct call if used without arguments. if typ is _SENTINEL: # We'll need to do some frame hacking. - f = sys._getframe(framedepth) + f = sys._getframe(framedepth) try: # Get the function's first positional argument. type_or_obj = f.f_locals[f.f_code.co_varnames[0]] except (IndexError, KeyError,): raise RuntimeError('super() used in a function with no args') - + try: # Get the MRO so we can crawl it. mro = type_or_obj.__mro__ @@ -67,9 +67,9 @@ def newsuper(typ=_SENTINEL, type_or_obj=_SENTINEL, framedepth=1): mro = type_or_obj.__class__.__mro__ except AttributeError: raise RuntimeError('super() used with a non-newstyle class') - + # A ``for...else`` block? Yes! It's odd, but useful. - # If unfamiliar with for...else, see: + # If unfamiliar with for...else, see: # # http://psung.blogspot.com/2007/12/for-else-in-python.html for typ in mro: @@ -88,7 +88,7 @@ def newsuper(typ=_SENTINEL, type_or_obj=_SENTINEL, framedepth=1): try: meth = meth.__func__ except AttributeError: - meth = meth.__get__(type_or_obj) + meth = meth.__get__(type_or_obj, typ) except (AttributeError, TypeError): continue if meth.func_code is f.f_code: @@ -98,7 +98,7 @@ def newsuper(typ=_SENTINEL, type_or_obj=_SENTINEL, framedepth=1): break # Found! Break out of the search loop. else: raise RuntimeError('super() called outside a method') - + # Dispatch to builtin super(). 
if type_or_obj is not _SENTINEL: return _builtin_super(typ, type_or_obj) @@ -112,4 +112,3 @@ def superm(*args, **kwds): __all__ = ['newsuper'] - diff --git a/ext2/future/moves/test/support.py b/ext2/future/moves/test/support.py index ab189f4060..e9aa0f48f9 100644 --- a/ext2/future/moves/test/support.py +++ b/ext2/future/moves/test/support.py @@ -8,4 +8,3 @@ __future_module__ = True with suspend_hooks(): from test.test_support import * - diff --git a/ext2/future/moves/tkinter/__init__.py b/ext2/future/moves/tkinter/__init__.py index bc50b4c602..e40829663e 100644 --- a/ext2/future/moves/tkinter/__init__.py +++ b/ext2/future/moves/tkinter/__init__.py @@ -4,8 +4,24 @@ if not PY3: from Tkinter import * - from Tkinter import (_cnfmerge, _default_root, _flatten, _join, _setit, - _splitdict, _stringify, _support_default_root, _test, - _tkinter) + from Tkinter import (_cnfmerge, _default_root, _flatten, + _support_default_root, _test, + _tkinter, _setit) + + try: # >= 2.7.4 + from Tkinter import (_join) + except ImportError: + pass + + try: # >= 2.7.4 + from Tkinter import (_stringify) + except ImportError: + pass + + try: # >= 2.7.9 + from Tkinter import (_splitdict) + except ImportError: + pass + else: from tkinter import * diff --git a/ext2/future/moves/tkinter/colorchooser.py b/ext2/future/moves/tkinter/colorchooser.py index 5e7c97f444..6dde6e8d30 100644 --- a/ext2/future/moves/tkinter/colorchooser.py +++ b/ext2/future/moves/tkinter/colorchooser.py @@ -10,4 +10,3 @@ except ImportError: raise ImportError('The tkColorChooser module is missing. Does your Py2 ' 'installation include tkinter?') - diff --git a/ext2/future/moves/tkinter/commondialog.py b/ext2/future/moves/tkinter/commondialog.py index 7747a0ba0b..eb7ae8d607 100644 --- a/ext2/future/moves/tkinter/commondialog.py +++ b/ext2/future/moves/tkinter/commondialog.py @@ -10,4 +10,3 @@ except ImportError: raise ImportError('The tkCommonDialog module is missing. 
Does your Py2 ' 'installation include tkinter?') - diff --git a/ext2/future/moves/tkinter/constants.py b/ext2/future/moves/tkinter/constants.py index 99216f33d0..ffe098152f 100644 --- a/ext2/future/moves/tkinter/constants.py +++ b/ext2/future/moves/tkinter/constants.py @@ -10,4 +10,3 @@ except ImportError: raise ImportError('The Tkconstants module is missing. Does your Py2 ' 'installation include tkinter?') - diff --git a/ext2/future/moves/tkinter/dialog.py b/ext2/future/moves/tkinter/dialog.py index a5b777815a..113370ca2c 100644 --- a/ext2/future/moves/tkinter/dialog.py +++ b/ext2/future/moves/tkinter/dialog.py @@ -10,4 +10,3 @@ except ImportError: raise ImportError('The Dialog module is missing. Does your Py2 ' 'installation include tkinter?') - diff --git a/ext2/future/moves/tkinter/dnd.py b/ext2/future/moves/tkinter/dnd.py index 748b111a2b..1ab437917d 100644 --- a/ext2/future/moves/tkinter/dnd.py +++ b/ext2/future/moves/tkinter/dnd.py @@ -10,4 +10,3 @@ except ImportError: raise ImportError('The Tkdnd module is missing. Does your Py2 ' 'installation include tkinter?') - diff --git a/ext2/future/moves/tkinter/filedialog.py b/ext2/future/moves/tkinter/filedialog.py index 35e21ac0eb..973923e2c8 100644 --- a/ext2/future/moves/tkinter/filedialog.py +++ b/ext2/future/moves/tkinter/filedialog.py @@ -10,4 +10,3 @@ except ImportError: raise ImportError('The FileDialog module is missing. Does your Py2 ' 'installation include tkinter?') - diff --git a/ext2/future/moves/tkinter/font.py b/ext2/future/moves/tkinter/font.py index 63d86dc73c..628f399a35 100644 --- a/ext2/future/moves/tkinter/font.py +++ b/ext2/future/moves/tkinter/font.py @@ -10,4 +10,3 @@ except ImportError: raise ImportError('The tkFont module is missing. 
Does your Py2 ' 'installation include tkinter?') - diff --git a/ext2/future/moves/tkinter/messagebox.py b/ext2/future/moves/tkinter/messagebox.py index 3ed52e1fec..b43d8702f5 100644 --- a/ext2/future/moves/tkinter/messagebox.py +++ b/ext2/future/moves/tkinter/messagebox.py @@ -10,4 +10,3 @@ except ImportError: raise ImportError('The tkMessageBox module is missing. Does your Py2 ' 'installation include tkinter?') - diff --git a/ext2/future/moves/tkinter/scrolledtext.py b/ext2/future/moves/tkinter/scrolledtext.py index 13bd660d96..1c69db6067 100644 --- a/ext2/future/moves/tkinter/scrolledtext.py +++ b/ext2/future/moves/tkinter/scrolledtext.py @@ -10,4 +10,3 @@ except ImportError: raise ImportError('The ScrolledText module is missing. Does your Py2 ' 'installation include tkinter?') - diff --git a/ext2/future/moves/tkinter/simpledialog.py b/ext2/future/moves/tkinter/simpledialog.py index e952fa994e..dba93fbf25 100644 --- a/ext2/future/moves/tkinter/simpledialog.py +++ b/ext2/future/moves/tkinter/simpledialog.py @@ -10,4 +10,3 @@ except ImportError: raise ImportError('The SimpleDialog module is missing. Does your Py2 ' 'installation include tkinter?') - diff --git a/ext2/future/moves/tkinter/tix.py b/ext2/future/moves/tkinter/tix.py index 019df6f73f..8d1718ad0b 100644 --- a/ext2/future/moves/tkinter/tix.py +++ b/ext2/future/moves/tkinter/tix.py @@ -10,4 +10,3 @@ except ImportError: raise ImportError('The Tix module is missing. Does your Py2 ' 'installation include tkinter?') - diff --git a/ext2/future/moves/tkinter/ttk.py b/ext2/future/moves/tkinter/ttk.py index 22ac9774c4..081c1b4956 100644 --- a/ext2/future/moves/tkinter/ttk.py +++ b/ext2/future/moves/tkinter/ttk.py @@ -10,4 +10,3 @@ except ImportError: raise ImportError('The ttk module is missing. 
Does your Py2 ' 'installation include tkinter?') - diff --git a/ext2/future/moves/urllib/__init__.py b/ext2/future/moves/urllib/__init__.py index 8d1298c9f7..5cf428b6ec 100644 --- a/ext2/future/moves/urllib/__init__.py +++ b/ext2/future/moves/urllib/__init__.py @@ -3,4 +3,3 @@ if not PY3: __future_module__ = True - diff --git a/ext2/future/moves/urllib/error.py b/ext2/future/moves/urllib/error.py index ae49255f08..7d8ada73f8 100644 --- a/ext2/future/moves/urllib/error.py +++ b/ext2/future/moves/urllib/error.py @@ -7,10 +7,10 @@ from urllib.error import * else: __future_module__ = True - + # We use this method to get at the original Py2 urllib before any renaming magic # ContentTooShortError = sys.py2_modules['urllib'].ContentTooShortError - + with suspend_hooks(): from urllib import ContentTooShortError from urllib2 import URLError, HTTPError diff --git a/ext2/future/moves/urllib/parse.py b/ext2/future/moves/urllib/parse.py index 832dfb5164..9074b8163f 100644 --- a/ext2/future/moves/urllib/parse.py +++ b/ext2/future/moves/urllib/parse.py @@ -10,7 +10,7 @@ from urlparse import (ParseResult, SplitResult, parse_qs, parse_qsl, urldefrag, urljoin, urlparse, urlsplit, urlunparse, urlunsplit) - + # we use this method to get at the original py2 urllib before any renaming # quote = sys.py2_modules['urllib'].quote # quote_plus = sys.py2_modules['urllib'].quote_plus @@ -18,7 +18,7 @@ # unquote_plus = sys.py2_modules['urllib'].unquote_plus # urlencode = sys.py2_modules['urllib'].urlencode # splitquery = sys.py2_modules['urllib'].splitquery - + with suspend_hooks(): from urllib import (quote, quote_plus, diff --git a/ext2/future/moves/urllib/request.py b/ext2/future/moves/urllib/request.py index 375dc29f85..60e440a77e 100644 --- a/ext2/future/moves/urllib/request.py +++ b/ext2/future/moves/urllib/request.py @@ -51,7 +51,7 @@ # URLopener, # FancyURLopener, # proxy_bypass) - + # from urllib2 import ( # AbstractBasicAuthHandler, # AbstractDigestAuthHandler, @@ -80,7 +80,7 @@ # 
UnknownHandler, # urlopen, # ) - + # from urlparse import ( # urldefrag # urljoin, diff --git a/ext2/future/moves/urllib/response.py b/ext2/future/moves/urllib/response.py index 120ea13e7e..a287ae2833 100644 --- a/ext2/future/moves/urllib/response.py +++ b/ext2/future/moves/urllib/response.py @@ -10,4 +10,3 @@ addclosehook, addinfo, addinfourl) - diff --git a/ext2/future/standard_library/__init__.py b/ext2/future/standard_library/__init__.py index aca58c3e5c..cff02f9594 100644 --- a/ext2/future/standard_library/__init__.py +++ b/ext2/future/standard_library/__init__.py @@ -30,7 +30,7 @@ from itertools import filterfalse, zip_longest from sys import intern from collections import UserDict, UserList, UserString - from collections import OrderedDict, Counter # even on Py2.6 + from collections import OrderedDict, Counter, ChainMap # even on Py2.6 from subprocess import getoutput, getstatusoutput from subprocess import check_output # even on Py2.6 @@ -180,6 +180,7 @@ MOVES = [('collections', 'UserList', 'UserList', 'UserList'), ('collections', 'UserDict', 'UserDict', 'UserDict'), ('collections', 'UserString','UserString', 'UserString'), + ('collections', 'ChainMap', 'future.backports.misc', 'ChainMap'), ('itertools', 'filterfalse','itertools', 'ifilterfalse'), ('itertools', 'zip_longest','itertools', 'izip_longest'), ('sys', 'intern','__builtin__', 'intern'), @@ -195,6 +196,7 @@ ('math', 'ceil', 'future.backports.misc', 'ceil'), ('collections', 'OrderedDict', 'future.backports.misc', 'OrderedDict'), ('collections', 'Counter', 'future.backports.misc', 'Counter'), + ('collections', 'ChainMap', 'future.backports.misc', 'ChainMap'), ('itertools', 'count', 'future.backports.misc', 'count'), ('reprlib', 'recursive_repr', 'future.backports.misc', 'recursive_repr'), ('functools', 'cmp_to_key', 'future.backports.misc', 'cmp_to_key'), @@ -396,7 +398,7 @@ def scrub_future_sys_modules(): """ Deprecated. 
""" - return {} + return {} class suspend_hooks(object): """ diff --git a/ext2/future/tests/base.py b/ext2/future/tests/base.py index 546c779b09..9f4607b691 100644 --- a/ext2/future/tests/base.py +++ b/ext2/future/tests/base.py @@ -163,7 +163,7 @@ def convert(self, code, stages=(1, 2), all_imports=False, from3=False, """ Converts the code block using ``futurize`` and returns the resulting code. - + Passing stages=[1] or stages=[2] passes the flag ``--stage1`` or ``stage2`` to ``futurize``. Passing both stages runs ``futurize`` with both stages by default. @@ -259,10 +259,10 @@ def convert_check(self, before, expected, stages=(1, 2), all_imports=False, If ignore_imports is True, ignores the presence of any lines beginning: - + from __future__ import ... from future import ... - + for the purpose of the comparison. """ output = self.convert(before, stages=stages, all_imports=all_imports, diff --git a/ext2/future/types/__init__.py b/ext2/future/types/__init__.py index 71279bbb49..062507703e 100644 --- a/ext2/future/types/__init__.py +++ b/ext2/future/types/__init__.py @@ -15,7 +15,7 @@ to bring in the new semantics for these functions from Python 3. And then, for example:: - + b = bytes(b'ABCD') assert list(b) == [65, 66, 67, 68] assert repr(b) == "b'ABCD'" @@ -46,7 +46,7 @@ pass and:: - + class VerboseList(list): def append(self, item): print('Adding an item') @@ -112,7 +112,7 @@ def f(a, b): raises a TypeError when f is called if a unicode object is passed as `a` or a bytes object is passed as `b`. - This also skips over keyword arguments, so + This also skips over keyword arguments, so @disallow_types([0, 1], [unicode, bytes]) def g(a, b=None): @@ -130,7 +130,7 @@ def g(a, b=None): ... def __add__(self, other): ... pass - >>> newbytes('1234') + u'1234' #doctest: +IGNORE_EXCEPTION_DETAIL + >>> newbytes('1234') + u'1234' #doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): ... 
TypeError: can't concat 'bytes' to (unicode) str @@ -255,4 +255,3 @@ def issubset(list1, list2): unicode: newstr} __all__ = ['newbytes', 'newdict', 'newint', 'newlist', 'newrange', 'newstr', 'newtypes'] - diff --git a/ext2/future/types/newbytes.py b/ext2/future/types/newbytes.py index 85e6501cba..2a337c8645 100644 --- a/ext2/future/types/newbytes.py +++ b/ext2/future/types/newbytes.py @@ -100,6 +100,8 @@ def __new__(cls, *args, **kwargs): newargs.append(errors) value = args[0].encode(*newargs) ### + elif hasattr(args[0], '__bytes__'): + value = args[0].__bytes__() elif isinstance(args[0], Iterable): if len(args[0]) == 0: # This could be an empty list or tuple. Return b'' as on Py3. @@ -172,6 +174,29 @@ def __mul__(self, other): def __rmul__(self, other): return newbytes(super(newbytes, self).__rmul__(other)) + def __mod__(self, vals): + if isinstance(vals, newbytes): + vals = _builtin_bytes.__str__(vals) + + elif isinstance(vals, tuple): + newvals = [] + for v in vals: + if isinstance(v, newbytes): + v = _builtin_bytes.__str__(v) + newvals.append(v) + vals = tuple(newvals) + + elif (hasattr(vals.__class__, '__getitem__') and + hasattr(vals.__class__, 'iteritems')): + for k, v in vals.iteritems(): + if isinstance(v, newbytes): + vals[k] = _builtin_bytes.__str__(v) + + return _builtin_bytes.__mod__(self, vals) + + def __imod__(self, other): + return self.__mod__(other) + def join(self, iterable_of_bytes): errmsg = 'sequence item {0}: expected bytes, {1} found' if isbytes(iterable_of_bytes) or istext(iterable_of_bytes): @@ -348,24 +373,24 @@ def __ne__(self, other): unorderable_err = 'unorderable types: bytes() and {0}' def __lt__(self, other): - if not isbytes(other): - raise TypeError(self.unorderable_err.format(type(other))) - return super(newbytes, self).__lt__(other) + if isinstance(other, _builtin_bytes): + return super(newbytes, self).__lt__(other) + raise TypeError(self.unorderable_err.format(type(other))) def __le__(self, other): - if not isbytes(other): - 
raise TypeError(self.unorderable_err.format(type(other))) - return super(newbytes, self).__le__(other) + if isinstance(other, _builtin_bytes): + return super(newbytes, self).__le__(other) + raise TypeError(self.unorderable_err.format(type(other))) def __gt__(self, other): - if not isbytes(other): - raise TypeError(self.unorderable_err.format(type(other))) - return super(newbytes, self).__gt__(other) + if isinstance(other, _builtin_bytes): + return super(newbytes, self).__gt__(other) + raise TypeError(self.unorderable_err.format(type(other))) def __ge__(self, other): - if not isbytes(other): - raise TypeError(self.unorderable_err.format(type(other))) - return super(newbytes, self).__ge__(other) + if isinstance(other, _builtin_bytes): + return super(newbytes, self).__ge__(other) + raise TypeError(self.unorderable_err.format(type(other))) def __native__(self): # We can't just feed a newbytes object into str(), because diff --git a/ext2/future/types/newdict.py b/ext2/future/types/newdict.py index 5dbcc4b7df..3f3a559dd5 100644 --- a/ext2/future/types/newdict.py +++ b/ext2/future/types/newdict.py @@ -100,7 +100,7 @@ def __new__(cls, *args, **kwargs): else: value = args[0] return super(newdict, cls).__new__(cls, value) - + def __native__(self): """ Hook for the future.utils.native() function diff --git a/ext2/future/types/newobject.py b/ext2/future/types/newobject.py index 1ec09ff5b4..776d476647 100644 --- a/ext2/future/types/newobject.py +++ b/ext2/future/types/newobject.py @@ -15,10 +15,10 @@ def __str__(self): a = A() print(str(a)) - + # On Python 2, these relations hold: assert unicode(a) == my_unicode_string - assert str(a) == my_unicode_string.encode('utf-8') + assert str(a) == my_unicode_string.encode('utf-8') Another example:: @@ -32,29 +32,11 @@ def __next__(self): # note the Py3 interface return next(self._iter).upper() def __iter__(self): return self - + assert list(Upper('hello')) == list('HELLO') """ -import sys - -from future.utils import with_metaclass - - 
-_builtin_object = object -ver = sys.version_info[:2] - - -# We no longer define a metaclass for newobject because this breaks multiple -# inheritance and custom metaclass use with this exception: - -# TypeError: Error when calling the metaclass bases -# metaclass conflict: the metaclass of a derived class must be a -# (non-strict) subclass of the metaclasses of all its bases - -# See issues #91 and #96. - class newobject(object): """ @@ -62,7 +44,7 @@ class newobject(object): next __unicode__ __nonzero__ - + Subclasses of this class can merely define the Python 3 methods (__next__, __str__, and __bool__). """ @@ -70,7 +52,7 @@ def next(self): if hasattr(self, '__next__'): return type(self).__next__(self) raise TypeError('newobject is not an iterator') - + def __unicode__(self): # All subclasses of the builtin object should have __str__ defined. # Note that old-style classes do not have __str__ defined. @@ -123,7 +105,7 @@ def __long__(self): # else: # value = args[0] # return super(newdict, cls).__new__(cls, value) - + def __native__(self): """ Hook for the future.utils.native() function diff --git a/ext2/future/types/newopen.py b/ext2/future/types/newopen.py index 8da064274f..b75d45afb2 100644 --- a/ext2/future/types/newopen.py +++ b/ext2/future/types/newopen.py @@ -30,4 +30,3 @@ def __enter__(self): def __exit__(self, etype, value, traceback): self.f.close() - diff --git a/ext2/future/types/newrange.py b/ext2/future/types/newrange.py index c190ba9e3d..9173b05091 100644 --- a/ext2/future/types/newrange.py +++ b/ext2/future/types/newrange.py @@ -90,7 +90,10 @@ def __len__(self): def index(self, value): """Return the 0-based position of integer `value` in the sequence this range represents.""" - diff = value - self._start + try: + diff = value - self._start + except TypeError: + raise ValueError('%r is not in range' % value) quotient, remainder = divmod(diff, self._step) if remainder == 0 and 0 <= quotient < self._len: return abs(quotient) @@ -152,6 +155,9 @@ def 
__init__(self, range_): def __iter__(self): return self + def __next__(self): + return next(self._stepper) + def next(self): return next(self._stepper) diff --git a/ext2/future/types/newstr.py b/ext2/future/types/newstr.py index fd8615aff1..e6272fb900 100644 --- a/ext2/future/types/newstr.py +++ b/ext2/future/types/newstr.py @@ -37,7 +37,7 @@ ``__unicode__`` method on objects in Python 2. To define string representations of your objects portably across Py3 and Py2, use the :func:`python_2_unicode_compatible` decorator in :mod:`future.utils`. - + """ from collections import Iterable @@ -73,7 +73,7 @@ def __new__(cls, *args, **kwargs): str(object='') -> str str(bytes_or_buffer[, encoding[, errors]]) -> str - + Create a new string object from the given object. If encoding or errors is specified, then the object must expose a data buffer that will be decoded using the given encoding and error handler. @@ -81,7 +81,7 @@ def __new__(cls, *args, **kwargs): or repr(object). encoding defaults to sys.getdefaultencoding(). errors defaults to 'strict'. 
- + """ if len(args) == 0: return super(newstr, cls).__new__(cls) @@ -100,7 +100,7 @@ def __new__(cls, *args, **kwargs): else: value = args[0] return super(newstr, cls).__new__(cls, value) - + def __repr__(self): """ Without the u prefix @@ -128,7 +128,7 @@ def __contains__(self, key): else: raise TypeError(errmsg.format(type(key))) return issubset(list(newkey), list(self)) - + @no('newbytes') def __add__(self, other): return newstr(super(newstr, self).__add__(other)) @@ -302,24 +302,28 @@ def __ne__(self, other): unorderable_err = 'unorderable types: str() and {0}' def __lt__(self, other): - if not istext(other): - raise TypeError(self.unorderable_err.format(type(other))) - return super(newstr, self).__lt__(other) + if (isinstance(other, unicode) or + isinstance(other, bytes) and not isnewbytes(other)): + return super(newstr, self).__lt__(other) + raise TypeError(self.unorderable_err.format(type(other))) def __le__(self, other): - if not istext(other): - raise TypeError(self.unorderable_err.format(type(other))) - return super(newstr, self).__le__(other) + if (isinstance(other, unicode) or + isinstance(other, bytes) and not isnewbytes(other)): + return super(newstr, self).__le__(other) + raise TypeError(self.unorderable_err.format(type(other))) def __gt__(self, other): - if not istext(other): - raise TypeError(self.unorderable_err.format(type(other))) - return super(newstr, self).__gt__(other) + if (isinstance(other, unicode) or + isinstance(other, bytes) and not isnewbytes(other)): + return super(newstr, self).__gt__(other) + raise TypeError(self.unorderable_err.format(type(other))) def __ge__(self, other): - if not istext(other): - raise TypeError(self.unorderable_err.format(type(other))) - return super(newstr, self).__ge__(other) + if (isinstance(other, unicode) or + isinstance(other, bytes) and not isnewbytes(other)): + return super(newstr, self).__ge__(other) + raise TypeError(self.unorderable_err.format(type(other))) def __getattribute__(self, name): """ diff 
--git a/ext2/future/utils/__init__.py b/ext2/future/utils/__init__.py index cb4ade35df..906f1e4649 100644 --- a/ext2/future/utils/__init__.py +++ b/ext2/future/utils/__init__.py @@ -18,7 +18,7 @@ * types: * text_type: unicode in Python 2, str in Python 3 - * binary_type: str in Python 2, bythes in Python 3 + * binary_type: str in Python 2, bytes in Python 3 * string_types: basestring in Python 2, str in Python 3 * bchr(c): @@ -56,6 +56,8 @@ PY3 = sys.version_info[0] == 3 +PY35_PLUS = sys.version_info[0:2] >= (3, 5) +PY36_PLUS = sys.version_info[0:2] >= (3, 6) PY2 = sys.version_info[0] == 2 PY26 = sys.version_info[0:2] == (2, 6) PY27 = sys.version_info[0:2] == (2, 7) diff --git a/ext2/future/utils/surrogateescape.py b/ext2/future/utils/surrogateescape.py index 398c3531b6..0dcc9fa6e6 100644 --- a/ext2/future/utils/surrogateescape.py +++ b/ext2/future/utils/surrogateescape.py @@ -83,7 +83,7 @@ def replace_surrogate_encode(mystring): # The following magic comes from Py3.3's Python/codecs.c file: if not 0xD800 <= code <= 0xDCFF: # Not a surrogate. Fail with the original exception. - raise exc + raise NotASurrogateError # mybytes = [0xe0 | (code >> 12), # 0x80 | ((code >> 6) & 0x3f), # 0x80 | (code & 0x3f)] @@ -196,5 +196,3 @@ def register_surrogateescape(): # c = encodefilename(b) # assert c == fn, '%r != %r' % (c, fn) # # print("ok") - - diff --git a/ext2/github/AuthenticatedUser.py b/ext2/github/AuthenticatedUser.py index adb94d39f5..604eb0c8ed 100644 --- a/ext2/github/AuthenticatedUser.py +++ b/ext2/github/AuthenticatedUser.py @@ -14,12 +14,13 @@ # Copyright 2016 E. Dunham # # Copyright 2016 Jannis Gebauer # # Copyright 2016 Peter Buckley # -# Copyright 2017 Balázs Rostás # +# Copyright 2017 Balázs Rostás # # Copyright 2017 Jannis Gebauer # # Copyright 2017 Simon # # Copyright 2018 Wan Liuyang # # Copyright 2018 bryanhuntesl <31992054+bryanhuntesl@users.noreply.github.com> # # Copyright 2018 sfdye # +# Copyright 2018 itsbruce # # # # This file is part of PyGithub. 
# # http://pygithub.readthedocs.io/ # @@ -703,16 +704,21 @@ def get_following(self): None ) - def get_gists(self): + def get_gists(self, since=github.GithubObject.NotSet): """ :calls: `GET /gists `_ + :param since: datetime.datetime format YYYY-MM-DDTHH:MM:SSZ :rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.Gist.Gist` """ + assert since is github.GithubObject.NotSet or isinstance(since, datetime.datetime), since + url_parameters = dict() + if since is not github.GithubObject.NotSet: + url_parameters["since"] = since.strftime("%Y-%m-%dT%H:%M:%SZ") return github.PaginatedList.PaginatedList( github.Gist.Gist, self._requester, "/gists", - None + url_parameters ) def get_issues(self, filter=github.GithubObject.NotSet, state=github.GithubObject.NotSet, labels=github.GithubObject.NotSet, sort=github.GithubObject.NotSet, direction=github.GithubObject.NotSet, since=github.GithubObject.NotSet): @@ -893,18 +899,26 @@ def get_repo(self, name): ) return github.Repository.Repository(self._requester, headers, data, completed=True) - def get_repos(self, type=github.GithubObject.NotSet, sort=github.GithubObject.NotSet, direction=github.GithubObject.NotSet): + def get_repos(self, visibility=github.GithubObject.NotSet, affiliation=github.GithubObject.NotSet, type=github.GithubObject.NotSet, sort=github.GithubObject.NotSet, direction=github.GithubObject.NotSet): """ - :calls: `GET /user/repos `_ + :calls: `GET /user/repos ` + :param visibility: string + :param affiliation: string :param type: string :param sort: string :param direction: string :rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.Repository.Repository` """ + assert visibility is github.GithubObject.NotSet or isinstance(visibility, (str, unicode)), visibility + assert affiliation is github.GithubObject.NotSet or isinstance(affiliation, (str, unicode)), affiliation assert type is github.GithubObject.NotSet or isinstance(type, (str, unicode)), type assert sort is 
github.GithubObject.NotSet or isinstance(sort, (str, unicode)), sort assert direction is github.GithubObject.NotSet or isinstance(direction, (str, unicode)), direction url_parameters = dict() + if visibility is not github.GithubObject.NotSet: + url_parameters["visibility"] = visibility + if affiliation is not github.GithubObject.NotSet: + url_parameters["affiliation"] = affiliation if type is not github.GithubObject.NotSet: url_parameters["type"] = type if sort is not github.GithubObject.NotSet: @@ -1030,6 +1044,22 @@ def has_in_watched(self, watched): ) return status == 200 + def mark_notifications_as_read(self, last_read_at=datetime.datetime.utcnow()): + """ + :calls: `PUT /notifications `_ + :param last_read_at: datetime + """ + assert isinstance(last_read_at, datetime.datetime) + put_parameters = { + "last_read_at": last_read_at.strftime('%Y-%m-%dT%H:%M:%SZ') + } + + headers, data = self._requester.requestJsonAndCheck( + "PUT", + "/notifications", + input=put_parameters + ) + def remove_from_emails(self, *emails): """ :calls: `DELETE /user/emails `_ diff --git a/ext2/github/Branch.py b/ext2/github/Branch.py index 0cf690b835..72d9e8c00b 100644 --- a/ext2/github/Branch.py +++ b/ext2/github/Branch.py @@ -371,3 +371,34 @@ def remove_push_restrictions(self): "DELETE", self.protection_url + "/restrictions" ) + + def get_required_signatures(self): + """ + :calls: `GET /repos/:owner/:repo/branches/:branch/protection/required_signatures ` + """ + headers, data = self._requester.requestJsonAndCheck( + "GET", + self.protection_url + "/required_signatures", + headers={'Accept': Consts.signaturesProtectedBranchesPreview} + ) + return data["enabled"] + + def add_required_signatures(self): + """ + :calls: `POST /repos/:owner/:repo/branches/:branch/protection/required_signatures ` + """ + headers, data = self._requester.requestJsonAndCheck( + "POST", + self.protection_url + "/required_signatures", + headers={'Accept': Consts.signaturesProtectedBranchesPreview} + ) + + def 
remove_required_signatures(self): + """ + :calls: `DELETE /repos/:owner/:repo/branches/:branch/protection/required_signatures ` + """ + headers, data = self._requester.requestJsonAndCheck( + "DELETE", + self.protection_url + "/required_signatures", + headers={'Accept': Consts.signaturesProtectedBranchesPreview} + ) diff --git a/ext2/github/Consts.py b/ext2/github/Consts.py index d0125e995e..c59f419793 100644 --- a/ext2/github/Consts.py +++ b/ext2/github/Consts.py @@ -89,3 +89,9 @@ # https://developer.github.com/changes/2018-03-16-protected-branches-required-approving-reviews/ mediaTypeRequireMultipleApprovingReviews = "application/vnd.github.luke-cage-preview+json" + +# https://developer.github.com/v3/search/#highlighting-code-search-results-1 +highLightSearchPreview = "application/vnd.github.v3.text-match+json" + +# https://developer.github.com/changes/2018-02-22-protected-branches-required-signatures/ +signaturesProtectedBranchesPreview = "application/vnd.github.zzzax-preview+json" diff --git a/ext2/github/ContentFile.py b/ext2/github/ContentFile.py index 345b6fc66a..e597a09972 100644 --- a/ext2/github/ContentFile.py +++ b/ext2/github/ContentFile.py @@ -165,8 +165,17 @@ def url(self): self._completeIfNotSet(self._url) return self._url.value + @property + def text_matches(self): + """ + :type: string + """ + self._completeIfNotSet(self._text_matches) + return self._text_matches.value + def _initAttributes(self): self._content = github.GithubObject.NotSet + self._text_matches = github.GithubObject.NotSet self._encoding = github.GithubObject.NotSet self._download_url = github.GithubObject.NotSet self._git_url = github.GithubObject.NotSet @@ -206,3 +215,5 @@ def _useAttributes(self, attributes): self._type = self._makeStringAttribute(attributes["type"]) if "url" in attributes: # pragma no branch self._url = self._makeStringAttribute(attributes["url"]) + if "text_matches" in attributes: # pragma no branch + self._text_matches = 
self._makeListOfDictsAttribute(attributes["text_matches"]) diff --git a/ext2/github/GithubObject.py b/ext2/github/GithubObject.py index c3f2663824..f1ffa75292 100644 --- a/ext2/github/GithubObject.py +++ b/ext2/github/GithubObject.py @@ -188,6 +188,10 @@ def _makeListOfStringsAttribute(value): def _makeListOfIntsAttribute(value): return GithubObject.__makeSimpleListAttribute(value, int) + @staticmethod + def _makeListOfDictsAttribute(value): + return GithubObject.__makeSimpleListAttribute(value, dict) + @staticmethod def _makeListOfListOfStringsAttribute(value): return GithubObject.__makeSimpleListAttribute(value, list) diff --git a/ext2/github/MainClass.py b/ext2/github/MainClass.py index 48f7ab8c3a..ae77af89e3 100644 --- a/ext2/github/MainClass.py +++ b/ext2/github/MainClass.py @@ -27,6 +27,7 @@ # Copyright 2018 Svend Sorensen # # Copyright 2018 Wan Liuyang # # Copyright 2018 sfdye # +# Copyright 2018 itsbruce # # # # This file is part of PyGithub. # # http://pygithub.readthedocs.io/ # @@ -46,6 +47,8 @@ # # ################################################################################ +import datetime + import urllib import pickle import time @@ -91,7 +94,7 @@ class Github(object): This is the main class you instantiate to access the Github API v3. Optional parameters allow different authentication methods. 
""" - def __init__(self, login_or_token=None, password=None, base_url=DEFAULT_BASE_URL, timeout=DEFAULT_TIMEOUT, client_id=None, client_secret=None, user_agent='PyGithub/Python', per_page=DEFAULT_PER_PAGE, api_preview=False, verify=True): + def __init__(self, login_or_token=None, password=None, jwt=None, base_url=DEFAULT_BASE_URL, timeout=DEFAULT_TIMEOUT, client_id=None, client_secret=None, user_agent='PyGithub/Python', per_page=DEFAULT_PER_PAGE, api_preview=False, verify=True): """ :param login_or_token: string :param password: string @@ -106,13 +109,14 @@ def __init__(self, login_or_token=None, password=None, base_url=DEFAULT_BASE_URL assert login_or_token is None or isinstance(login_or_token, (str, unicode)), login_or_token assert password is None or isinstance(password, (str, unicode)), password + assert jwt is None or isinstance(jwt, (str, unicode)), jwt assert isinstance(base_url, (str, unicode)), base_url assert isinstance(timeout, (int, long)), timeout assert client_id is None or isinstance(client_id, (str, unicode)), client_id assert client_secret is None or isinstance(client_secret, (str, unicode)), client_secret assert user_agent is None or isinstance(user_agent, (str, unicode)), user_agent assert isinstance(api_preview, (bool)) - self.__requester = Requester(login_or_token, password, base_url, timeout, client_id, client_secret, user_agent, per_page, api_preview, verify) + self.__requester = Requester(login_or_token, password, jwt, base_url, timeout, client_id, client_secret, user_agent, per_page, api_preview, verify) def __get_FIX_REPO_GET_GIT_REF(self): """ @@ -334,16 +338,21 @@ def get_gist(self, id): ) return github.Gist.Gist(self.__requester, headers, data, completed=True) - def get_gists(self): + def get_gists(self, since=github.GithubObject.NotSet): """ :calls: `GET /gists/public `_ + :param since: datetime.datetime format YYYY-MM-DDTHH:MM:SSZ :rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.Gist.Gist` """ + assert since is 
github.GithubObject.NotSet or isinstance(since, datetime.datetime), since + url_parameters = dict() + if since is not github.GithubObject.NotSet: + url_parameters["since"] = since.strftime("%Y-%m-%dT%H:%M:%SZ") return github.PaginatedList.PaginatedList( github.Gist.Gist, self.__requester, "/gists/public", - None + url_parameters ) def search_repositories(self, query, sort=github.GithubObject.NotSet, order=github.GithubObject.NotSet, **qualifiers): @@ -451,12 +460,13 @@ def search_issues(self, query, sort=github.GithubObject.NotSet, order=github.Git url_parameters ) - def search_code(self, query, sort=github.GithubObject.NotSet, order=github.GithubObject.NotSet, **qualifiers): + def search_code(self, query, sort=github.GithubObject.NotSet, order=github.GithubObject.NotSet, highlight=False, **qualifiers): """ :calls: `GET /search/code `_ :param query: string :param sort: string ('indexed') :param order: string ('asc', 'desc') + :param highlight: boolean (True, False) :param qualifiers: keyword dict query qualifiers :rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.ContentFile.ContentFile` """ @@ -479,14 +489,16 @@ def search_code(self, query, sort=github.GithubObject.NotSet, order=github.Githu url_parameters["q"] = ' '.join(query_chunks) assert url_parameters["q"], "need at least one qualifier" + headers = {"Accept": Consts.highLightSearchPreview} if highlight else None + return github.PaginatedList.PaginatedList( github.ContentFile.ContentFile, self.__requester, "/search/code", - url_parameters + url_parameters, + headers=headers ) - def search_commits(self, query, sort=github.GithubObject.NotSet, order=github.GithubObject.NotSet, **qualifiers): """ :calls: `GET /search/commits `_ diff --git a/ext2/github/Milestone.py b/ext2/github/Milestone.py index 41a41179ae..18d0e5ead7 100644 --- a/ext2/github/Milestone.py +++ b/ext2/github/Milestone.py @@ -45,7 +45,7 @@ class Milestone(github.GithubObject.CompletableGithubObject): """ def __repr__(self): - 
return self.get__repr__({"number": self._number.value}) + return self.get__repr__({"number": self._number.value, "title": self._title.value}) @property def closed_issues(self): diff --git a/ext2/github/NamedUser.py b/ext2/github/NamedUser.py index 63dd498815..43f73aacfb 100644 --- a/ext2/github/NamedUser.py +++ b/ext2/github/NamedUser.py @@ -17,6 +17,7 @@ # Copyright 2018 Wan Liuyang # # Copyright 2018 namc # # Copyright 2018 sfdye # +# Copyright 2018 itsbruce # # # # This file is part of PyGithub. # # http://pygithub.readthedocs.io/ # @@ -36,6 +37,8 @@ # # ################################################################################ +import datetime + import github.GithubObject import github.PaginatedList @@ -334,6 +337,14 @@ def subscriptions_url(self): self._completeIfNotSet(self._subscriptions_url) return self._subscriptions_url.value + @property + def suspended_at(self): + """ + :type: datetime.datetime + """ + self._completeIfNotSet(self._suspended_at) + return self._suspended_at.value + @property def total_private_repos(self): """ @@ -402,16 +413,21 @@ def get_following(self): None ) - def get_gists(self): + def get_gists(self, since=github.GithubObject.NotSet): """ :calls: `GET /users/:user/gists `_ + :param since: datetime.datetime format YYYY-MM-DDTHH:MM:SSZ :rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.Gist.Gist` """ + assert since is github.GithubObject.NotSet or isinstance(since, datetime.datetime), since + url_parameters = dict() + if since is not github.GithubObject.NotSet: + url_parameters["since"] = since.strftime("%Y-%m-%dT%H:%M:%SZ") return github.PaginatedList.PaginatedList( github.Gist.Gist, self._requester, self.url + "/gists", - None + url_parameters ) def get_keys(self): @@ -601,6 +617,7 @@ def _initAttributes(self): self._site_admin = github.GithubObject.NotSet self._starred_url = github.GithubObject.NotSet self._subscriptions_url = github.GithubObject.NotSet + self._suspended_at = github.GithubObject.NotSet 
self._total_private_repos = github.GithubObject.NotSet self._type = github.GithubObject.NotSet self._updated_at = github.GithubObject.NotSet @@ -675,6 +692,8 @@ def _useAttributes(self, attributes): self._starred_url = self._makeStringAttribute(attributes["starred_url"]) if "subscriptions_url" in attributes: # pragma no branch self._subscriptions_url = self._makeStringAttribute(attributes["subscriptions_url"]) + if "suspended_at" in attributes: # pragma no branch + self._suspended_at = self._makeDatetimeAttribute(attributes["suspended_at"]) if "total_private_repos" in attributes: # pragma no branch self._total_private_repos = self._makeIntAttribute(attributes["total_private_repos"]) if "type" in attributes: # pragma no branch diff --git a/ext2/github/Notification.py b/ext2/github/Notification.py index f38f845b7e..b62d75f61d 100644 --- a/ext2/github/Notification.py +++ b/ext2/github/Notification.py @@ -115,6 +115,15 @@ def url(self): self._completeIfNotSet(self._url) return self._url.value + def mark_as_read(self): + """ + :calls: `PATCH /notifications/threads/:id `_ + """ + headers, data = self._requester.requestJsonAndCheck( + "PATCH", + self.url, + ) + def _initAttributes(self): self._id = github.GithubObject.NotSet self._last_read_at = github.GithubObject.NotSet diff --git a/ext2/github/PaginatedList.py b/ext2/github/PaginatedList.py index 8e25cf6000..586b4a8ed7 100644 --- a/ext2/github/PaginatedList.py +++ b/ext2/github/PaginatedList.py @@ -155,7 +155,7 @@ def totalCount(self): headers=self.__headers ) if 'link' not in headers: - self.__totalCount = len(data) + self.__totalCount = len(data) if data else 0 else: links = self.__parseLinkHeader(headers) lastUrl = links.get("last") @@ -175,7 +175,7 @@ def _getLastPageUrl(self): @property def reversed(self): - r = PaginatedList(self.__contentClass, self.__requester, self.__firstUrl, self.__firstParams) + r = PaginatedList(self.__contentClass, self.__requester, self.__firstUrl, self.__firstParams, self.__headers, 
self.__list_item) r.__reverse() return r diff --git a/ext2/github/PullRequest.py b/ext2/github/PullRequest.py index c855dfb80c..688e0625b9 100644 --- a/ext2/github/PullRequest.py +++ b/ext2/github/PullRequest.py @@ -434,6 +434,8 @@ def create_review(self, commit, body, event=github.GithubObject.NotSet, comments post_parameters['event'] = 'COMMENT' if event == github.GithubObject.NotSet else event if comments is github.GithubObject.NotSet: post_parameters['comments'] = [] + else: + post_parameters['comments'] = comments headers, data = self._requester.requestJsonAndCheck( "POST", self.url + "/reviews", diff --git a/ext2/github/Repository.py b/ext2/github/Repository.py index 4faec7bea6..59b1acd390 100644 --- a/ext2/github/Repository.py +++ b/ext2/github/Repository.py @@ -60,6 +60,7 @@ # Copyright 2018 per1234 # # Copyright 2018 sechastain # # Copyright 2018 sfdye # +# Copyright 2018 Vinay Hegde # # # This file is part of PyGithub. # # http://pygithub.readthedocs.io/ # @@ -1247,9 +1248,9 @@ def edit(self, name=None, description=github.GithubObject.NotSet, homepage=githu assert has_wiki is github.GithubObject.NotSet or isinstance(has_wiki, bool), has_wiki assert has_downloads is github.GithubObject.NotSet or isinstance(has_downloads, bool), has_downloads assert default_branch is github.GithubObject.NotSet or isinstance(default_branch, (str, unicode)), default_branch - assert allow_squash_merge is github.GithubObject.NotSet or (isinstance(allow_squash_merge, bool) and allow_squash_merge is True), allow_squash_merge - assert allow_merge_commit is github.GithubObject.NotSet or (isinstance(allow_merge_commit, bool) and allow_merge_commit is True), allow_merge_commit - assert allow_rebase_merge is github.GithubObject.NotSet or (isinstance(allow_rebase_merge, bool) and allow_rebase_merge is True), allow_rebase_merge + assert allow_squash_merge is github.GithubObject.NotSet or isinstance(allow_squash_merge, bool), allow_squash_merge + assert allow_merge_commit is 
github.GithubObject.NotSet or isinstance(allow_merge_commit, bool), allow_merge_commit + assert allow_rebase_merge is github.GithubObject.NotSet or isinstance(allow_rebase_merge, bool), allow_rebase_merge assert archived is github.GithubObject.NotSet or (isinstance(archived, bool) and archived is True), archived post_parameters = { "name": name, @@ -1340,16 +1341,26 @@ def get_branches(self): None ) - def get_collaborators(self): + def get_collaborators(self, affiliation=github.GithubObject.NotSet): """ :calls: `GET /repos/:owner/:repo/collaborators `_ + :param affiliation: string :rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.NamedUser.NamedUser` """ + + url_parameters = dict() + allowed_affiliations = ['outside', 'direct', 'all'] + if affiliation is not github.GithubObject.NotSet: + assert isinstance(affiliation, str), affiliation + assert affiliation in allowed_affiliations, \ + 'Affiliation can be one of ' + ', '.join(allowed_affiliations) + url_parameters['affiliation'] = affiliation + return github.PaginatedList.PaginatedList( github.NamedUser.NamedUser, self._requester, self.url + "/collaborators", - None + url_parameters ) def get_comment(self, id): @@ -1449,7 +1460,7 @@ def get_file_contents(self, path, ref=github.GithubObject.NotSet): url_parameters["ref"] = ref headers, data = self._requester.requestJsonAndCheck( "GET", - self.url + "/contents" + urllib.quote(path), + self.url + "/contents/" + urllib.quote(path), parameters=url_parameters ) if isinstance(data, list): @@ -1530,7 +1541,7 @@ def create_file(self, path, message, content, headers, data = self._requester.requestJsonAndCheck( "PUT", - self.url + "/contents" + urllib.quote(path), + self.url + "/contents/" + urllib.quote(path), input=put_parameters ) @@ -1594,7 +1605,7 @@ def update_file(self, path, message, content, sha, headers, data = self._requester.requestJsonAndCheck( "PUT", - self.url + "/contents" + urllib.quote(path), + self.url + "/contents/" + urllib.quote(path), 
input=put_parameters ) @@ -1644,7 +1655,7 @@ def delete_file(self, path, message, sha, headers, data = self._requester.requestJsonAndCheck( "DELETE", - self.url + "/contents" + urllib.quote(path), + self.url + "/contents/" + urllib.quote(path), input=url_parameters ) @@ -1665,7 +1676,7 @@ def get_dir_contents(self, path, ref=github.GithubObject.NotSet): url_parameters["ref"] = ref headers, data = self._requester.requestJsonAndCheck( "GET", - self.url + "/contents" + urllib.quote(path), + self.url + "/contents/" + urllib.quote(path), parameters=url_parameters ) @@ -2473,6 +2484,22 @@ def legacy_search_issues(self, state, keyword): for element in data["issues"] ] + def mark_notifications_as_read(self, last_read_at=datetime.datetime.utcnow()): + """ + :calls: `PUT /repos/:owner/:repo/notifications `_ + :param last_read_at: datetime + """ + assert isinstance(last_read_at, datetime.datetime) + put_parameters = { + "last_read_at": last_read_at.strftime('%Y-%m-%dT%H:%M:%SZ') + } + + headers, data = self._requester.requestJsonAndCheck( + "PUT", + self.url + "/notifications", + input=put_parameters + ) + def merge(self, base, head, commit_message=github.GithubObject.NotSet): """ :calls: `POST /repos/:owner/:repo/merges `_ diff --git a/ext2/github/Requester.py b/ext2/github/Requester.py index 5612eac367..85cbe4cffb 100644 --- a/ext2/github/Requester.py +++ b/ext2/github/Requester.py @@ -214,7 +214,7 @@ def _initializeDebugFeature(self): ############################################################# - def __init__(self, login_or_token, password, base_url, timeout, client_id, client_secret, user_agent, per_page, api_preview, verify): + def __init__(self, login_or_token, password, jwt, base_url, timeout, client_id, client_secret, user_agent, per_page, api_preview, verify): self._initializeDebugFeature() if password is not None: @@ -226,6 +226,8 @@ def __init__(self, login_or_token, password, base_url, timeout, client_id, clien elif login_or_token is not None: token = 
login_or_token self.__authorizationHeader = "token " + token + elif jwt is not None: + self.__authorizationHeader = "Bearer " + jwt else: self.__authorizationHeader = None @@ -293,7 +295,10 @@ def __createException(self, status, headers, output): cls = GithubException.TwoFactorException # pragma no cover (Should be covered) elif status == 403 and output.get("message").startswith("Missing or invalid User Agent string"): cls = GithubException.BadUserAgentException - elif status == 403 and output.get("message").lower().startswith("api rate limit exceeded"): + elif status == 403 and ( + output.get("message").lower().startswith("api rate limit exceeded") + or output.get("message").lower().endswith("please wait a few minutes before you try again.") + ): cls = GithubException.RateLimitExceededException elif status == 404 and output.get("message") == "Not Found": cls = GithubException.UnknownObjectException @@ -411,7 +416,8 @@ def __requestRaw(self, cnx, verb, url, requestHeaders, input): return self.__requestRaw(original_cnx, verb, url, requestHeaders, input) if status == 301 and 'location' in responseHeaders: - return self.__requestRaw(original_cnx, verb, responseHeaders['location'], requestHeaders, input) + o = urlparse.urlparse(responseHeaders['location']) + return self.__requestRaw(original_cnx, verb, o.path, requestHeaders, input) return status, responseHeaders, output @@ -465,6 +471,8 @@ def __log(self, verb, url, requestHeaders, input, status, responseHeaders, outpu requestHeaders["Authorization"] = "Basic (login and password removed)" elif requestHeaders["Authorization"].startswith("token"): requestHeaders["Authorization"] = "token (oauth token removed)" + elif requestHeaders["Authorization"].startswith("Bearer"): + requestHeaders["Authorization"] = "Bearer (jwt removed)" else: # pragma no cover (Cannot happen, but could if we add an authentication method => be prepared) requestHeaders["Authorization"] = "(unknown auth removed)" # pragma no cover (Cannot happen, 
but could if we add an authentication method => be prepared) logger.debug("%s %s://%s%s %s %s ==> %i %s %s", str(verb), self.__scheme, self.__hostname, str(url), str(requestHeaders), str(input), status, str(responseHeaders), str(output)) diff --git a/ext2/libfuturize/fixer_util.py b/ext2/libfuturize/fixer_util.py index ce1e9753b6..48e4689db9 100644 --- a/ext2/libfuturize/fixer_util.py +++ b/ext2/libfuturize/fixer_util.py @@ -62,7 +62,7 @@ def Minus(prefix=None): def commatize(leafs): """ - Accepts/turns: (Name, Name, ..., Name, Name) + Accepts/turns: (Name, Name, ..., Name, Name) Returns/into: (Name, Comma, Name, Comma, ..., Name, Comma, Name) """ new_leafs = [] @@ -272,7 +272,7 @@ def future_import2(feature, node): An alternative to future_import() which might not work ... """ root = find_root(node) - + if does_tree_import(u"__future__", feature, node): return @@ -304,7 +304,7 @@ def parse_args(arglist, scheme): Parse a list of arguments into a dict """ arglist = [i for i in arglist if i.type != token.COMMA] - + ret_mapping = dict([(k, None) for k in scheme]) for i, arg in enumerate(arglist): @@ -338,7 +338,7 @@ def touch_import_top(package, name_to_import, node): Based on lib2to3.fixer_util.touch_import() Calling this multiple times adds the imports in reverse order. - + Also adds "standard_library.install_aliases()" after "from future import standard_library". This should probably be factored into another function. 
""" @@ -415,7 +415,7 @@ def touch_import_top(package, name_to_import, node): children_hooks = [install_hooks, Newline()] else: children_hooks = [] - + # FromImport(package, [Leaf(token.NAME, name_to_import, prefix=u" ")]) children_import = [import_, Newline()] @@ -443,7 +443,10 @@ def check_future_import(node): hasattr(node.children[1], 'value') and node.children[1].value == u'__future__'): return set() - node = node.children[3] + if node.children[3].type == token.LPAR: + node = node.children[4] + else: + node = node.children[3] # now node is the import_as_name[s] # print(python_grammar.number2symbol[node.type]) # breaks sometimes if node.type == syms.import_as_names: @@ -504,15 +507,14 @@ def wrap_in_fn_call(fn_name, args, prefix=None): >>> wrap_in_fn_call("olddiv", (arg1, arg2)) olddiv(arg1, arg2) + + >>> wrap_in_fn_call("olddiv", [arg1, comma, arg2, comma, arg3]) + olddiv(arg1, arg2, arg3) """ assert len(args) > 0 - if len(args) == 1: - newargs = args - elif len(args) == 2: + if len(args) == 2: expr1, expr2 = args newargs = [expr1, Comma(), expr2] else: - assert NotImplementedError('write me') + newargs = args return Call(Name(fn_name), newargs, prefix=prefix) - - diff --git a/ext2/libfuturize/fixes/__init__.py b/ext2/libfuturize/fixes/__init__.py index a059c949dd..7de304da7f 100644 --- a/ext2/libfuturize/fixes/__init__.py +++ b/ext2/libfuturize/fixes/__init__.py @@ -94,4 +94,3 @@ # 'libfuturize.fixes.fix_unicode_literals_import', 'libfuturize.fixes.fix_xrange_with_import', # custom one because of a bug with Py3.3's lib2to3 ]) - diff --git a/ext2/libfuturize/fixes/fix_UserDict.py b/ext2/libfuturize/fixes/fix_UserDict.py index 73b1cfb88e..cb0cfacc6a 100644 --- a/ext2/libfuturize/fixes/fix_UserDict.py +++ b/ext2/libfuturize/fixes/fix_UserDict.py @@ -16,12 +16,12 @@ # def alternates(members): # return "(" + "|".join(map(repr, members)) + ")" -# -# +# +# # def build_pattern(mapping=MAPPING): # mod_list = ' | '.join(["module_name='%s'" % key for key in mapping]) # 
bare_names = alternates(mapping.keys()) -# +# # yield """name_import=import_name< 'import' ((%s) | # multiple_imports=dotted_as_names< any* (%s) any* >) > # """ % (mod_list, mod_list) @@ -33,7 +33,7 @@ # multiple_imports=dotted_as_names< # any* dotted_as_name< (%s) 'as' any > any* >) > # """ % (mod_list, mod_list) -# +# # # Find usages of module members in code e.g. thread.foo(bar) # yield "power< bare_with_attr=(%s) trailer<'.' any > any* >" % bare_names @@ -100,4 +100,3 @@ def transform(self, node, results): new_name = self.replace.get(bare_name.value) if new_name: bare_name.replace(Name(new_name, prefix=bare_name.prefix)) - diff --git a/ext2/libfuturize/fixes/fix_absolute_import.py b/ext2/libfuturize/fixes/fix_absolute_import.py index ab6a7647cb..eab9c527d3 100644 --- a/ext2/libfuturize/fixes/fix_absolute_import.py +++ b/ext2/libfuturize/fixes/fix_absolute_import.py @@ -89,4 +89,3 @@ def probably_a_local_import(self, imp_name): if exists(base_path + ext): return True return False - diff --git a/ext2/libfuturize/fixes/fix_add__future__imports_except_unicode_literals.py b/ext2/libfuturize/fixes/fix_add__future__imports_except_unicode_literals.py index 1904d37b4d..1d419a1c66 100644 --- a/ext2/libfuturize/fixes/fix_add__future__imports_except_unicode_literals.py +++ b/ext2/libfuturize/fixes/fix_add__future__imports_except_unicode_literals.py @@ -24,4 +24,3 @@ def transform(self, node, results): future_import(u"print_function", node) future_import(u"division", node) future_import(u"absolute_import", node) - diff --git a/ext2/libfuturize/fixes/fix_basestring.py b/ext2/libfuturize/fixes/fix_basestring.py index 8c6ec6ced3..5676d08fcd 100644 --- a/ext2/libfuturize/fixes/fix_basestring.py +++ b/ext2/libfuturize/fixes/fix_basestring.py @@ -15,4 +15,3 @@ class FixBasestring(fixer_base.BaseFix): def transform(self, node, results): touch_import_top(u'past.builtins', 'basestring', node) - diff --git a/ext2/libfuturize/fixes/fix_cmp.py b/ext2/libfuturize/fixes/fix_cmp.py index 
be56507eb5..762eb4b42e 100644 --- a/ext2/libfuturize/fixes/fix_cmp.py +++ b/ext2/libfuturize/fixes/fix_cmp.py @@ -31,4 +31,3 @@ class FixCmp(fixer_base.BaseFix): def transform(self, node, results): name = results["name"] touch_import_top(u'past.builtins', name.value, node) - diff --git a/ext2/libfuturize/fixes/fix_division.py b/ext2/libfuturize/fixes/fix_division.py index 4874550442..6975a52bb9 100644 --- a/ext2/libfuturize/fixes/fix_division.py +++ b/ext2/libfuturize/fixes/fix_division.py @@ -10,4 +10,3 @@ """ from libpasteurize.fixes.fix_division import FixDivision - diff --git a/ext2/libfuturize/fixes/fix_division_safe.py b/ext2/libfuturize/fixes/fix_division_safe.py index 5e16b0a514..7b0f3cbd7a 100644 --- a/ext2/libfuturize/fixes/fix_division_safe.py +++ b/ext2/libfuturize/fixes/fix_division_safe.py @@ -13,6 +13,9 @@ nothing. """ +import re +import lib2to3.pytree as pytree +from lib2to3.fixer_util import Leaf, Node, Comma from lib2to3 import fixer_base from lib2to3.fixer_util import syms, does_tree_import from libfuturize.fixer_util import (token, future_import, touch_import_top, @@ -28,6 +31,43 @@ def match_division(node): return node.type == slash and not node.next_sibling.type == slash and \ not node.prev_sibling.type == slash +const_re = re.compile('^[0-9]*[.][0-9]*$') + +def is_floaty(node, div_idx): + return _is_floaty(node.children[0:div_idx]) or _is_floaty(node.children[div_idx+1:]) + + +def _is_floaty(expr): + if isinstance(expr, list): + expr = expr[0] + + if isinstance(expr, Leaf): + # If it's a leaf, let's see if it's a numeric constant containing a '.' 
+ return const_re.match(expr.value) + elif isinstance(expr, Node): + # If the expression is a node, let's see if it's a direct cast to float + if isinstance(expr.children[0], Leaf): + return expr.children[0].value == u'float' + return False + +def find_division(node): + for i, child in enumerate(node.children): + if match_division(child): + return i + return False + +def clone_div_operands(node, div_idx): + children = [] + for i, child in enumerate(node.children): + if i == div_idx: + children.append(Comma()) + else: + children.append(child.clone()) + + # Strip any leading space for the first number: + children[0].prefix = u'' + + return children class FixDivisionSafe(fixer_base.BaseFix): # BM_compatible = True @@ -51,22 +91,19 @@ def match(self, node): Since the tree needs to be fixed once and only once if and only if it matches, we can start discarding matches after the first. """ - if (node.type == self.syms.term and - len(node.children) == 3 and - match_division(node.children[1])): - expr1, expr2 = node.children[0], node.children[2] - return expr1, expr2 - else: - return False + if node.type == self.syms.term: + div_idx = find_division(node) + if div_idx is not False: + # if expr1 or expr2 are obviously floats, we don't need to wrap in + # old_div, as the behavior of division between any number and a float + # should be the same in 2 or 3 + if not is_floaty(node, div_idx): + return clone_div_operands(node, div_idx) + return False def transform(self, node, results): if self.skip: return future_import(u"division", node) - touch_import_top(u'past.utils', u'old_div', node) - expr1, expr2 = results[0].clone(), results[1].clone() - # Strip any leading space for the first number: - expr1.prefix = u'' - return wrap_in_fn_call("old_div", (expr1, expr2), prefix=node.prefix) - + return wrap_in_fn_call("old_div", results, prefix=node.prefix) diff --git a/ext2/libfuturize/fixes/fix_execfile.py b/ext2/libfuturize/fixes/fix_execfile.py index 2b794c882f..cfe9d8d0f6 100644 --- 
a/ext2/libfuturize/fixes/fix_execfile.py +++ b/ext2/libfuturize/fixes/fix_execfile.py @@ -35,4 +35,3 @@ class FixExecfile(fixer_base.BaseFix): def transform(self, node, results): name = results["name"] touch_import_top(u'past.builtins', name.value, node) - diff --git a/ext2/libfuturize/fixes/fix_future_builtins.py b/ext2/libfuturize/fixes/fix_future_builtins.py index bf3aba40e1..eea6c6a1ee 100644 --- a/ext2/libfuturize/fixes/fix_future_builtins.py +++ b/ext2/libfuturize/fixes/fix_future_builtins.py @@ -57,4 +57,3 @@ def transform(self, node, results): name = results["name"] touch_import_top(u'builtins', name.value, node) # name.replace(Name(u"input", prefix=name.prefix)) - diff --git a/ext2/libfuturize/fixes/fix_future_standard_library.py b/ext2/libfuturize/fixes/fix_future_standard_library.py index 501c2a940e..a1c3f3d4ea 100644 --- a/ext2/libfuturize/fixes/fix_future_standard_library.py +++ b/ext2/libfuturize/fixes/fix_future_standard_library.py @@ -22,5 +22,3 @@ def transform(self, node, results): # TODO: add a blank line between any __future__ imports and this? touch_import_top(u'future', u'standard_library', node) return result - - diff --git a/ext2/libfuturize/fixes/fix_future_standard_library_urllib.py b/ext2/libfuturize/fixes/fix_future_standard_library_urllib.py index 3d62959fd2..cf6738845f 100644 --- a/ext2/libfuturize/fixes/fix_future_standard_library_urllib.py +++ b/ext2/libfuturize/fixes/fix_future_standard_library_urllib.py @@ -26,5 +26,3 @@ def transform(self, node, results): # TODO: add a blank line between any __future__ imports and this? 
touch_import_top(u'future', u'standard_library', root) return result - - diff --git a/ext2/libfuturize/fixes/fix_order___future__imports.py b/ext2/libfuturize/fixes/fix_order___future__imports.py index 143126394d..00d7ef6061 100644 --- a/ext2/libfuturize/fixes/fix_order___future__imports.py +++ b/ext2/libfuturize/fixes/fix_order___future__imports.py @@ -34,4 +34,3 @@ class FixOrderFutureImports(fixer_base.BaseFix): def transform(self, node, results): # TODO # write me pass - diff --git a/ext2/libfuturize/fixes/fix_print_with_import.py b/ext2/libfuturize/fixes/fix_print_with_import.py index 5308d9252a..3449046101 100644 --- a/ext2/libfuturize/fixes/fix_print_with_import.py +++ b/ext2/libfuturize/fixes/fix_print_with_import.py @@ -20,4 +20,3 @@ def transform(self, node, results): future_import(u'print_function', node) n_stmt = super(FixPrintWithImport, self).transform(node, results) return n_stmt - diff --git a/ext2/libfuturize/fixes/fix_remove_old__future__imports.py b/ext2/libfuturize/fixes/fix_remove_old__future__imports.py index 060eb00417..9336f75f35 100644 --- a/ext2/libfuturize/fixes/fix_remove_old__future__imports.py +++ b/ext2/libfuturize/fixes/fix_remove_old__future__imports.py @@ -24,4 +24,3 @@ def transform(self, node, results): remove_future_import(u"with_statement", node) remove_future_import(u"nested_scopes", node) remove_future_import(u"generators", node) - diff --git a/ext2/libfuturize/fixes/fix_unicode_keep_u.py b/ext2/libfuturize/fixes/fix_unicode_keep_u.py index a6f70f092a..2e9a4e476a 100644 --- a/ext2/libfuturize/fixes/fix_unicode_keep_u.py +++ b/ext2/libfuturize/fixes/fix_unicode_keep_u.py @@ -22,4 +22,3 @@ def transform(self, node, results): new = node.clone() new.value = _mapping[node.value] return new - diff --git a/ext2/libfuturize/fixes/fix_unicode_literals_import.py b/ext2/libfuturize/fixes/fix_unicode_literals_import.py index 9f21d7c69f..51c50620b7 100644 --- a/ext2/libfuturize/fixes/fix_unicode_literals_import.py +++ 
b/ext2/libfuturize/fixes/fix_unicode_literals_import.py @@ -1,6 +1,6 @@ """ Adds this import: - + from __future__ import unicode_literals """ @@ -16,4 +16,3 @@ class FixUnicodeLiteralsImport(fixer_base.BaseFix): def transform(self, node, results): future_import(u"unicode_literals", node) - diff --git a/ext2/libfuturize/main.py b/ext2/libfuturize/main.py index 18f33ec0a4..634c2f25e6 100644 --- a/ext2/libfuturize/main.py +++ b/ext2/libfuturize/main.py @@ -70,7 +70,7 @@ import optparse import os -from lib2to3.main import main, warn, StdoutRefactoringTool +from lib2to3.main import warn, StdoutRefactoringTool from lib2to3 import refactor from libfuturize.fixes import (lib2to3_fix_names_stage1, @@ -91,7 +91,7 @@ def main(args=None): Returns a suggested exit status (0, 1, 2). """ - + # Set up option parser parser = optparse.OptionParser(usage="futurize [options] file|dir ...") parser.add_option("-V", "--version", action="store_true", @@ -205,7 +205,27 @@ def main(args=None): print("Use --help to show usage.", file=sys.stderr) return 2 - unwanted_fixes = set(fixer_pkg + ".fix_" + fix for fix in options.nofix) + unwanted_fixes = set() + for fix in options.nofix: + if ".fix_" in fix: + unwanted_fixes.add(fix) + else: + # Infer the full module name for the fixer. + # First ensure that no names clash (e.g. + # lib2to3.fixes.fix_blah and libfuturize.fixes.fix_blah): + found = [f for f in avail_fixes + if f.endswith('fix_{0}'.format(fix))] + if len(found) > 1: + print("Ambiguous fixer name. Choose a fully qualified " + "module name instead from these:\n" + + "\n".join(" " + myf for myf in found), + file=sys.stderr) + return 2 + elif len(found) == 0: + print("Unknown fixer. 
Use --list-fixes or -l for a list.", + file=sys.stderr) + return 2 + unwanted_fixes.add(found[0]) extra_fixes = set() if options.all_imports: diff --git a/ext2/libpasteurize/fixes/__init__.py b/ext2/libpasteurize/fixes/__init__.py index c362ada23d..905aec47e7 100644 --- a/ext2/libpasteurize/fixes/__init__.py +++ b/ext2/libpasteurize/fixes/__init__.py @@ -52,4 +52,3 @@ 'libpasteurize.fixes.fix_unpacking', # yes, this is useful # 'libpasteurize.fixes.fix_with' # way out of date ]) - diff --git a/ext2/libpasteurize/fixes/feature_base.py b/ext2/libpasteurize/fixes/feature_base.py index 8a264964c9..c36d9a951b 100644 --- a/ext2/libpasteurize/fixes/feature_base.py +++ b/ext2/libpasteurize/fixes/feature_base.py @@ -40,7 +40,7 @@ def update_mapping(self): Called every time we care about the mapping of names to features. """ self.mapping = dict([(f.name, f) for f in iter(self)]) - + @property def PATTERN(self): u""" diff --git a/ext2/libpasteurize/fixes/fix_add_all__future__imports.py b/ext2/libpasteurize/fixes/fix_add_all__future__imports.py index 32f89ec18f..378979461f 100644 --- a/ext2/libpasteurize/fixes/fix_add_all__future__imports.py +++ b/ext2/libpasteurize/fixes/fix_add_all__future__imports.py @@ -22,4 +22,3 @@ def transform(self, node, results): future_import(u"print_function", node) future_import(u"division", node) future_import(u"absolute_import", node) - diff --git a/ext2/libpasteurize/fixes/fix_add_all_future_builtins.py b/ext2/libpasteurize/fixes/fix_add_all_future_builtins.py index 97204b584b..22911badae 100644 --- a/ext2/libpasteurize/fixes/fix_add_all_future_builtins.py +++ b/ext2/libpasteurize/fixes/fix_add_all_future_builtins.py @@ -35,4 +35,3 @@ def transform(self, node, results): # range round str super zip""" # for builtin in sorted(builtins.split(), reverse=True): # touch_import_top(u'builtins', builtin, node) - diff --git a/ext2/libpasteurize/fixes/fix_annotations.py b/ext2/libpasteurize/fixes/fix_annotations.py index 1926288c23..884b674111 100644 --- 
a/ext2/libpasteurize/fixes/fix_annotations.py +++ b/ext2/libpasteurize/fixes/fix_annotations.py @@ -19,7 +19,7 @@ def warn_once(self, node, reason): if not self.warned: self.warned = True self.warning(node, reason=reason) - + PATTERN = u""" funcdef< 'def' any parameters< '(' [params=any] ')' > ['->' ret=any] ':' any* > """ diff --git a/ext2/libpasteurize/fixes/fix_features.py b/ext2/libpasteurize/fixes/fix_features.py index 7e5f545aff..52630f9826 100644 --- a/ext2/libpasteurize/fixes/fix_features.py +++ b/ext2/libpasteurize/fixes/fix_features.py @@ -71,7 +71,7 @@ def match(self, node): # if it's there, so we don't care if it fails for normal reasons. pass return to_ret - + def transform(self, node, results): for feature_name in results: if feature_name in self.features_warned: diff --git a/ext2/libpasteurize/fixes/fix_fullargspec.py b/ext2/libpasteurize/fixes/fix_fullargspec.py index 489295f7b8..4bd37e1512 100644 --- a/ext2/libpasteurize/fixes/fix_fullargspec.py +++ b/ext2/libpasteurize/fixes/fix_fullargspec.py @@ -8,7 +8,7 @@ warn_msg = u"some of the values returned by getfullargspec are not valid in Python 2 and have no equivalent." 
class FixFullargspec(fixer_base.BaseFix): - + PATTERN = u"'getfullargspec'" def transform(self, node, results): diff --git a/ext2/libpasteurize/fixes/fix_future_builtins.py b/ext2/libpasteurize/fixes/fix_future_builtins.py index 27339abc3b..6849679987 100644 --- a/ext2/libpasteurize/fixes/fix_future_builtins.py +++ b/ext2/libpasteurize/fixes/fix_future_builtins.py @@ -44,4 +44,3 @@ def transform(self, node, results): name = results["name"] touch_import_top(u'builtins', name.value, node) # name.replace(Name(u"input", prefix=name.prefix)) - diff --git a/ext2/libpasteurize/fixes/fix_imports.py b/ext2/libpasteurize/fixes/fix_imports.py index 4db0d548f5..2d6718f166 100644 --- a/ext2/libpasteurize/fixes/fix_imports.py +++ b/ext2/libpasteurize/fixes/fix_imports.py @@ -110,4 +110,3 @@ class FixImports(fixer_base.BaseFix): def transform(self, node, results): touch_import_top(u'future', u'standard_library', node) - diff --git a/ext2/libpasteurize/fixes/fix_imports2.py b/ext2/libpasteurize/fixes/fix_imports2.py index 5b30b5f5db..70444e9e06 100644 --- a/ext2/libpasteurize/fixes/fix_imports2.py +++ b/ext2/libpasteurize/fixes/fix_imports2.py @@ -18,11 +18,11 @@ u'RADIOBUTTON', u'RAISED', u'READABLE', u'RIDGE', u'RIGHT', u'ROUND', u'S', u'SCROLL', u'SE', u'SEL', u'SEL_FIRST', u'SEL_LAST', u'SEPARATOR', u'SINGLE', u'SOLID', u'SUNKEN', u'SW', u'StringTypes', - u'TOP', u'TRUE', u'TclVersion', u'TkVersion', u'UNDERLINE', + u'TOP', u'TRUE', u'TclVersion', u'TkVersion', u'UNDERLINE', u'UNITS', u'VERTICAL', u'W', u'WORD', u'WRITABLE', u'X', u'Y', u'YES', u'wantobjects') -PY2MODULES = { +PY2MODULES = { u'urllib2' : ( u'AbstractBasicAuthHandler', u'AbstractDigestAuthHandler', u'AbstractHTTPHandler', u'BaseHandler', u'CacheFTPHandler', @@ -172,4 +172,3 @@ class FixImports2(fixer_base.BaseFix): def transform(self, node, results): touch_import_top(u'future', u'standard_library', node) - diff --git a/ext2/libpasteurize/fixes/fix_kwargs.py b/ext2/libpasteurize/fixes/fix_kwargs.py index 
59a3043bef..290f991eef 100644 --- a/ext2/libpasteurize/fixes/fix_kwargs.py +++ b/ext2/libpasteurize/fixes/fix_kwargs.py @@ -61,7 +61,7 @@ def remove_params(raw_params, kwargs_default=_kwargs_default_name): return False else: return True - + def needs_fixing(raw_params, kwargs_default=_kwargs_default_name): u""" Returns string with the name of the kwargs dict if the params after the first star need fixing @@ -145,4 +145,3 @@ def transform(self, node, results): arglist.append_child(Comma()) arglist.append_child(DoubleStar(prefix=u" ")) arglist.append_child(Name(new_kwargs)) - diff --git a/ext2/libpasteurize/fixes/fix_metaclass.py b/ext2/libpasteurize/fixes/fix_metaclass.py index 5e6e64d812..52dd1d1454 100644 --- a/ext2/libpasteurize/fixes/fix_metaclass.py +++ b/ext2/libpasteurize/fixes/fix_metaclass.py @@ -61,7 +61,7 @@ def transform(self, node, results): name = meta name.prefix = u" " stmt_node = Node(syms.atom, [target, equal, name]) - + suitify(node) for item in node.children: if item.type == syms.suite: diff --git a/ext2/libpasteurize/fixes/fix_unpacking.py b/ext2/libpasteurize/fixes/fix_unpacking.py index 1e53a9bf77..c2d3207a20 100644 --- a/ext2/libpasteurize/fixes/fix_unpacking.py +++ b/ext2/libpasteurize/fixes/fix_unpacking.py @@ -60,7 +60,7 @@ def fix_explicit_context(self, node, results): setup_line = Assign(Name(self.LISTNAME), Call(Name(u"list"), [source.clone()])) power_line = Assign(target, assignment_source(len(pre), len(post), self.LISTNAME, self.ITERNAME)) return setup_line, power_line - + def fix_implicit_context(self, node, results): u""" Only example of the implicit context is diff --git a/ext2/libpasteurize/main.py b/ext2/libpasteurize/main.py index cc0e6ec352..4179174b56 100644 --- a/ext2/libpasteurize/main.py +++ b/ext2/libpasteurize/main.py @@ -114,8 +114,27 @@ def main(args=None): level = logging.DEBUG if options.verbose else logging.INFO logging.basicConfig(format='%(name)s: %(message)s', level=level) - # Initialize the refactoring tool - 
unwanted_fixes = set(fixer_pkg + ".fix_" + fix for fix in options.nofix) + unwanted_fixes = set() + for fix in options.nofix: + if ".fix_" in fix: + unwanted_fixes.add(fix) + else: + # Infer the full module name for the fixer. + # First ensure that no names clash (e.g. + # lib2to3.fixes.fix_blah and libfuturize.fixes.fix_blah): + found = [f for f in avail_fixes + if f.endswith('fix_{0}'.format(fix))] + if len(found) > 1: + print("Ambiguous fixer name. Choose a fully qualified " + "module name instead from these:\n" + + "\n".join(" " + myf for myf in found), + file=sys.stderr) + return 2 + elif len(found) == 0: + print("Unknown fixer. Use --list-fixes or -l for a list.", + file=sys.stderr) + return 2 + unwanted_fixes.add(found[0]) extra_fixes = set() if options.all_imports: @@ -124,8 +143,45 @@ def main(args=None): extra_fixes.add(prefix + 'fix_add_future_standard_library_import') extra_fixes.add(prefix + 'fix_add_all_future_builtins') - fixer_names = avail_fixes | extra_fixes - unwanted_fixes + explicit = set() + if options.fix: + all_present = False + for fix in options.fix: + if fix == 'all': + all_present = True + else: + if ".fix_" in fix: + explicit.add(fix) + else: + # Infer the full module name for the fixer. + # First ensure that no names clash (e.g. + # lib2to3.fixes.fix_blah and libpasteurize.fixes.fix_blah): + found = [f for f in avail_fixes + if f.endswith('fix_{0}'.format(fix))] + if len(found) > 1: + print("Ambiguous fixer name. Choose a fully qualified " + "module name instead from these:\n" + + "\n".join(" " + myf for myf in found), + file=sys.stderr) + return 2 + elif len(found) == 0: + print("Unknown fixer. 
Use --list-fixes or -l for a list.", + file=sys.stderr) + return 2 + explicit.add(found[0]) + if len(explicit & unwanted_fixes) > 0: + print("Conflicting usage: the following fixers have been " + "simultaneously requested and disallowed:\n" + + "\n".join(" " + myf for myf in (explicit & unwanted_fixes)), + file=sys.stderr) + return 2 + requested = avail_fixes.union(explicit) if all_present else explicit + else: + requested = avail_fixes.union(explicit) + + fixer_names = requested | extra_fixes - unwanted_fixes + # Initialize the refactoring tool rt = StdoutRefactoringTool(sorted(fixer_names), flags, set(), options.nobackups, not options.no_diffs) @@ -146,4 +202,3 @@ def main(args=None): # Return error status (0 if rt.errors is zero) return int(bool(rt.errors)) - diff --git a/ext2/past/__init__.py b/ext2/past/__init__.py index 08eeb58d6d..3b5d9db178 100644 --- a/ext2/past/__init__.py +++ b/ext2/past/__init__.py @@ -67,7 +67,7 @@ until the authors of the Python 2 modules have upgraded their code. Then, for example:: - + >>> mypy2module.func_taking_py2_string(oldstr(b'abcd')) @@ -80,7 +80,7 @@ Licensing --------- -Copyright 2013-2016 Python Charmers Pty Ltd, Australia. +Copyright 2013-2018 Python Charmers Pty Ltd, Australia. The software is distributed under an MIT licence. See LICENSE.txt. """ @@ -90,4 +90,3 @@ __title__ = 'past' __author__ = 'Ed Schofield' - diff --git a/ext2/past/builtins/__init__.py b/ext2/past/builtins/__init__.py index a967736d03..1b19e373c8 100644 --- a/ext2/past/builtins/__init__.py +++ b/ext2/past/builtins/__init__.py @@ -59,9 +59,9 @@ if utils.PY3: # We only import names that shadow the builtins on Py3. No other namespace # pollution on Py3. 
- + # Only shadow builtins on Py3; no new names - __all__ = ['filter', 'map', 'range', 'reduce', 'zip', + __all__ = ['filter', 'map', 'range', 'reduce', 'zip', 'basestring', 'dict', 'str', 'long', 'unicode', 'apply', 'chr', 'cmp', 'execfile', 'intern', 'raw_input', 'reload', 'unichr', 'xrange' diff --git a/ext2/past/builtins/misc.py b/ext2/past/builtins/misc.py index 55dc63c629..06fbb92d20 100644 --- a/ext2/past/builtins/misc.py +++ b/ext2/past/builtins/misc.py @@ -87,4 +87,3 @@ def execfile(filename, myglobals=None, mylocals=None): 'reload', 'unichr', 'unicode', 'xrange'] else: __all__ = [] - diff --git a/ext2/past/builtins/noniterators.py b/ext2/past/builtins/noniterators.py index 66a4a8a575..5826b97c1e 100644 --- a/ext2/past/builtins/noniterators.py +++ b/ext2/past/builtins/noniterators.py @@ -6,7 +6,7 @@ And then, for example:: assert isinstance(range(5), list) - + The list-producing functions this brings in are:: - ``filter`` @@ -19,7 +19,7 @@ from __future__ import division, absolute_import, print_function -from itertools import chain, starmap +from itertools import chain, starmap import itertools # since zip_longest doesn't exist on Py2 from past.types import basestring from past.utils import PY3 @@ -36,7 +36,7 @@ def flatmap(f, items): def oldfilter(*args): """ filter(function or None, sequence) -> list, tuple, or string - + Return those items of sequence for which function(item) is true. If function is None, return the items that are true. If sequence is a tuple or string, return the same type, else return a list. @@ -56,7 +56,7 @@ def oldfilter(*args): def oldmap(func, *iterables): """ map(function, sequence[, sequence, ...]) -> list - + Return a list of the results of applying the function to the items of the argument sequence(s). If more than one sequence is given, the function is called with an argument list consisting of @@ -64,7 +64,7 @@ def oldmap(func, *iterables): missing values when not all sequences have the same length. 
If the function is None, return a list of the items of the sequence (or a list of tuples if more than one sequence). - + Test cases: >>> oldmap(None, 'hello world') ['h', 'e', 'l', 'l', 'o', ' ', 'w', 'o', 'r', 'l', 'd'] @@ -102,22 +102,22 @@ def oldmap(func, *iterables): # PyObject *it; /* the iterator object */ # int saw_StopIteration; /* bool: did the iterator end? */ # } sequence; - # + # # PyObject *func, *result; # sequence *seqs = NULL, *sqp; # Py_ssize_t n, len; # register int i, j; - # + # # n = PyTuple_Size(args); # if (n < 2) { # PyErr_SetString(PyExc_TypeError, # "map() requires at least two args"); # return NULL; # } - # + # # func = PyTuple_GetItem(args, 0); # n--; - # + # # if (func == Py_None) { # if (PyErr_WarnPy3k("map(None, ...) not supported in 3.x; " # "use list(...)", 1) < 0) @@ -127,7 +127,7 @@ def oldmap(func, *iterables): # return PySequence_List(PyTuple_GetItem(args, 1)); # } # } - # + # # /* Get space for sequence descriptors. Must NULL out the iterator # * pointers so that jumping to Fail_2 later doesn't see trash. # */ @@ -139,7 +139,7 @@ def oldmap(func, *iterables): # seqs[i].it = (PyObject*)NULL; # seqs[i].saw_StopIteration = 0; # } - # + # # /* Do a first pass to obtain iterators for the arguments, and set len # * to the largest of their lengths. # */ @@ -147,7 +147,7 @@ def oldmap(func, *iterables): # for (i = 0, sqp = seqs; i < n; ++i, ++sqp) { # PyObject *curseq; # Py_ssize_t curlen; - # + # # /* Get iterator. */ # curseq = PyTuple_GetItem(args, i+1); # sqp->it = PyObject_GetIter(curseq); @@ -159,27 +159,27 @@ def oldmap(func, *iterables): # PyErr_SetString(PyExc_TypeError, errbuf); # goto Fail_2; # } - # + # # /* Update len. */ # curlen = _PyObject_LengthHint(curseq, 8); # if (curlen > len) # len = curlen; # } - # + # # /* Get space for the result list. */ # if ((result = (PyObject *) PyList_New(len)) == NULL) # goto Fail_2; - # + # # /* Iterate over the sequences until all have stopped. 
*/ # for (i = 0; ; ++i) { # PyObject *alist, *item=NULL, *value; # int numactive = 0; - # + # # if (func == Py_None && n == 1) # alist = NULL; # else if ((alist = PyTuple_New(n)) == NULL) # goto Fail_1; - # + # # for (j = 0, sqp = seqs; j < n; ++j, ++sqp) { # if (sqp->saw_StopIteration) { # Py_INCREF(Py_None); @@ -204,15 +204,15 @@ def oldmap(func, *iterables): # else # break; # } - # + # # if (!alist) # alist = item; - # + # # if (numactive == 0) { # Py_DECREF(alist); # break; # } - # + # # if (func == Py_None) # value = alist; # else { @@ -230,12 +230,12 @@ def oldmap(func, *iterables): # else if (PyList_SetItem(result, i, value) < 0) # goto Fail_1; # } - # + # # if (i < len && PyList_SetSlice(result, i, len, NULL) < 0) # goto Fail_1; - # + # # goto Succeed; - # + # # Fail_1: # Py_DECREF(result); # Fail_2: @@ -270,4 +270,3 @@ def oldzip(*args, **kwargs): reduce = __builtin__.reduce zip = __builtin__.zip __all__ = [] - diff --git a/ext2/past/translation/__init__.py b/ext2/past/translation/__init__.py index 7b21d9f5f1..c7ae2b7a08 100644 --- a/ext2/past/translation/__init__.py +++ b/ext2/past/translation/__init__.py @@ -28,7 +28,7 @@ >>> from past.translation import remove_hooks >>> remove_hooks() -Author: Ed Schofield. +Author: Ed Schofield. Inspired by and based on ``uprefix`` by Vinay M. Sajip. 
""" @@ -220,16 +220,16 @@ def detect_python2(source, pathname): # The above fixers made changes, so we conclude it's Python 2 code logger.debug('Detected Python 2 code: {0}'.format(pathname)) with open('/tmp/original_code.py', 'w') as f: - f.write('### Original code (detected as py2): %s\n%s' % + f.write('### Original code (detected as py2): %s\n%s' % (pathname, source)) with open('/tmp/py2_detection_code.py', 'w') as f: - f.write('### Code after running py3 detection (from %s)\n%s' % + f.write('### Code after running py3 detection (from %s)\n%s' % (pathname, str(tree)[:-1])) return True else: logger.debug('Detected Python 3 code: {0}'.format(pathname)) with open('/tmp/original_code.py', 'w') as f: - f.write('### Original code (detected as py3): %s\n%s' % + f.write('### Original code (detected as py3): %s\n%s' % (pathname, source)) try: os.remove('/tmp/futurize_code.py') @@ -359,7 +359,7 @@ def load_module(self, fullname): # Is the test in the next line more or less robust than the # following one? Presumably less ... 
# ispkg = self.pathname.endswith('__init__.py') - + if self.kind == imp.PKG_DIRECTORY: mod.__path__ = [ os.path.dirname(self.pathname) ] mod.__package__ = fullname @@ -367,7 +367,7 @@ def load_module(self, fullname): #else, regular module mod.__path__ = [] mod.__package__ = fullname.rpartition('.')[0] - + try: cachename = imp.cache_from_source(self.pathname) if not os.path.exists(cachename): @@ -396,15 +396,15 @@ def load_module(self, fullname): if detect_python2(source, self.pathname): source = self.transform(source) with open('/tmp/futurized_code.py', 'w') as f: - f.write('### Futurized code (from %s)\n%s' % + f.write('### Futurized code (from %s)\n%s' % (self.pathname, source)) code = compile(source, self.pathname, 'exec') dirname = os.path.dirname(cachename) - if not os.path.exists(dirname): - os.makedirs(dirname) try: + if not os.path.exists(dirname): + os.makedirs(dirname) with open(cachename, 'wb') as f: data = marshal.dumps(code) f.write(data) @@ -457,7 +457,7 @@ def detect_hooks(): class hooks(object): """ Acts as a context manager. Use like this: - + >>> from past import translation >>> with translation.hooks(): ... import mypy2module @@ -477,7 +477,7 @@ def __exit__(self, *args): class suspend_hooks(object): """ Acts as a context manager. 
Use like this: - + >>> from past import translation >>> translation.install_hooks() >>> import http.client @@ -495,4 +495,3 @@ def __enter__(self): def __exit__(self, *args): if self.hooks_were_installed: install_hooks() - diff --git a/ext2/past/types/__init__.py b/ext2/past/types/__init__.py index a31b2646cf..91dd270f2d 100644 --- a/ext2/past/types/__init__.py +++ b/ext2/past/types/__init__.py @@ -27,4 +27,3 @@ unicode = str # from .unicode import unicode __all__ = ['basestring', 'olddict', 'oldstr', 'long', 'unicode'] - diff --git a/ext2/past/types/basestring.py b/ext2/past/types/basestring.py index 15437bf7c8..1cab22f6ca 100644 --- a/ext2/past/types/basestring.py +++ b/ext2/past/types/basestring.py @@ -37,4 +37,3 @@ class basestring(with_metaclass(BaseBaseString)): __all__ = ['basestring'] - diff --git a/ext2/past/types/olddict.py b/ext2/past/types/olddict.py index b213e28fa9..f4f92a26a6 100644 --- a/ext2/past/types/olddict.py +++ b/ext2/past/types/olddict.py @@ -71,7 +71,7 @@ def has_key(self, k): # in the keyword argument list. 
For example: dict(one=1, two=2) # """ - # + # # if len(args) == 0: # return super(olddict, cls).__new__(cls) # # Was: elif isinstance(args[0], newbytes): @@ -85,7 +85,7 @@ def has_key(self, k): # else: # value = args[0] # return super(olddict, cls).__new__(cls, value) - + def __native__(self): """ Hook for the past.utils.native() function @@ -94,4 +94,3 @@ def __native__(self): __all__ = ['olddict'] - diff --git a/ext2/past/types/oldstr.py b/ext2/past/types/oldstr.py index 1b90e3e11e..7768d32847 100644 --- a/ext2/past/types/oldstr.py +++ b/ext2/past/types/oldstr.py @@ -32,7 +32,7 @@ def unescape(s): def """ return s.encode().decode('unicode_escape') - + class oldstr(with_metaclass(BaseOldStr, _builtin_bytes)): """ @@ -55,14 +55,14 @@ def __dir__(self): # bytes(bytes_or_buffer) -> immutable copy of bytes_or_buffer # bytes(int) -> bytes object of size given by the parameter initialized with null bytes # bytes() -> empty bytes object - # + # # Construct an immutable array of bytes from: # - an iterable yielding integers in range(256) # - a text string encoded using the specified encoding # - any object implementing the buffer API. # - an integer # """ - # + # # if len(args) == 0: # return super(newbytes, cls).__new__(cls) # # Was: elif isinstance(args[0], newbytes): @@ -84,7 +84,7 @@ def __dir__(self): # if 'errors' in kwargs: # newargs.append(kwargs['errors']) # value = args[0].encode(*newargs) - # ### + # ### # elif isinstance(args[0], Iterable): # if len(args[0]) == 0: # # What is this? @@ -101,7 +101,7 @@ def __dir__(self): # else: # value = args[0] # return super(newbytes, cls).__new__(cls, value) - + def __repr__(self): s = super(oldstr, self).__repr__() # e.g. 
b'abc' on Py3, b'abc' on Py3 return s[1:] @@ -124,7 +124,7 @@ def __getslice__(self, *args): def __contains__(self, key): if isinstance(key, int): return False - + def __native__(self): return bytes(self) diff --git a/ext2/past/utils/__init__.py b/ext2/past/utils/__init__.py index 02f06d5957..c6606d0b98 100644 --- a/ext2/past/utils/__init__.py +++ b/ext2/past/utils/__init__.py @@ -26,13 +26,13 @@ def with_metaclass(meta, *bases): Function from jinja2/_compat.py. License: BSD. Use it like this:: - + class BaseForm(object): pass - + class FormType(type): pass - + class Form(with_metaclass(FormType, BaseForm)): pass @@ -42,7 +42,7 @@ class Form(with_metaclass(FormType, BaseForm)): we also need to make sure that we downgrade the custom metaclass for one level to something closer to type (that's why __call__ and __init__ comes back from type etc.). - + This has the advantage over six.with_metaclass of not introducing dummy classes into the final MRO. """ @@ -62,7 +62,7 @@ def native(obj): On Py3, returns the corresponding native Py3 types that are superclasses for forward-ported objects from Py2: - + >>> from past.builtins import str, dict >>> native(str(b'ABC')) # Output on Py3 follows. 
On Py2, output is 'ABC' diff --git a/ext2/tkinter/__init__.py b/ext2/tkinter/__init__.py index d0e1b0afec..bb730c3567 100644 --- a/ext2/tkinter/__init__.py +++ b/ext2/tkinter/__init__.py @@ -3,9 +3,25 @@ if sys.version_info[0] < 3: from Tkinter import * - from Tkinter import (_cnfmerge, _default_root, _flatten, _join, _setit, - _splitdict, _stringify, _support_default_root, _test, - _tkinter) + from Tkinter import (_cnfmerge, _default_root, _flatten, + _support_default_root, _test, + _tkinter, _setit) + + try: # >= 2.7.4 + from Tkinter import (_join) + except ImportError: + pass + + try: # >= 2.7.4 + from Tkinter import (_stringify) + except ImportError: + pass + + try: # >= 2.7.9 + from Tkinter import (_splitdict) + except ImportError: + pass + else: raise ImportError('This package should not be accessible on Python 3. ' 'Either you are trying to run from the python-future src folder ' diff --git a/ext2/tkinter/colorchooser.py b/ext2/tkinter/colorchooser.py index 5e7c97f444..6dde6e8d30 100644 --- a/ext2/tkinter/colorchooser.py +++ b/ext2/tkinter/colorchooser.py @@ -10,4 +10,3 @@ except ImportError: raise ImportError('The tkColorChooser module is missing. Does your Py2 ' 'installation include tkinter?') - diff --git a/ext2/tkinter/commondialog.py b/ext2/tkinter/commondialog.py index 7747a0ba0b..eb7ae8d607 100644 --- a/ext2/tkinter/commondialog.py +++ b/ext2/tkinter/commondialog.py @@ -10,4 +10,3 @@ except ImportError: raise ImportError('The tkCommonDialog module is missing. Does your Py2 ' 'installation include tkinter?') - diff --git a/ext2/tkinter/constants.py b/ext2/tkinter/constants.py index 99216f33d0..ffe098152f 100644 --- a/ext2/tkinter/constants.py +++ b/ext2/tkinter/constants.py @@ -10,4 +10,3 @@ except ImportError: raise ImportError('The Tkconstants module is missing. 
Does your Py2 ' 'installation include tkinter?') - diff --git a/ext2/tkinter/dialog.py b/ext2/tkinter/dialog.py index a5b777815a..113370ca2c 100644 --- a/ext2/tkinter/dialog.py +++ b/ext2/tkinter/dialog.py @@ -10,4 +10,3 @@ except ImportError: raise ImportError('The Dialog module is missing. Does your Py2 ' 'installation include tkinter?') - diff --git a/ext2/tkinter/dnd.py b/ext2/tkinter/dnd.py index 748b111a2b..1ab437917d 100644 --- a/ext2/tkinter/dnd.py +++ b/ext2/tkinter/dnd.py @@ -10,4 +10,3 @@ except ImportError: raise ImportError('The Tkdnd module is missing. Does your Py2 ' 'installation include tkinter?') - diff --git a/ext2/tkinter/font.py b/ext2/tkinter/font.py index 63d86dc73c..628f399a35 100644 --- a/ext2/tkinter/font.py +++ b/ext2/tkinter/font.py @@ -10,4 +10,3 @@ except ImportError: raise ImportError('The tkFont module is missing. Does your Py2 ' 'installation include tkinter?') - diff --git a/ext2/tkinter/messagebox.py b/ext2/tkinter/messagebox.py index 3ed52e1fec..b43d8702f5 100644 --- a/ext2/tkinter/messagebox.py +++ b/ext2/tkinter/messagebox.py @@ -10,4 +10,3 @@ except ImportError: raise ImportError('The tkMessageBox module is missing. Does your Py2 ' 'installation include tkinter?') - diff --git a/ext2/tkinter/scrolledtext.py b/ext2/tkinter/scrolledtext.py index 13bd660d96..1c69db6067 100644 --- a/ext2/tkinter/scrolledtext.py +++ b/ext2/tkinter/scrolledtext.py @@ -10,4 +10,3 @@ except ImportError: raise ImportError('The ScrolledText module is missing. Does your Py2 ' 'installation include tkinter?') - diff --git a/ext2/tkinter/simpledialog.py b/ext2/tkinter/simpledialog.py index e952fa994e..dba93fbf25 100644 --- a/ext2/tkinter/simpledialog.py +++ b/ext2/tkinter/simpledialog.py @@ -10,4 +10,3 @@ except ImportError: raise ImportError('The SimpleDialog module is missing. 
Does your Py2 ' 'installation include tkinter?') - diff --git a/ext2/tkinter/tix.py b/ext2/tkinter/tix.py index 019df6f73f..8d1718ad0b 100644 --- a/ext2/tkinter/tix.py +++ b/ext2/tkinter/tix.py @@ -10,4 +10,3 @@ except ImportError: raise ImportError('The Tix module is missing. Does your Py2 ' 'installation include tkinter?') - diff --git a/ext2/tkinter/ttk.py b/ext2/tkinter/ttk.py index 22ac9774c4..081c1b4956 100644 --- a/ext2/tkinter/ttk.py +++ b/ext2/tkinter/ttk.py @@ -10,4 +10,3 @@ except ImportError: raise ImportError('The ttk module is missing. Does your Py2 ' 'installation include tkinter?') - diff --git a/ext3/github/AuthenticatedUser.py b/ext3/github/AuthenticatedUser.py index e60acbd26e..9541df2dad 100644 --- a/ext3/github/AuthenticatedUser.py +++ b/ext3/github/AuthenticatedUser.py @@ -14,12 +14,13 @@ # Copyright 2016 E. Dunham # # Copyright 2016 Jannis Gebauer # # Copyright 2016 Peter Buckley # -# Copyright 2017 Balázs Rostás # +# Copyright 2017 Balázs Rostás # # Copyright 2017 Jannis Gebauer # # Copyright 2017 Simon # # Copyright 2018 Wan Liuyang # # Copyright 2018 bryanhuntesl <31992054+bryanhuntesl@users.noreply.github.com> # # Copyright 2018 sfdye # +# Copyright 2018 itsbruce # # # # This file is part of PyGithub. 
# # http://pygithub.readthedocs.io/ # @@ -703,16 +704,21 @@ def get_following(self): None ) - def get_gists(self): + def get_gists(self, since=github.GithubObject.NotSet): """ :calls: `GET /gists `_ + :param since: datetime.datetime format YYYY-MM-DDTHH:MM:SSZ :rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.Gist.Gist` """ + assert since is github.GithubObject.NotSet or isinstance(since, datetime.datetime), since + url_parameters = dict() + if since is not github.GithubObject.NotSet: + url_parameters["since"] = since.strftime("%Y-%m-%dT%H:%M:%SZ") return github.PaginatedList.PaginatedList( github.Gist.Gist, self._requester, "/gists", - None + url_parameters ) def get_issues(self, filter=github.GithubObject.NotSet, state=github.GithubObject.NotSet, labels=github.GithubObject.NotSet, sort=github.GithubObject.NotSet, direction=github.GithubObject.NotSet, since=github.GithubObject.NotSet): @@ -893,18 +899,26 @@ def get_repo(self, name): ) return github.Repository.Repository(self._requester, headers, data, completed=True) - def get_repos(self, type=github.GithubObject.NotSet, sort=github.GithubObject.NotSet, direction=github.GithubObject.NotSet): + def get_repos(self, visibility=github.GithubObject.NotSet, affiliation=github.GithubObject.NotSet, type=github.GithubObject.NotSet, sort=github.GithubObject.NotSet, direction=github.GithubObject.NotSet): """ - :calls: `GET /user/repos `_ + :calls: `GET /user/repos ` + :param visibility: string + :param affiliation: string :param type: string :param sort: string :param direction: string :rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.Repository.Repository` """ + assert visibility is github.GithubObject.NotSet or isinstance(visibility, str), visibility + assert affiliation is github.GithubObject.NotSet or isinstance(affiliation, str), affiliation assert type is github.GithubObject.NotSet or isinstance(type, str), type assert sort is github.GithubObject.NotSet or isinstance(sort, str), 
sort assert direction is github.GithubObject.NotSet or isinstance(direction, str), direction url_parameters = dict() + if visibility is not github.GithubObject.NotSet: + url_parameters["visibility"] = visibility + if affiliation is not github.GithubObject.NotSet: + url_parameters["affiliation"] = affiliation if type is not github.GithubObject.NotSet: url_parameters["type"] = type if sort is not github.GithubObject.NotSet: @@ -1030,6 +1044,22 @@ def has_in_watched(self, watched): ) return status == 200 + def mark_notifications_as_read(self, last_read_at=datetime.datetime.utcnow()): + """ + :calls: `PUT /notifications `_ + :param last_read_at: datetime + """ + assert isinstance(last_read_at, datetime.datetime) + put_parameters = { + "last_read_at": last_read_at.strftime('%Y-%m-%dT%H:%M:%SZ') + } + + headers, data = self._requester.requestJsonAndCheck( + "PUT", + "/notifications", + input=put_parameters + ) + def remove_from_emails(self, *emails): """ :calls: `DELETE /user/emails `_ diff --git a/ext3/github/Branch.py b/ext3/github/Branch.py index cbd7a23d7b..512b4eb391 100644 --- a/ext3/github/Branch.py +++ b/ext3/github/Branch.py @@ -371,3 +371,34 @@ def remove_push_restrictions(self): "DELETE", self.protection_url + "/restrictions" ) + + def get_required_signatures(self): + """ + :calls: `GET /repos/:owner/:repo/branches/:branch/protection/required_signatures ` + """ + headers, data = self._requester.requestJsonAndCheck( + "GET", + self.protection_url + "/required_signatures", + headers={'Accept': Consts.signaturesProtectedBranchesPreview} + ) + return data["enabled"] + + def add_required_signatures(self): + """ + :calls: `POST /repos/:owner/:repo/branches/:branch/protection/required_signatures ` + """ + headers, data = self._requester.requestJsonAndCheck( + "POST", + self.protection_url + "/required_signatures", + headers={'Accept': Consts.signaturesProtectedBranchesPreview} + ) + + def remove_required_signatures(self): + """ + :calls: `DELETE 
/repos/:owner/:repo/branches/:branch/protection/required_signatures ` + """ + headers, data = self._requester.requestJsonAndCheck( + "DELETE", + self.protection_url + "/required_signatures", + headers={'Accept': Consts.signaturesProtectedBranchesPreview} + ) diff --git a/ext3/github/Consts.py b/ext3/github/Consts.py index d0125e995e..c59f419793 100644 --- a/ext3/github/Consts.py +++ b/ext3/github/Consts.py @@ -89,3 +89,9 @@ # https://developer.github.com/changes/2018-03-16-protected-branches-required-approving-reviews/ mediaTypeRequireMultipleApprovingReviews = "application/vnd.github.luke-cage-preview+json" + +# https://developer.github.com/v3/search/#highlighting-code-search-results-1 +highLightSearchPreview = "application/vnd.github.v3.text-match+json" + +# https://developer.github.com/changes/2018-02-22-protected-branches-required-signatures/ +signaturesProtectedBranchesPreview = "application/vnd.github.zzzax-preview+json" diff --git a/ext3/github/ContentFile.py b/ext3/github/ContentFile.py index 345b6fc66a..e597a09972 100644 --- a/ext3/github/ContentFile.py +++ b/ext3/github/ContentFile.py @@ -165,8 +165,17 @@ def url(self): self._completeIfNotSet(self._url) return self._url.value + @property + def text_matches(self): + """ + :type: string + """ + self._completeIfNotSet(self._text_matches) + return self._text_matches.value + def _initAttributes(self): self._content = github.GithubObject.NotSet + self._text_matches = github.GithubObject.NotSet self._encoding = github.GithubObject.NotSet self._download_url = github.GithubObject.NotSet self._git_url = github.GithubObject.NotSet @@ -206,3 +215,5 @@ def _useAttributes(self, attributes): self._type = self._makeStringAttribute(attributes["type"]) if "url" in attributes: # pragma no branch self._url = self._makeStringAttribute(attributes["url"]) + if "text_matches" in attributes: # pragma no branch + self._text_matches = self._makeListOfDictsAttribute(attributes["text_matches"]) diff --git 
a/ext3/github/GithubObject.py b/ext3/github/GithubObject.py index 9b94d83430..16e903b168 100644 --- a/ext3/github/GithubObject.py +++ b/ext3/github/GithubObject.py @@ -188,6 +188,10 @@ def _makeListOfStringsAttribute(value): def _makeListOfIntsAttribute(value): return GithubObject.__makeSimpleListAttribute(value, int) + @staticmethod + def _makeListOfDictsAttribute(value): + return GithubObject.__makeSimpleListAttribute(value, dict) + @staticmethod def _makeListOfListOfStringsAttribute(value): return GithubObject.__makeSimpleListAttribute(value, list) diff --git a/ext3/github/MainClass.py b/ext3/github/MainClass.py index e228c42beb..ad8d58773d 100644 --- a/ext3/github/MainClass.py +++ b/ext3/github/MainClass.py @@ -27,6 +27,7 @@ # Copyright 2018 Svend Sorensen # # Copyright 2018 Wan Liuyang # # Copyright 2018 sfdye # +# Copyright 2018 itsbruce # # # # This file is part of PyGithub. # # http://pygithub.readthedocs.io/ # @@ -46,6 +47,8 @@ # # ################################################################################ +import datetime + import urllib.request, urllib.parse, urllib.error import pickle import time @@ -91,7 +94,7 @@ class Github(object): This is the main class you instantiate to access the Github API v3. Optional parameters allow different authentication methods. 
""" - def __init__(self, login_or_token=None, password=None, base_url=DEFAULT_BASE_URL, timeout=DEFAULT_TIMEOUT, client_id=None, client_secret=None, user_agent='PyGithub/Python', per_page=DEFAULT_PER_PAGE, api_preview=False, verify=True): + def __init__(self, login_or_token=None, password=None, jwt=None, base_url=DEFAULT_BASE_URL, timeout=DEFAULT_TIMEOUT, client_id=None, client_secret=None, user_agent='PyGithub/Python', per_page=DEFAULT_PER_PAGE, api_preview=False, verify=True): """ :param login_or_token: string :param password: string @@ -106,13 +109,14 @@ def __init__(self, login_or_token=None, password=None, base_url=DEFAULT_BASE_URL assert login_or_token is None or isinstance(login_or_token, str), login_or_token assert password is None or isinstance(password, str), password + assert jwt is None or isinstance(jwt, str), jwt assert isinstance(base_url, str), base_url assert isinstance(timeout, int), timeout assert client_id is None or isinstance(client_id, str), client_id assert client_secret is None or isinstance(client_secret, str), client_secret assert user_agent is None or isinstance(user_agent, str), user_agent assert isinstance(api_preview, (bool)) - self.__requester = Requester(login_or_token, password, base_url, timeout, client_id, client_secret, user_agent, per_page, api_preview, verify) + self.__requester = Requester(login_or_token, password, jwt, base_url, timeout, client_id, client_secret, user_agent, per_page, api_preview, verify) def __get_FIX_REPO_GET_GIT_REF(self): """ @@ -334,16 +338,21 @@ def get_gist(self, id): ) return github.Gist.Gist(self.__requester, headers, data, completed=True) - def get_gists(self): + def get_gists(self, since=github.GithubObject.NotSet): """ :calls: `GET /gists/public `_ + :param since: datetime.datetime format YYYY-MM-DDTHH:MM:SSZ :rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.Gist.Gist` """ + assert since is github.GithubObject.NotSet or isinstance(since, datetime.datetime), since + 
url_parameters = dict() + if since is not github.GithubObject.NotSet: + url_parameters["since"] = since.strftime("%Y-%m-%dT%H:%M:%SZ") return github.PaginatedList.PaginatedList( github.Gist.Gist, self.__requester, "/gists/public", - None + url_parameters ) def search_repositories(self, query, sort=github.GithubObject.NotSet, order=github.GithubObject.NotSet, **qualifiers): @@ -451,12 +460,13 @@ def search_issues(self, query, sort=github.GithubObject.NotSet, order=github.Git url_parameters ) - def search_code(self, query, sort=github.GithubObject.NotSet, order=github.GithubObject.NotSet, **qualifiers): + def search_code(self, query, sort=github.GithubObject.NotSet, order=github.GithubObject.NotSet, highlight=False, **qualifiers): """ :calls: `GET /search/code `_ :param query: string :param sort: string ('indexed') :param order: string ('asc', 'desc') + :param highlight: boolean (True, False) :param qualifiers: keyword dict query qualifiers :rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.ContentFile.ContentFile` """ @@ -479,14 +489,16 @@ def search_code(self, query, sort=github.GithubObject.NotSet, order=github.Githu url_parameters["q"] = ' '.join(query_chunks) assert url_parameters["q"], "need at least one qualifier" + headers = {"Accept": Consts.highLightSearchPreview} if highlight else None + return github.PaginatedList.PaginatedList( github.ContentFile.ContentFile, self.__requester, "/search/code", - url_parameters + url_parameters, + headers=headers ) - def search_commits(self, query, sort=github.GithubObject.NotSet, order=github.GithubObject.NotSet, **qualifiers): """ :calls: `GET /search/commits `_ diff --git a/ext3/github/Milestone.py b/ext3/github/Milestone.py index 992e303283..be5f5c37c2 100644 --- a/ext3/github/Milestone.py +++ b/ext3/github/Milestone.py @@ -45,7 +45,7 @@ class Milestone(github.GithubObject.CompletableGithubObject): """ def __repr__(self): - return self.get__repr__({"number": self._number.value}) + return 
self.get__repr__({"number": self._number.value, "title": self._title.value}) @property def closed_issues(self): diff --git a/ext3/github/NamedUser.py b/ext3/github/NamedUser.py index 9cbe282001..fe1909285b 100644 --- a/ext3/github/NamedUser.py +++ b/ext3/github/NamedUser.py @@ -17,6 +17,7 @@ # Copyright 2018 Wan Liuyang # # Copyright 2018 namc # # Copyright 2018 sfdye # +# Copyright 2018 itsbruce # # # # This file is part of PyGithub. # # http://pygithub.readthedocs.io/ # @@ -36,6 +37,8 @@ # # ################################################################################ +import datetime + import github.GithubObject import github.PaginatedList @@ -334,6 +337,14 @@ def subscriptions_url(self): self._completeIfNotSet(self._subscriptions_url) return self._subscriptions_url.value + @property + def suspended_at(self): + """ + :type: datetime.datetime + """ + self._completeIfNotSet(self._suspended_at) + return self._suspended_at.value + @property def total_private_repos(self): """ @@ -402,16 +413,21 @@ def get_following(self): None ) - def get_gists(self): + def get_gists(self, since=github.GithubObject.NotSet): """ :calls: `GET /users/:user/gists `_ + :param since: datetime.datetime format YYYY-MM-DDTHH:MM:SSZ :rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.Gist.Gist` """ + assert since is github.GithubObject.NotSet or isinstance(since, datetime.datetime), since + url_parameters = dict() + if since is not github.GithubObject.NotSet: + url_parameters["since"] = since.strftime("%Y-%m-%dT%H:%M:%SZ") return github.PaginatedList.PaginatedList( github.Gist.Gist, self._requester, self.url + "/gists", - None + url_parameters ) def get_keys(self): @@ -601,6 +617,7 @@ def _initAttributes(self): self._site_admin = github.GithubObject.NotSet self._starred_url = github.GithubObject.NotSet self._subscriptions_url = github.GithubObject.NotSet + self._suspended_at = github.GithubObject.NotSet self._total_private_repos = github.GithubObject.NotSet self._type = 
github.GithubObject.NotSet self._updated_at = github.GithubObject.NotSet @@ -675,6 +692,8 @@ def _useAttributes(self, attributes): self._starred_url = self._makeStringAttribute(attributes["starred_url"]) if "subscriptions_url" in attributes: # pragma no branch self._subscriptions_url = self._makeStringAttribute(attributes["subscriptions_url"]) + if "suspended_at" in attributes: # pragma no branch + self._suspended_at = self._makeDatetimeAttribute(attributes["suspended_at"]) if "total_private_repos" in attributes: # pragma no branch self._total_private_repos = self._makeIntAttribute(attributes["total_private_repos"]) if "type" in attributes: # pragma no branch diff --git a/ext3/github/Notification.py b/ext3/github/Notification.py index f38f845b7e..b62d75f61d 100644 --- a/ext3/github/Notification.py +++ b/ext3/github/Notification.py @@ -115,6 +115,15 @@ def url(self): self._completeIfNotSet(self._url) return self._url.value + def mark_as_read(self): + """ + :calls: `PATCH /notifications/threads/:id `_ + """ + headers, data = self._requester.requestJsonAndCheck( + "PATCH", + self.url, + ) + def _initAttributes(self): self._id = github.GithubObject.NotSet self._last_read_at = github.GithubObject.NotSet diff --git a/ext3/github/PaginatedList.py b/ext3/github/PaginatedList.py index aab47edf42..8817bd5333 100644 --- a/ext3/github/PaginatedList.py +++ b/ext3/github/PaginatedList.py @@ -155,7 +155,7 @@ def totalCount(self): headers=self.__headers ) if 'link' not in headers: - self.__totalCount = len(data) + self.__totalCount = len(data) if data else 0 else: links = self.__parseLinkHeader(headers) lastUrl = links.get("last") @@ -175,7 +175,7 @@ def _getLastPageUrl(self): @property def reversed(self): - r = PaginatedList(self.__contentClass, self.__requester, self.__firstUrl, self.__firstParams) + r = PaginatedList(self.__contentClass, self.__requester, self.__firstUrl, self.__firstParams, self.__headers, self.__list_item) r.__reverse() return r diff --git 
a/ext3/github/PullRequest.py b/ext3/github/PullRequest.py index 5e62eaf7c0..da58e632a3 100644 --- a/ext3/github/PullRequest.py +++ b/ext3/github/PullRequest.py @@ -434,6 +434,8 @@ def create_review(self, commit, body, event=github.GithubObject.NotSet, comments post_parameters['event'] = 'COMMENT' if event == github.GithubObject.NotSet else event if comments is github.GithubObject.NotSet: post_parameters['comments'] = [] + else: + post_parameters['comments'] = comments headers, data = self._requester.requestJsonAndCheck( "POST", self.url + "/reviews", diff --git a/ext3/github/Repository.py b/ext3/github/Repository.py index a712fdd8e7..623d941650 100644 --- a/ext3/github/Repository.py +++ b/ext3/github/Repository.py @@ -60,6 +60,7 @@ # Copyright 2018 per1234 # # Copyright 2018 sechastain # # Copyright 2018 sfdye # +# Copyright 2018 Vinay Hegde # # # This file is part of PyGithub. # # http://pygithub.readthedocs.io/ # @@ -1247,9 +1248,9 @@ def edit(self, name=None, description=github.GithubObject.NotSet, homepage=githu assert has_wiki is github.GithubObject.NotSet or isinstance(has_wiki, bool), has_wiki assert has_downloads is github.GithubObject.NotSet or isinstance(has_downloads, bool), has_downloads assert default_branch is github.GithubObject.NotSet or isinstance(default_branch, str), default_branch - assert allow_squash_merge is github.GithubObject.NotSet or (isinstance(allow_squash_merge, bool) and allow_squash_merge is True), allow_squash_merge - assert allow_merge_commit is github.GithubObject.NotSet or (isinstance(allow_merge_commit, bool) and allow_merge_commit is True), allow_merge_commit - assert allow_rebase_merge is github.GithubObject.NotSet or (isinstance(allow_rebase_merge, bool) and allow_rebase_merge is True), allow_rebase_merge + assert allow_squash_merge is github.GithubObject.NotSet or isinstance(allow_squash_merge, bool), allow_squash_merge + assert allow_merge_commit is github.GithubObject.NotSet or isinstance(allow_merge_commit, bool), 
allow_merge_commit + assert allow_rebase_merge is github.GithubObject.NotSet or isinstance(allow_rebase_merge, bool), allow_rebase_merge assert archived is github.GithubObject.NotSet or (isinstance(archived, bool) and archived is True), archived post_parameters = { "name": name, @@ -1340,16 +1341,26 @@ def get_branches(self): None ) - def get_collaborators(self): + def get_collaborators(self, affiliation=github.GithubObject.NotSet): """ :calls: `GET /repos/:owner/:repo/collaborators `_ + :param affiliation: string :rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.NamedUser.NamedUser` """ + + url_parameters = dict() + allowed_affiliations = ['outside', 'direct', 'all'] + if affiliation is not github.GithubObject.NotSet: + assert isinstance(affiliation, str), affiliation + assert affiliation in allowed_affiliations, \ + 'Affiliation can be one of ' + ', '.join(allowed_affiliations) + url_parameters['affiliation'] = affiliation + return github.PaginatedList.PaginatedList( github.NamedUser.NamedUser, self._requester, self.url + "/collaborators", - None + url_parameters ) def get_comment(self, id): @@ -1449,7 +1460,7 @@ def get_file_contents(self, path, ref=github.GithubObject.NotSet): url_parameters["ref"] = ref headers, data = self._requester.requestJsonAndCheck( "GET", - self.url + "/contents" + urllib.parse.quote(path), + self.url + "/contents/" + urllib.parse.quote(path), parameters=url_parameters ) if isinstance(data, list): @@ -1530,7 +1541,7 @@ def create_file(self, path, message, content, headers, data = self._requester.requestJsonAndCheck( "PUT", - self.url + "/contents" + urllib.parse.quote(path), + self.url + "/contents/" + urllib.parse.quote(path), input=put_parameters ) @@ -1594,7 +1605,7 @@ def update_file(self, path, message, content, sha, headers, data = self._requester.requestJsonAndCheck( "PUT", - self.url + "/contents" + urllib.parse.quote(path), + self.url + "/contents/" + urllib.parse.quote(path), input=put_parameters ) @@ 
-1644,7 +1655,7 @@ def delete_file(self, path, message, sha, headers, data = self._requester.requestJsonAndCheck( "DELETE", - self.url + "/contents" + urllib.parse.quote(path), + self.url + "/contents/" + urllib.parse.quote(path), input=url_parameters ) @@ -1665,7 +1676,7 @@ def get_dir_contents(self, path, ref=github.GithubObject.NotSet): url_parameters["ref"] = ref headers, data = self._requester.requestJsonAndCheck( "GET", - self.url + "/contents" + urllib.parse.quote(path), + self.url + "/contents/" + urllib.parse.quote(path), parameters=url_parameters ) @@ -2473,6 +2484,22 @@ def legacy_search_issues(self, state, keyword): for element in data["issues"] ] + def mark_notifications_as_read(self, last_read_at=datetime.datetime.utcnow()): + """ + :calls: `PUT /repos/:owner/:repo/notifications `_ + :param last_read_at: datetime + """ + assert isinstance(last_read_at, datetime.datetime) + put_parameters = { + "last_read_at": last_read_at.strftime('%Y-%m-%dT%H:%M:%SZ') + } + + headers, data = self._requester.requestJsonAndCheck( + "PUT", + self.url + "/notifications", + input=put_parameters + ) + def merge(self, base, head, commit_message=github.GithubObject.NotSet): """ :calls: `POST /repos/:owner/:repo/merges `_ diff --git a/ext3/github/Requester.py b/ext3/github/Requester.py index 4c3467666a..59c4912f36 100644 --- a/ext3/github/Requester.py +++ b/ext3/github/Requester.py @@ -214,7 +214,7 @@ def _initializeDebugFeature(self): ############################################################# - def __init__(self, login_or_token, password, base_url, timeout, client_id, client_secret, user_agent, per_page, api_preview, verify): + def __init__(self, login_or_token, password, jwt, base_url, timeout, client_id, client_secret, user_agent, per_page, api_preview, verify): self._initializeDebugFeature() if password is not None: @@ -226,6 +226,8 @@ def __init__(self, login_or_token, password, base_url, timeout, client_id, clien elif login_or_token is not None: token = 
login_or_token self.__authorizationHeader = "token " + token + elif jwt is not None: + self.__authorizationHeader = "Bearer " + jwt else: self.__authorizationHeader = None @@ -293,7 +295,10 @@ def __createException(self, status, headers, output): cls = GithubException.TwoFactorException # pragma no cover (Should be covered) elif status == 403 and output.get("message").startswith("Missing or invalid User Agent string"): cls = GithubException.BadUserAgentException - elif status == 403 and output.get("message").lower().startswith("api rate limit exceeded"): + elif status == 403 and ( + output.get("message").lower().startswith("api rate limit exceeded") + or output.get("message").lower().endswith("please wait a few minutes before you try again.") + ): cls = GithubException.RateLimitExceededException elif status == 404 and output.get("message") == "Not Found": cls = GithubException.UnknownObjectException @@ -411,7 +416,8 @@ def __requestRaw(self, cnx, verb, url, requestHeaders, input): return self.__requestRaw(original_cnx, verb, url, requestHeaders, input) if status == 301 and 'location' in responseHeaders: - return self.__requestRaw(original_cnx, verb, responseHeaders['location'], requestHeaders, input) + o = urllib.parse.urlparse(responseHeaders['location']) + return self.__requestRaw(original_cnx, verb, o.path, requestHeaders, input) return status, responseHeaders, output @@ -465,6 +471,8 @@ def __log(self, verb, url, requestHeaders, input, status, responseHeaders, outpu requestHeaders["Authorization"] = "Basic (login and password removed)" elif requestHeaders["Authorization"].startswith("token"): requestHeaders["Authorization"] = "token (oauth token removed)" + elif requestHeaders["Authorization"].startswith("Bearer"): + requestHeaders["Authorization"] = "Bearer (jwt removed)" else: # pragma no cover (Cannot happen, but could if we add an authentication method => be prepared) requestHeaders["Authorization"] = "(unknown auth removed)" # pragma no cover (Cannot 
happen, but could if we add an authentication method => be prepared) logger.debug("%s %s://%s%s %s %s ==> %i %s %s", str(verb), self.__scheme, self.__hostname, str(url), str(requestHeaders), str(input), status, str(responseHeaders), str(output)) diff --git a/medusa/__init__.py b/medusa/__init__.py index e34fc9ab46..4f8bf80267 100644 --- a/medusa/__init__.py +++ b/medusa/__init__.py @@ -17,7 +17,7 @@ # along with Medusa. If not, see . from __future__ import unicode_literals -from medusa import app +from medusa.app import app from medusa.init import initialize # Initialize functions replacements diff --git a/medusa/__main__.py b/medusa/__main__.py index 12f95acc39..b055deb36b 100755 --- a/medusa/__main__.py +++ b/medusa/__main__.py @@ -95,34 +95,6 @@ logger = logging.getLogger(__name__) -def fix_incorrect_list_values(data): - """ - @TODO: Remove this in a future version. - - Due to a bug introduced in v0.2.9, the value might be a string representing a Python dict. - See: https://github.com/pymedusa/Medusa/issues/5155 - - Example: `"{u'id': 0, u'value': u'!sync'}"` to `"!sync"` - """ - import ast - - result = [] - for item in data: - if not item: - continue - if not (item.startswith('{') and item.endswith('}')): - # Simple value, don't do anything to it - result.append(item) - continue - try: - # Get the value: `{u'id': 0, u'value': u'!sync'}` => `!sync` - result.append(ast.literal_eval(item)['value']) - except (SyntaxError, KeyError): - pass - - return result - - class Application(object): """Main application module.""" @@ -435,7 +407,7 @@ def initialize(self, console_logging=True): sections = [ 'General', 'Blackhole', 'Newzbin', 'SABnzbd', 'NZBget', 'KODI', 'PLEX', 'Emby', 'Growl', 'Prowl', 'Twitter', - 'Boxcar2', 'NMJ', 'NMJv2', 'Synology', 'Slack', 'SynologyNotifier', 'pyTivo', 'Pushalot', 'Pushbullet', + 'Boxcar2', 'NMJ', 'NMJv2', 'Synology', 'Slack', 'SynologyNotifier', 'pyTivo', 'Pushalot', 'Pushbullet', 'Join', 'Subtitles', 'pyTivo', ] @@ -616,11 +588,7 @@ def 
initialize(self, console_logging=True): app.RANDOMIZE_PROVIDERS = bool(check_setting_int(app.CFG, 'General', 'randomize_providers', 0)) app.ALLOW_HIGH_PRIORITY = bool(check_setting_int(app.CFG, 'General', 'allow_high_priority', 1)) app.SKIP_REMOVED_FILES = bool(check_setting_int(app.CFG, 'General', 'skip_removed_files', 0)) - app.ALLOWED_EXTENSIONS = check_setting_list(app.CFG, 'General', 'allowed_extensions', app.ALLOWED_EXTENSIONS) - # @TODO: Remove this in a future version. - app.ALLOWED_EXTENSIONS = fix_incorrect_list_values(app.ALLOWED_EXTENSIONS) - app.USENET_RETENTION = check_setting_int(app.CFG, 'General', 'usenet_retention', 500) app.CACHE_TRIMMING = bool(check_setting_int(app.CFG, 'General', 'cache_trimming', 0)) app.MAX_CACHE_AGE = check_setting_int(app.CFG, 'General', 'max_cache_age', 30) @@ -661,11 +629,7 @@ def initialize(self, console_logging=True): app.MOVE_ASSOCIATED_FILES = bool(check_setting_int(app.CFG, 'General', 'move_associated_files', 0)) app.POSTPONE_IF_SYNC_FILES = bool(check_setting_int(app.CFG, 'General', 'postpone_if_sync_files', 1)) app.POSTPONE_IF_NO_SUBS = bool(check_setting_int(app.CFG, 'General', 'postpone_if_no_subs', 0)) - app.SYNC_FILES = check_setting_list(app.CFG, 'General', 'sync_files', app.SYNC_FILES) - # @TODO: Remove this in a future version. 
- app.SYNC_FILES = fix_incorrect_list_values(app.SYNC_FILES) - app.NFO_RENAME = bool(check_setting_int(app.CFG, 'General', 'nfo_rename', 1)) app.CREATE_MISSING_SHOW_DIRS = bool(check_setting_int(app.CFG, 'General', 'create_missing_show_dirs', 0)) app.ADD_SHOWS_WO_DIR = bool(check_setting_int(app.CFG, 'General', 'add_shows_wo_dir', 0)) @@ -796,7 +760,8 @@ def initialize(self, console_logging=True): app.PUSHOVER_USERKEY = check_setting_str(app.CFG, 'Pushover', 'pushover_userkey', '', censor_log='normal') app.PUSHOVER_APIKEY = check_setting_str(app.CFG, 'Pushover', 'pushover_apikey', '', censor_log='low') app.PUSHOVER_DEVICE = check_setting_list(app.CFG, 'Pushover', 'pushover_device', '') - app.PUSHOVER_SOUND = check_setting_str(app.CFG, 'Pushover', 'pushover_sound', 'pushover') + app.PUSHOVER_SOUND = check_setting_str(app.CFG, 'Pushover', 'pushover_sound', 'default') + app.PUSHOVER_PRIORITY = check_setting_str(app.CFG, 'Pushover', 'pushover_priority', '0') app.USE_LIBNOTIFY = bool(check_setting_int(app.CFG, 'Libnotify', 'use_libnotify', 0)) app.LIBNOTIFY_NOTIFY_ONSNATCH = bool(check_setting_int(app.CFG, 'Libnotify', 'libnotify_notify_onsnatch', 0)) @@ -875,6 +840,13 @@ def initialize(self, console_logging=True): app.PUSHBULLET_API = check_setting_str(app.CFG, 'Pushbullet', 'pushbullet_api', '', censor_log='low') app.PUSHBULLET_DEVICE = check_setting_str(app.CFG, 'Pushbullet', 'pushbullet_device', '') + app.USE_JOIN = bool(check_setting_int(app.CFG, 'Join', 'use_join', 0)) + app.JOIN_NOTIFY_ONSNATCH = bool(check_setting_int(app.CFG, 'Join', 'join_notify_onsnatch', 0)) + app.JOIN_NOTIFY_ONDOWNLOAD = bool(check_setting_int(app.CFG, 'Join', 'join_notify_ondownload', 0)) + app.JOIN_NOTIFY_ONSUBTITLEDOWNLOAD = bool(check_setting_int(app.CFG, 'Join', 'join_notify_onsubtitledownload', 0)) + app.JOIN_API = check_setting_str(app.CFG, 'Join', 'join_api', '', censor_log='low') + app.JOIN_DEVICE = check_setting_str(app.CFG, 'Join', 'join_device', '') + app.USE_EMAIL = 
bool(check_setting_int(app.CFG, 'Email', 'use_email', 0)) app.EMAIL_NOTIFY_ONSNATCH = bool(check_setting_int(app.CFG, 'Email', 'email_notify_onsnatch', 0)) app.EMAIL_NOTIFY_ONDOWNLOAD = bool(check_setting_int(app.CFG, 'Email', 'email_notify_ondownload', 0)) @@ -938,8 +910,6 @@ def initialize(self, console_logging=True): app.NO_RESTART = bool(check_setting_int(app.CFG, 'General', 'no_restart', 0)) app.EXTRA_SCRIPTS = [x.strip() for x in check_setting_list(app.CFG, 'General', 'extra_scripts')] - # @TODO: Remove this in a future version. - app.EXTRA_SCRIPTS = fix_incorrect_list_values(app.EXTRA_SCRIPTS) app.USE_LISTVIEW = bool(check_setting_int(app.CFG, 'General', 'use_listview', 0)) @@ -1773,6 +1743,7 @@ def save_config(): new_config['Pushover']['pushover_apikey'] = app.PUSHOVER_APIKEY new_config['Pushover']['pushover_device'] = app.PUSHOVER_DEVICE new_config['Pushover']['pushover_sound'] = app.PUSHOVER_SOUND + new_config['Pushover']['pushover_priority'] = app.PUSHOVER_PRIORITY new_config['Libnotify'] = {} new_config['Libnotify']['use_libnotify'] = int(app.USE_LIBNOTIFY) @@ -1852,6 +1823,14 @@ def save_config(): new_config['Pushbullet']['pushbullet_api'] = app.PUSHBULLET_API new_config['Pushbullet']['pushbullet_device'] = app.PUSHBULLET_DEVICE + new_config['Join'] = {} + new_config['Join']['use_join'] = int(app.USE_JOIN) + new_config['Join']['join_notify_onsnatch'] = int(app.JOIN_NOTIFY_ONSNATCH) + new_config['Join']['join_notify_ondownload'] = int(app.JOIN_NOTIFY_ONDOWNLOAD) + new_config['Join']['join_notify_onsubtitledownload'] = int(app.JOIN_NOTIFY_ONSUBTITLEDOWNLOAD) + new_config['Join']['join_api'] = app.JOIN_API + new_config['Join']['join_device'] = app.JOIN_DEVICE + new_config['Email'] = {} new_config['Email']['use_email'] = int(app.USE_EMAIL) new_config['Email']['email_notify_onsnatch'] = int(app.EMAIL_NOTIFY_ONSNATCH) diff --git a/medusa/app.py b/medusa/app.py index cf3ff5d848..d8b0bcd2db 100644 --- a/medusa/app.py +++ b/medusa/app.py @@ -3,642 +3,672 @@ 
from __future__ import unicode_literals import random +import sys from threading import Lock -# Application instance -instance = None - -# Fixed values -__title__ = __name__ -SRC_FOLDER = __name__ -LEGACY_SRC_FOLDERS = ('sickbeard', 'sickrage', 'gui') -LIB_FOLDER = 'lib' -LIB2_FOLDER = 'lib2' -LIB3_FOLDER = 'lib3' -EXT_FOLDER = 'ext' -EXT2_FOLDER = 'ext2' -EXT3_FOLDER = 'ext3' -STATIC_FOLDER = 'static' -UNKNOWN_RELEASE_GROUP = 'Medusa' -BACKUP_DIR = 'backup' -BACKUP_FILENAME_PREFIX = 'backup' -BACKUP_FILENAME = BACKUP_FILENAME_PREFIX + '-{timestamp}.zip' -LEGACY_DB = 'sickbeard.db' -APPLICATION_DB = 'main.db' -FAILED_DB = 'failed.db' -CACHE_DB = 'cache.db' -LOG_FILENAME = 'application.log' -CONFIG_INI = 'config.ini' -GIT_ORG = 'pymedusa' -GIT_REPO = 'Medusa' -BASE_PYMEDUSA_URL = 'https://cdn.pymedusa.com' -CHANGES_URL = '{base_url}/news/CHANGELOG.md'.format(base_url=BASE_PYMEDUSA_URL) -APPLICATION_URL = 'https://github.com/{org}/{repo}'.format(org=GIT_ORG, repo=GIT_REPO) -DONATIONS_URL = '{0}/wiki/Donations'.format(APPLICATION_URL) -WIKI_URL = '{0}/wiki'.format(APPLICATION_URL) -GITHUB_IO_URL = 'http://github.com/pymedusa/medusa.github.io/' -EXTRA_SCRIPTS_URL = '{0}/wiki/Post-Processing#extra-scripts'.format(APPLICATION_URL) -SUBTITLES_URL = '{0}/wiki/Subtitle%20Scripts'.format(APPLICATION_URL) -RARBG_APPID = 'medusa' -SECURE_TOKEN = 'medusa_user' - -# static configuration -LOCALE = None, None -OS_USER = None -OPENSSL_VERSION = None -APP_VERSION = None -MAJOR_DB_VERSION = None -MINOR_DB_VERSION = None - -PID = None -CFG = None -CONFIG_FILE = None - -# This is the version of the config we EXPECT to find -CONFIG_VERSION = 10 - -# Default encryption version (0 for None) -ENCRYPTION_VERSION = 0 -ENCRYPTION_SECRET = None - -PROG_DIR = '.' 
-MY_FULLNAME = None -MY_NAME = None -MY_ARGS = [] -SYS_ENCODING = '' -DATA_DIR = '' -CREATEPID = False -PIDFILE = '' - -DAEMON = None -NO_RESIZE = False - -# system events -events = None - -# schedulers -daily_search_scheduler = None -backlog_search_scheduler = None -show_update_scheduler = None -version_check_scheduler = None -show_queue_scheduler = None -search_queue_scheduler = None -forced_search_queue_scheduler = None -manual_snatch_scheduler = None -proper_finder_scheduler = None -auto_post_processor_scheduler = None -subtitles_finder_scheduler = None -trakt_checker_scheduler = None -torrent_checker_scheduler = None - -showList = [] - -providerList = [] -newznabProviderList = [] -torrentRssProviderList = [] -torznab_providers_list = [] -metadata_provider_dict = {} - -NEWEST_VERSION = None -NEWEST_VERSION_STRING = None -VERSION_NOTIFY = False -AUTO_UPDATE = False -NOTIFY_ON_UPDATE = False -CUR_COMMIT_HASH = None -BRANCH = '' - -GIT_RESET = True -GIT_RESET_BRANCHES = ['develop', 'master'] -GIT_REMOTE_BRANCHES = [] -GIT_REMOTE = '' -GIT_REMOTE_URL = '' -CUR_COMMIT_BRANCH = '' -GIT_AUTH_TYPE = 0 -GIT_USERNAME = None -GIT_PASSWORD = None -GIT_TOKEN = None -GIT_PATH = None -DEVELOPER = False - -NEWS_URL = '{base_url}/news/news.md'.format(base_url=BASE_PYMEDUSA_URL) -LOGO_URL = '{base_url}/images/ico/favicon-64.png'.format(base_url=BASE_PYMEDUSA_URL) - -NEWS_LAST_READ = None -NEWS_LATEST = None -NEWS_UNREAD = 0 - -BROKEN_PROVIDERS = [] -BROKEN_PROVIDERS_UPDATE = None - -INIT_LOCK = Lock() -started = False - -ACTUAL_LOG_DIR = None -LOG_DIR = None -LOG_NR = 5 -LOG_SIZE = 10.0 - -SOCKET_TIMEOUT = None - -WEB_PORT = None -WEB_LOG = None -WEB_ROOT = None -WEB_USERNAME = None -WEB_PASSWORD = None -WEB_HOST = None -WEB_IPV6 = None -WEB_COOKIE_SECRET = None -WEB_USE_GZIP = True - -SUBLIMINAL_LOG = False - -DOWNLOAD_URL = None - -HANDLE_REVERSE_PROXY = False -PROXY_SETTING = None -PROXY_INDEXERS = True -SSL_VERIFY = True -SSL_CA_BUNDLE = None - -LOCALHOST_IP = None - 
-CPU_PRESET = None - -ANON_REDIRECT = None - -API_KEY = None -API_ROOT = None - -ENABLE_HTTPS = False -NOTIFY_ON_LOGIN = False -HTTPS_CERT = None -HTTPS_KEY = None - -INDEXER_DEFAULT_LANGUAGE = None -EP_DEFAULT_DELETED_STATUS = None -LAUNCH_BROWSER = False -CACHE_DIR = None -ACTUAL_CACHE_DIR = None -ROOT_DIRS = [] -TVDB_DVD_ORDER_EP_IGNORE = False - -TRASH_REMOVE_SHOW = False -TRASH_ROTATE_LOGS = False -SORT_ARTICLE = False -DEBUG = False -DBDEBUG = False -DISPLAY_ALL_SEASONS = True -DEFAULT_PAGE = 'home' -SEEDERS_LEECHERS_IN_NOTIFY = True -SHOW_LIST_ORDER = ['Anime', 'Series'] - -USE_LISTVIEW = False -METADATA_KODI = [] -METADATA_KODI_12PLUS = [] -METADATA_MEDIABROWSER = [] -METADATA_PS3 = [] -METADATA_WDTV = [] -METADATA_TIVO = [] -METADATA_MEDE8ER = [] - -QUALITY_DEFAULT = None -STATUS_DEFAULT = None -STATUS_DEFAULT_AFTER = None -SEASON_FOLDERS_DEFAULT = True -SUBTITLES_DEFAULT = False -INDEXER_DEFAULT = None -INDEXER_TIMEOUT = None -SCENE_DEFAULT = False -ANIME_DEFAULT = False -PROVIDER_ORDER = [] - -NAMING_MULTI_EP = False -NAMING_ANIME_MULTI_EP = False -NAMING_PATTERN = None -NAMING_ABD_PATTERN = None -NAMING_CUSTOM_ABD = False -NAMING_SPORTS_PATTERN = None -NAMING_CUSTOM_SPORTS = False -NAMING_ANIME_PATTERN = None -NAMING_CUSTOM_ANIME = False -NAMING_FORCE_FOLDERS = False -NAMING_STRIP_YEAR = False -NAMING_ANIME = None - -USE_NZBS = False -USE_TORRENTS = False - -NZB_METHOD = None -NZB_DIR = None -USENET_RETENTION = None -CACHE_TRIMMING = None -MAX_CACHE_AGE = None -TORRENT_METHOD = None -TORRENT_DIR = None -DOWNLOAD_PROPERS = False -CHECK_PROPERS_INTERVAL = None -PROPERS_SEARCH_DAYS = 2 -REMOVE_FROM_CLIENT = False -ALLOW_HIGH_PRIORITY = False -SAB_FORCED = False -RANDOMIZE_PROVIDERS = False - -AUTOPOSTPROCESSOR_FREQUENCY = 10 -DAILYSEARCH_FREQUENCY = None -UPDATE_FREQUENCY = None -BACKLOG_FREQUENCY = None -SHOWUPDATE_HOUR = None - -DEFAULT_TORRENT_CHECKER_FREQUENCY = 60 -DEFAULT_DAILYSEARCH_FREQUENCY = 40 -DEFAULT_BACKLOG_FREQUENCY = 21 
-DEFAULT_UPDATE_FREQUENCY = 1 -DEFAULT_SHOWUPDATE_HOUR = random.randint(2, 4) - -MIN_AUTOPOSTPROCESSOR_FREQUENCY = 1 -MIN_TORRENT_CHECKER_FREQUENCY = 30 -MIN_DAILYSEARCH_FREQUENCY = 10 -MIN_BACKLOG_FREQUENCY = 10 -MIN_UPDATE_FREQUENCY = 1 - -BACKLOG_DAYS = 7 - -ADD_SHOWS_WO_DIR = False -CREATE_MISSING_SHOW_DIRS = False -RENAME_EPISODES = False -AIRDATE_EPISODES = False -FILE_TIMESTAMP_TIMEZONE = None -PROCESS_AUTOMATICALLY = False -NO_DELETE = False -KEEP_PROCESSED_DIR = False -PROCESS_METHOD = None -DELRARCONTENTS = False -MOVE_ASSOCIATED_FILES = False -POSTPONE_IF_SYNC_FILES = True -POSTPONE_IF_NO_SUBS = False -NFO_RENAME = True -TV_DOWNLOAD_DIR = None -UNPACK = False -SKIP_REMOVED_FILES = False -ALLOWED_EXTENSIONS = {'srt', 'nfo', 'sub', 'idx'} - -NZBS = False -NZBS_UID = None -NZBS_HASH = None - -OMGWTFNZBS = False -OMGWTFNZBS_USERNAME = None -OMGWTFNZBS_APIKEY = None - -NEWZBIN = False -NEWZBIN_USERNAME = None -NEWZBIN_PASSWORD = None - -SAB_USERNAME = None -SAB_PASSWORD = None -SAB_APIKEY = None -SAB_CATEGORY = None -SAB_CATEGORY_BACKLOG = None -SAB_CATEGORY_ANIME = None -SAB_CATEGORY_ANIME_BACKLOG = None -SAB_HOST = '' - -NZBGET_USERNAME = None -NZBGET_PASSWORD = None -NZBGET_CATEGORY = None -NZBGET_CATEGORY_BACKLOG = None -NZBGET_CATEGORY_ANIME = None -NZBGET_CATEGORY_ANIME_BACKLOG = None -NZBGET_HOST = None -NZBGET_USE_HTTPS = False -NZBGET_PRIORITY = 100 - -TORRENT_USERNAME = None -TORRENT_PASSWORD = None -TORRENT_HOST = '' -TORRENT_PATH = '' -TORRENT_SEED_TIME = None -TORRENT_PAUSED = False -TORRENT_HIGH_BANDWIDTH = False -TORRENT_LABEL = '' -TORRENT_LABEL_ANIME = '' -TORRENT_VERIFY_CERT = False -TORRENT_RPCURL = 'transmission' -TORRENT_AUTH_TYPE = 'none' -TORRENT_SEED_LOCATION = None -TORRENT_CHECKER_FREQUENCY = None - -USE_KODI = False -KODI_ALWAYS_ON = True -KODI_NOTIFY_ONSNATCH = False -KODI_NOTIFY_ONDOWNLOAD = False -KODI_NOTIFY_ONSUBTITLEDOWNLOAD = False -KODI_UPDATE_LIBRARY = False -KODI_UPDATE_FULL = False -KODI_UPDATE_ONLYFIRST = False 
-KODI_HOST = [] -KODI_USERNAME = None -KODI_PASSWORD = None -KODI_LIBRARY_CLEAN_PENDING = False -KODI_CLEAN_LIBRARY = False - -USE_PLEX_SERVER = False -PLEX_NOTIFY_ONSNATCH = False -PLEX_NOTIFY_ONDOWNLOAD = False -PLEX_NOTIFY_ONSUBTITLEDOWNLOAD = False -PLEX_UPDATE_LIBRARY = False -PLEX_SERVER_HOST = [] -PLEX_SERVER_TOKEN = None -PLEX_CLIENT_HOST = [] -PLEX_SERVER_USERNAME = None -PLEX_SERVER_PASSWORD = None - -USE_PLEX_CLIENT = False -PLEX_CLIENT_USERNAME = None -PLEX_CLIENT_PASSWORD = None -PLEX_SERVER_HTTPS = None - -USE_EMBY = False -EMBY_HOST = None -EMBY_APIKEY = None - -USE_GROWL = False -GROWL_NOTIFY_ONSNATCH = False -GROWL_NOTIFY_ONDOWNLOAD = False -GROWL_NOTIFY_ONSUBTITLEDOWNLOAD = False -GROWL_HOST = '' -GROWL_PASSWORD = None - -USE_FREEMOBILE = False -FREEMOBILE_NOTIFY_ONSNATCH = False -FREEMOBILE_NOTIFY_ONDOWNLOAD = False -FREEMOBILE_NOTIFY_ONSUBTITLEDOWNLOAD = False -FREEMOBILE_ID = '' -FREEMOBILE_APIKEY = '' - -USE_TELEGRAM = False -TELEGRAM_NOTIFY_ONSNATCH = False -TELEGRAM_NOTIFY_ONDOWNLOAD = False -TELEGRAM_NOTIFY_ONSUBTITLEDOWNLOAD = False -TELEGRAM_ID = '' -TELEGRAM_APIKEY = '' - -USE_PROWL = False -PROWL_NOTIFY_ONSNATCH = False -PROWL_NOTIFY_ONDOWNLOAD = False -PROWL_NOTIFY_ONSUBTITLEDOWNLOAD = False -PROWL_API = [] -PROWL_PRIORITY = 0 -PROWL_MESSAGE_TITLE = 'Medusa' - -USE_TWITTER = False -TWITTER_NOTIFY_ONSNATCH = False -TWITTER_NOTIFY_ONDOWNLOAD = False -TWITTER_NOTIFY_ONSUBTITLEDOWNLOAD = False -TWITTER_USERNAME = None -TWITTER_PASSWORD = None -TWITTER_PREFIX = None -TWITTER_DMTO = None -TWITTER_USEDM = False - -USE_BOXCAR2 = False -BOXCAR2_NOTIFY_ONSNATCH = False -BOXCAR2_NOTIFY_ONDOWNLOAD = False -BOXCAR2_NOTIFY_ONSUBTITLEDOWNLOAD = False -BOXCAR2_ACCESSTOKEN = None - -USE_PUSHOVER = False -PUSHOVER_NOTIFY_ONSNATCH = False -PUSHOVER_NOTIFY_ONDOWNLOAD = False -PUSHOVER_NOTIFY_ONSUBTITLEDOWNLOAD = False -PUSHOVER_USERKEY = None -PUSHOVER_APIKEY = None -PUSHOVER_DEVICE = [] -PUSHOVER_SOUND = None - -USE_LIBNOTIFY = False 
-LIBNOTIFY_NOTIFY_ONSNATCH = False -LIBNOTIFY_NOTIFY_ONDOWNLOAD = False -LIBNOTIFY_NOTIFY_ONSUBTITLEDOWNLOAD = False - -USE_NMJ = False -NMJ_HOST = None -NMJ_DATABASE = None -NMJ_MOUNT = None - -ANIMESUPPORT = False -USE_ANIDB = False -ANIDB_USERNAME = None -ANIDB_PASSWORD = None -ANIDB_USE_MYLIST = False -ADBA_CONNECTION = None -ANIME_SPLIT_HOME = False -ANIME_SPLIT_HOME_IN_TABS = False - -USE_SYNOINDEX = False - -USE_NMJv2 = False -NMJv2_HOST = None -NMJv2_DATABASE = None -NMJv2_DBLOC = None - -USE_SYNOLOGYNOTIFIER = False -SYNOLOGYNOTIFIER_NOTIFY_ONSNATCH = False -SYNOLOGYNOTIFIER_NOTIFY_ONDOWNLOAD = False -SYNOLOGYNOTIFIER_NOTIFY_ONSUBTITLEDOWNLOAD = False - -USE_SLACK = False -SLACK_NOTIFY_SNATCH = None -SLACK_NOTIFY_DOWNLOAD = None -SLACK_NOTIFY_SUBTITLEDOWNLOAD = None -SLACK_WEBHOOK = None - -USE_TRAKT = False -TRAKT_USERNAME = None -TRAKT_ACCESS_TOKEN = None -TRAKT_REFRESH_TOKEN = None -TRAKT_REMOVE_WATCHLIST = False -TRAKT_REMOVE_SERIESLIST = False -TRAKT_REMOVE_SHOW_FROM_APPLICATION = False -TRAKT_SYNC_WATCHLIST = False -TRAKT_METHOD_ADD = None -TRAKT_START_PAUSED = False -TRAKT_USE_RECOMMENDED = False -TRAKT_SYNC = False -TRAKT_SYNC_REMOVE = False -TRAKT_DEFAULT_INDEXER = None -TRAKT_TIMEOUT = None -TRAKT_BLACKLIST_NAME = None - -USE_PYTIVO = False -PYTIVO_NOTIFY_ONSNATCH = False -PYTIVO_NOTIFY_ONDOWNLOAD = False -PYTIVO_NOTIFY_ONSUBTITLEDOWNLOAD = False -PYTIVO_UPDATE_LIBRARY = False -PYTIVO_HOST = '' -PYTIVO_SHARE_NAME = '' -PYTIVO_TIVO_NAME = '' - -USE_PUSHALOT = False -PUSHALOT_NOTIFY_ONSNATCH = False -PUSHALOT_NOTIFY_ONDOWNLOAD = False -PUSHALOT_NOTIFY_ONSUBTITLEDOWNLOAD = False -PUSHALOT_AUTHORIZATIONTOKEN = None - -USE_PUSHBULLET = False -PUSHBULLET_NOTIFY_ONSNATCH = False -PUSHBULLET_NOTIFY_ONDOWNLOAD = False -PUSHBULLET_NOTIFY_ONSUBTITLEDOWNLOAD = False -PUSHBULLET_API = None -PUSHBULLET_DEVICE = None - -USE_EMAIL = False -EMAIL_NOTIFY_ONSNATCH = False -EMAIL_NOTIFY_ONDOWNLOAD = False -EMAIL_NOTIFY_ONSUBTITLEDOWNLOAD = False -EMAIL_HOST = None 
-EMAIL_PORT = 25 -EMAIL_TLS = False -EMAIL_USER = None -EMAIL_PASSWORD = None -EMAIL_FROM = None -EMAIL_LIST = [] -EMAIL_SUBJECT = None - -HOME_LAYOUT = None -HISTORY_LAYOUT = None -HISTORY_LIMIT = 0 -DISPLAY_SHOW_SPECIALS = False -COMING_EPS_LAYOUT = None -COMING_EPS_DISPLAY_PAUSED = False -COMING_EPS_SORT = None -COMING_EPS_MISSED_RANGE = None -FUZZY_DATING = False -TRIM_ZERO = False -DATE_PRESET = None -TIME_PRESET = None -TIME_PRESET_W_SECONDS = None -TIMEZONE_DISPLAY = None - -# UI -THEME_NAME = None -AVAILABLE_THEMES = [] -DATA_ROOT = None -THEME = 'dark' -THEME_PATH = None -THEME_DATA_ROOT = None -POSTER_SORTBY = None -POSTER_SORTDIR = None -FANART_BACKGROUND = True -FANART_BACKGROUND_OPACITY = None -SELECTED_ROOT = None -BACKLOG_PERIOD = None -BACKLOG_STATUS = None -LAYOUT_WIDE = False - -USE_SUBTITLES = False -SUBTITLES_LANGUAGES = [] -SUBTITLES_DIR = '' -SUBTITLES_SERVICES_LIST = [] -SUBTITLES_SERVICES_ENABLED = [] -SUBTITLES_HISTORY = False -SUBTITLES_PERFECT_MATCH = False -IGNORE_EMBEDDED_SUBS = False -ACCEPT_UNKNOWN_EMBEDDED_SUBS = False -SUBTITLES_STOP_AT_FIRST = False -SUBTITLES_HEARING_IMPAIRED = False -SUBTITLES_FINDER_FREQUENCY = 1 -SUBTITLES_MULTI = False -SUBTITLES_EXTRA_SCRIPTS = [] -SUBTITLES_PRE_SCRIPTS = [] -SUBTITLES_KEEP_ONLY_WANTED = False -SUBTITLES_ERASE_CACHE = False - -ADDIC7ED_USER = None -ADDIC7ED_PASS = None - -ITASA_USER = None -ITASA_PASS = None - -LEGENDASTV_USER = None -LEGENDASTV_PASS = None - -OPENSUBTITLES_USER = None -OPENSUBTITLES_PASS = None - -USE_FAILED_DOWNLOADS = False -DELETE_FAILED = False - -EXTRA_SCRIPTS = [] - -IGNORE_WORDS = ['german', 'french', 'core2hd', 'dutch', 'swedish', 'reenc', 'MrLss', 'dubbed'] - -PREFERRED_WORDS = [] - -UNDESIRED_WORDS = ['internal', 'xvid'] - -TRACKERS_LIST = ['udp://tracker.coppersurfer.tk:6969/announce', - 'udp://tracker.leechers-paradise.org:6969/announce', - 'udp://tracker.zer0day.to:1337/announce', 'udp://tracker.opentrackr.org:1337/announce', - 
'http://tracker.opentrackr.org:1337/announce', 'udp://p4p.arenabg.com:1337/announce', - 'http://p4p.arenabg.com:1337/announce', 'udp://explodie.org:6969/announce', - 'udp://9.rarbg.com:2710/announce', 'http://explodie.org:6969/announce', - 'http://tracker.dler.org:6969/announce', 'udp://public.popcorn-tracker.org:6969/announce', - 'udp://tracker.internetwarriors.net:1337/announce', 'udp://ipv4.tracker.harry.lu:80/announce', - 'http://ipv4.tracker.harry.lu:80/announce', 'udp://mgtracker.org:2710/announce', - 'http://mgtracker.org:6969/announce', 'udp://tracker.mg64.net:6969/announce', - 'http://tracker.mg64.net:6881/announce', 'http://torrentsmd.com:8080/announce'] - -REQUIRE_WORDS = [] -IGNORED_SUBS_LIST = ['dk', 'fin', 'heb', 'kor', 'nor', 'nordic', 'pl', 'swe'] -IGNORE_UND_SUBS = False -SYNC_FILES = ['!sync', 'lftp-pget-status', 'part', 'bts', '!qb', '!qB'] - -CALENDAR_UNPROTECTED = False -CALENDAR_ICONS = False -NO_RESTART = False - -TMDB_API_KEY = 'edc5f123313769de83a71e157758030b' -# TRAKT_API_KEY = 'd4161a7a106424551add171e5470112e4afdaf2438e6ef2fe0548edc75924868' - -TRAKT_API_KEY = '5c65f55e11d48c35385d9e8670615763a605fad28374c8ae553a7b7a50651ddd' -TRAKT_API_SECRET = 'b53e32045ac122a445ef163e6d859403301ffe9b17fb8321d428531b69022a82' -TRAKT_PIN_URL = 'https://trakt.tv/pin/4562' -TRAKT_OAUTH_URL = 'https://trakt.tv/' -TRAKT_API_URL = 'https://api.trakt.tv/' - -FANART_API_KEY = '9b3afaf26f6241bdb57d6cc6bd798da7' - -SHOWS_RECENT = [] - -__INITIALIZED__ = False - -NEWZNAB_PROVIDERS = [] - -TORRENTRSS_PROVIDERS = [] - -TORZNAB_PROVIDERS = [] - -RECENTLY_DELETED = set() - -RECENTLY_POSTPROCESSED = {} - -RELEASES_IN_PP = [] - -PRIVACY_LEVEL = 'normal' - -PROPERS_SEARCH_INTERVAL = {'15m': 15, '45m': 45, '90m': 90, '4h': 4 * 60, 'daily': 24 * 60} -PROPERS_INTERVAL_LABELS = {'daily': '24 hours', - '4h': '4 hours', - '90m': '90 mins', - '45m': '45 mins', - '15m': '15 mins' - } +class MedusaApp(object): + """Medusa app config.""" + + def __init__(self): + """Initialize 
Medusa application config.""" + # Application instance + self.instance = None + + # Fixed values + self.__title__ = __name__ + self.SRC_FOLDER = __name__ + self.LEGACY_SRC_FOLDERS = ('sickbeard', 'sickrage', 'gui') + self.LIB_FOLDER = 'lib' + self.LIB2_FOLDER = 'lib2' + self.LIB3_FOLDER = 'lib3' + self.EXT_FOLDER = 'ext' + self.EXT2_FOLDER = 'ext2' + self.EXT3_FOLDER = 'ext3' + self.STATIC_FOLDER = 'static' + self.UNKNOWN_RELEASE_GROUP = 'Medusa' + self.BACKUP_DIR = 'backup' + self.BACKUP_FILENAME_PREFIX = 'backup' + self.BACKUP_FILENAME = self.BACKUP_FILENAME_PREFIX + '-{timestamp}.zip' + self.LEGACY_DB = 'sickbeard.db' + self.APPLICATION_DB = 'main.db' + self.FAILED_DB = 'failed.db' + self.CACHE_DB = 'cache.db' + self.LOG_FILENAME = 'application.log' + self.CONFIG_INI = 'config.ini' + self.GIT_ORG = 'pymedusa' + self.GIT_REPO = 'Medusa' + self.BASE_PYMEDUSA_URL = 'https://cdn.pymedusa.com' + self.CHANGES_URL = '{base_url}/news/CHANGELOG.md'.format(base_url=self.BASE_PYMEDUSA_URL) + self.APPLICATION_URL = 'https://github.com/{org}/{repo}'.format(org=self.GIT_ORG, repo=self.GIT_REPO) + self.DONATIONS_URL = '{0}/wiki/Donations'.format(self.APPLICATION_URL) + self.WIKI_URL = '{0}/wiki'.format(self.APPLICATION_URL) + self.GITHUB_IO_URL = 'http://github.com/pymedusa/medusa.github.io/' + self.EXTRA_SCRIPTS_URL = '{0}/wiki/Post-Processing#extra-scripts'.format(self.APPLICATION_URL) + self.SUBTITLES_URL = '{0}/wiki/Subtitle%20Scripts'.format(self.APPLICATION_URL) + self.RARBG_APPID = 'medusa' + self.SECURE_TOKEN = 'medusa_user' + + # static configuration + self.LOCALE = None, None + self.OS_USER = None + self.OPENSSL_VERSION = None + self.APP_VERSION = None + self.MAJOR_DB_VERSION = None + self.MINOR_DB_VERSION = None + + self.PID = None + self.CFG = None + self.CONFIG_FILE = None + + # This is the version of the config we EXPECT to find + self.CONFIG_VERSION = 10 + + # Default encryption version (0 for None) + self.ENCRYPTION_VERSION = 0 + self.ENCRYPTION_SECRET = None + 
+ self.PROG_DIR = '.' + self.MY_FULLNAME = None + self.MY_NAME = None + self.MY_ARGS = [] + self.SYS_ENCODING = '' + self.DATA_DIR = '' + self.CREATEPID = False + self.PIDFILE = '' + self.RUNS_IN_DOCKER = None + + self.DAEMON = None + self.NO_RESIZE = False + + # system events + self.events = None + + # schedulers + self.daily_search_scheduler = None + self.backlog_search_scheduler = None + self.show_update_scheduler = None + self.version_check_scheduler = None + self.show_queue_scheduler = None + self.search_queue_scheduler = None + self.forced_search_queue_scheduler = None + self.manual_snatch_scheduler = None + self.proper_finder_scheduler = None + self.auto_post_processor_scheduler = None + self.subtitles_finder_scheduler = None + self.trakt_checker_scheduler = None + self.torrent_checker_scheduler = None + + self.showList = [] + + self.providerList = [] + self.newznabProviderList = [] + self.torrentRssProviderList = [] + self.torznab_providers_list = [] + self.metadata_provider_dict = {} + + self.NEWEST_VERSION = None + self.NEWEST_VERSION_STRING = None + self.VERSION_NOTIFY = False + self.AUTO_UPDATE = False + self.NOTIFY_ON_UPDATE = False + self.CUR_COMMIT_HASH = None + self.BRANCH = '' + + self.GIT_RESET = True + self.GIT_RESET_BRANCHES = ['develop', 'master'] + self.GIT_REMOTE_BRANCHES = [] + self.GIT_REMOTE = '' + self.GIT_REMOTE_URL = '' + self.CUR_COMMIT_BRANCH = '' + self.GIT_AUTH_TYPE = 0 + self.GIT_USERNAME = None + self.GIT_PASSWORD = None + self.GIT_TOKEN = None + self.GIT_PATH = None + self.DEVELOPER = False + + self.NEWS_URL = '{base_url}/news/news.md'.format(base_url=self.BASE_PYMEDUSA_URL) + self.LOGO_URL = '{base_url}/images/ico/favicon-64.png'.format(base_url=self.BASE_PYMEDUSA_URL) + + self.NEWS_LAST_READ = None + self.NEWS_LATEST = None + self.NEWS_UNREAD = 0 + + self.BROKEN_PROVIDERS = [] + self.BROKEN_PROVIDERS_UPDATE = None + + self.INIT_LOCK = Lock() + self.started = False + + self.ACTUAL_LOG_DIR = None + self.LOG_DIR = None + 
self.LOG_NR = 5 + self.LOG_SIZE = 10.0 + + self.SOCKET_TIMEOUT = None + + self.WEB_PORT = None + self.WEB_LOG = None + self.WEB_ROOT = None + self.WEB_USERNAME = None + self.WEB_PASSWORD = None + self.WEB_HOST = None + self.WEB_IPV6 = None + self.WEB_COOKIE_SECRET = None + self.WEB_USE_GZIP = True + + self.SUBLIMINAL_LOG = False + + self.DOWNLOAD_URL = None + + self.HANDLE_REVERSE_PROXY = False + self.PROXY_SETTING = None + self.PROXY_INDEXERS = True + self.SSL_VERIFY = True + self.SSL_CA_BUNDLE = None + + self.LOCALHOST_IP = None + + self.CPU_PRESET = None + + self.ANON_REDIRECT = None + + self.API_KEY = None + self.API_ROOT = None + + self.ENABLE_HTTPS = False + self.NOTIFY_ON_LOGIN = False + self.HTTPS_CERT = None + self.HTTPS_KEY = None + + self.INDEXER_DEFAULT_LANGUAGE = None + self.EP_DEFAULT_DELETED_STATUS = None + self.LAUNCH_BROWSER = False + self.CACHE_DIR = None + self.ACTUAL_CACHE_DIR = None + self.ROOT_DIRS = [] + self.TVDB_DVD_ORDER_EP_IGNORE = False + + self.TRASH_REMOVE_SHOW = False + self.TRASH_ROTATE_LOGS = False + self.SORT_ARTICLE = False + self.DEBUG = False + self.DBDEBUG = False + self.DISPLAY_ALL_SEASONS = True + self.DEFAULT_PAGE = 'home' + self.SEEDERS_LEECHERS_IN_NOTIFY = True + self.SHOW_LIST_ORDER = ['Anime', 'Series'] + + self.USE_LISTVIEW = False + self.METADATA_KODI = [] + self.METADATA_KODI_12PLUS = [] + self.METADATA_MEDIABROWSER = [] + self.METADATA_PS3 = [] + self.METADATA_WDTV = [] + self.METADATA_TIVO = [] + self.METADATA_MEDE8ER = [] + + self.QUALITY_DEFAULT = None + self.STATUS_DEFAULT = None + self.STATUS_DEFAULT_AFTER = None + self.SEASON_FOLDERS_DEFAULT = True + self.SUBTITLES_DEFAULT = False + self.INDEXER_DEFAULT = None + self.INDEXER_TIMEOUT = None + self.SCENE_DEFAULT = False + self.ANIME_DEFAULT = False + self.PROVIDER_ORDER = [] + + self.NAMING_MULTI_EP = False + self.NAMING_ANIME_MULTI_EP = False + self.NAMING_PATTERN = None + self.NAMING_ABD_PATTERN = None + self.NAMING_CUSTOM_ABD = False + 
self.NAMING_SPORTS_PATTERN = None + self.NAMING_CUSTOM_SPORTS = False + self.NAMING_ANIME_PATTERN = None + self.NAMING_CUSTOM_ANIME = False + self.NAMING_FORCE_FOLDERS = False + self.NAMING_STRIP_YEAR = False + self.NAMING_ANIME = None + + self.USE_NZBS = False + self.USE_TORRENTS = False + + self.NZB_METHOD = None + self.NZB_DIR = None + self.USENET_RETENTION = None + self.CACHE_TRIMMING = None + self.MAX_CACHE_AGE = None + self.TORRENT_METHOD = None + self.TORRENT_DIR = None + self.DOWNLOAD_PROPERS = False + self.CHECK_PROPERS_INTERVAL = None + self.PROPERS_SEARCH_DAYS = 2 + self.REMOVE_FROM_CLIENT = False + self.ALLOW_HIGH_PRIORITY = False + self.SAB_FORCED = False + self.RANDOMIZE_PROVIDERS = False + + self.AUTOPOSTPROCESSOR_FREQUENCY = 10 + self.DAILYSEARCH_FREQUENCY = None + self.UPDATE_FREQUENCY = None + self.BACKLOG_FREQUENCY = None + self.SHOWUPDATE_HOUR = None + + self.DEFAULT_TORRENT_CHECKER_FREQUENCY = 60 + self.DEFAULT_DAILYSEARCH_FREQUENCY = 40 + self.DEFAULT_BACKLOG_FREQUENCY = 21 + self.DEFAULT_UPDATE_FREQUENCY = 1 + self.DEFAULT_SHOWUPDATE_HOUR = random.randint(2, 4) + + self.MIN_AUTOPOSTPROCESSOR_FREQUENCY = 1 + self.MIN_TORRENT_CHECKER_FREQUENCY = 30 + self.MIN_DAILYSEARCH_FREQUENCY = 10 + self.MIN_BACKLOG_FREQUENCY = 10 + self.MIN_UPDATE_FREQUENCY = 1 + + self.BACKLOG_DAYS = 7 + + self.ADD_SHOWS_WO_DIR = False + self.CREATE_MISSING_SHOW_DIRS = False + self.RENAME_EPISODES = False + self.AIRDATE_EPISODES = False + self.FILE_TIMESTAMP_TIMEZONE = None + self.PROCESS_AUTOMATICALLY = False + self.NO_DELETE = False + self.KEEP_PROCESSED_DIR = False + self.PROCESS_METHOD = None + self.DELRARCONTENTS = False + self.MOVE_ASSOCIATED_FILES = False + self.POSTPONE_IF_SYNC_FILES = True + self.POSTPONE_IF_NO_SUBS = False + self.NFO_RENAME = True + self.TV_DOWNLOAD_DIR = None + self.UNPACK = False + self.SKIP_REMOVED_FILES = False + self.ALLOWED_EXTENSIONS = {'srt', 'nfo', 'sub', 'idx'} + + self.NZBS = False + self.NZBS_UID = None + self.NZBS_HASH = None + + 
self.OMGWTFNZBS = False + self.OMGWTFNZBS_USERNAME = None + self.OMGWTFNZBS_APIKEY = None + + self.NEWZBIN = False + self.NEWZBIN_USERNAME = None + self.NEWZBIN_PASSWORD = None + + self.SAB_USERNAME = None + self.SAB_PASSWORD = None + self.SAB_APIKEY = None + self.SAB_CATEGORY = None + self.SAB_CATEGORY_BACKLOG = None + self.SAB_CATEGORY_ANIME = None + self.SAB_CATEGORY_ANIME_BACKLOG = None + self.SAB_HOST = '' + + self.NZBGET_USERNAME = None + self.NZBGET_PASSWORD = None + self.NZBGET_CATEGORY = None + self.NZBGET_CATEGORY_BACKLOG = None + self.NZBGET_CATEGORY_ANIME = None + self.NZBGET_CATEGORY_ANIME_BACKLOG = None + self.NZBGET_HOST = None + self.NZBGET_USE_HTTPS = False + self.NZBGET_PRIORITY = 100 + + self.TORRENT_USERNAME = None + self.TORRENT_PASSWORD = None + self.TORRENT_HOST = '' + self.TORRENT_PATH = '' + self.TORRENT_SEED_TIME = None + self.TORRENT_PAUSED = False + self.TORRENT_HIGH_BANDWIDTH = False + self.TORRENT_LABEL = '' + self.TORRENT_LABEL_ANIME = '' + self.TORRENT_VERIFY_CERT = False + self.TORRENT_RPCURL = 'transmission' + self.TORRENT_AUTH_TYPE = 'none' + self.TORRENT_SEED_LOCATION = None + self.TORRENT_CHECKER_FREQUENCY = None + + self.USE_KODI = False + self.KODI_ALWAYS_ON = True + self.KODI_NOTIFY_ONSNATCH = False + self.KODI_NOTIFY_ONDOWNLOAD = False + self.KODI_NOTIFY_ONSUBTITLEDOWNLOAD = False + self.KODI_UPDATE_LIBRARY = False + self.KODI_UPDATE_FULL = False + self.KODI_UPDATE_ONLYFIRST = False + self.KODI_HOST = [] + self.KODI_USERNAME = None + self.KODI_PASSWORD = None + self.KODI_LIBRARY_CLEAN_PENDING = False + self.KODI_CLEAN_LIBRARY = False + + self.USE_PLEX_SERVER = False + self.PLEX_NOTIFY_ONSNATCH = False + self.PLEX_NOTIFY_ONDOWNLOAD = False + self.PLEX_NOTIFY_ONSUBTITLEDOWNLOAD = False + self.PLEX_UPDATE_LIBRARY = False + self.PLEX_SERVER_HOST = [] + self.PLEX_SERVER_TOKEN = None + self.PLEX_CLIENT_HOST = [] + self.PLEX_SERVER_USERNAME = None + self.PLEX_SERVER_PASSWORD = None + + self.USE_PLEX_CLIENT = False + 
self.PLEX_CLIENT_USERNAME = None + self.PLEX_CLIENT_PASSWORD = None + self.PLEX_SERVER_HTTPS = None + + self.USE_EMBY = False + self.EMBY_HOST = None + self.EMBY_APIKEY = None + + self.USE_GROWL = False + self.GROWL_NOTIFY_ONSNATCH = False + self.GROWL_NOTIFY_ONDOWNLOAD = False + self.GROWL_NOTIFY_ONSUBTITLEDOWNLOAD = False + self.GROWL_HOST = '' + self.GROWL_PASSWORD = None + + self.USE_FREEMOBILE = False + self.FREEMOBILE_NOTIFY_ONSNATCH = False + self.FREEMOBILE_NOTIFY_ONDOWNLOAD = False + self.FREEMOBILE_NOTIFY_ONSUBTITLEDOWNLOAD = False + self.FREEMOBILE_ID = '' + self.FREEMOBILE_APIKEY = '' + + self.USE_TELEGRAM = False + self.TELEGRAM_NOTIFY_ONSNATCH = False + self.TELEGRAM_NOTIFY_ONDOWNLOAD = False + self.TELEGRAM_NOTIFY_ONSUBTITLEDOWNLOAD = False + self.TELEGRAM_ID = '' + self.TELEGRAM_APIKEY = '' + + self.USE_PROWL = False + self.PROWL_NOTIFY_ONSNATCH = False + self.PROWL_NOTIFY_ONDOWNLOAD = False + self.PROWL_NOTIFY_ONSUBTITLEDOWNLOAD = False + self.PROWL_API = [] + self.PROWL_PRIORITY = 0 + self.PROWL_MESSAGE_TITLE = 'Medusa' + + self.USE_TWITTER = False + self.TWITTER_NOTIFY_ONSNATCH = False + self.TWITTER_NOTIFY_ONDOWNLOAD = False + self.TWITTER_NOTIFY_ONSUBTITLEDOWNLOAD = False + self.TWITTER_USERNAME = None + self.TWITTER_PASSWORD = None + self.TWITTER_PREFIX = None + self.TWITTER_DMTO = None + self.TWITTER_USEDM = False + + self.USE_BOXCAR2 = False + self.BOXCAR2_NOTIFY_ONSNATCH = False + self.BOXCAR2_NOTIFY_ONDOWNLOAD = False + self.BOXCAR2_NOTIFY_ONSUBTITLEDOWNLOAD = False + self.BOXCAR2_ACCESSTOKEN = None + + self.USE_PUSHOVER = False + self.PUSHOVER_NOTIFY_ONSNATCH = False + self.PUSHOVER_NOTIFY_ONDOWNLOAD = False + self.PUSHOVER_NOTIFY_ONSUBTITLEDOWNLOAD = False + self.PUSHOVER_USERKEY = None + self.PUSHOVER_APIKEY = None + self.PUSHOVER_DEVICE = [] + self.PUSHOVER_SOUND = None + self.PUSHOVER_PRIORITY = 0 + + self.USE_LIBNOTIFY = False + self.LIBNOTIFY_NOTIFY_ONSNATCH = False + self.LIBNOTIFY_NOTIFY_ONDOWNLOAD = False + 
self.LIBNOTIFY_NOTIFY_ONSUBTITLEDOWNLOAD = False + + self.USE_NMJ = False + self.NMJ_HOST = None + self.NMJ_DATABASE = None + self.NMJ_MOUNT = None + + self.ANIMESUPPORT = False + self.USE_ANIDB = False + self.ANIDB_USERNAME = None + self.ANIDB_PASSWORD = None + self.ANIDB_USE_MYLIST = False + self.ADBA_CONNECTION = None + self.ANIME_SPLIT_HOME = False + self.ANIME_SPLIT_HOME_IN_TABS = False + + self.USE_SYNOINDEX = False + + self.USE_NMJv2 = False + self.NMJv2_HOST = None + self.NMJv2_DATABASE = None + self.NMJv2_DBLOC = None + + self.USE_SYNOLOGYNOTIFIER = False + self.SYNOLOGYNOTIFIER_NOTIFY_ONSNATCH = False + self.SYNOLOGYNOTIFIER_NOTIFY_ONDOWNLOAD = False + self.SYNOLOGYNOTIFIER_NOTIFY_ONSUBTITLEDOWNLOAD = False + + self.USE_SLACK = False + self.SLACK_NOTIFY_SNATCH = None + self.SLACK_NOTIFY_DOWNLOAD = None + self.SLACK_NOTIFY_SUBTITLEDOWNLOAD = None + self.SLACK_WEBHOOK = None + + self.USE_TRAKT = False + self.TRAKT_USERNAME = None + self.TRAKT_ACCESS_TOKEN = None + self.TRAKT_REFRESH_TOKEN = None + self.TRAKT_REMOVE_WATCHLIST = False + self.TRAKT_REMOVE_SERIESLIST = False + self.TRAKT_REMOVE_SHOW_FROM_APPLICATION = False + self.TRAKT_SYNC_WATCHLIST = False + self.TRAKT_METHOD_ADD = None + self.TRAKT_START_PAUSED = False + self.TRAKT_USE_RECOMMENDED = False + self.TRAKT_SYNC = False + self.TRAKT_SYNC_REMOVE = False + self.TRAKT_DEFAULT_INDEXER = None + self.TRAKT_TIMEOUT = None + self.TRAKT_BLACKLIST_NAME = None + + self.USE_PYTIVO = False + self.PYTIVO_NOTIFY_ONSNATCH = False + self.PYTIVO_NOTIFY_ONDOWNLOAD = False + self.PYTIVO_NOTIFY_ONSUBTITLEDOWNLOAD = False + self.PYTIVO_UPDATE_LIBRARY = False + self.PYTIVO_HOST = '' + self.PYTIVO_SHARE_NAME = '' + self.PYTIVO_TIVO_NAME = '' + + self.USE_PUSHALOT = False + self.PUSHALOT_NOTIFY_ONSNATCH = False + self.PUSHALOT_NOTIFY_ONDOWNLOAD = False + self.PUSHALOT_NOTIFY_ONSUBTITLEDOWNLOAD = False + self.PUSHALOT_AUTHORIZATIONTOKEN = None + + self.USE_PUSHBULLET = False + self.PUSHBULLET_NOTIFY_ONSNATCH = False + 
self.PUSHBULLET_NOTIFY_ONDOWNLOAD = False + self.PUSHBULLET_NOTIFY_ONSUBTITLEDOWNLOAD = False + self.PUSHBULLET_API = None + self.PUSHBULLET_DEVICE = None + + self.USE_JOIN = False + self.JOIN_NOTIFY_ONSNATCH = False + self.JOIN_NOTIFY_ONDOWNLOAD = False + self.JOIN_NOTIFY_ONSUBTITLEDOWNLOAD = False + self.JOIN_API = None + self.JOIN_DEVICE = None + + self.USE_EMAIL = False + self.EMAIL_NOTIFY_ONSNATCH = False + self.EMAIL_NOTIFY_ONDOWNLOAD = False + self.EMAIL_NOTIFY_ONSUBTITLEDOWNLOAD = False + self.EMAIL_HOST = None + self.EMAIL_PORT = 25 + self.EMAIL_TLS = False + self.EMAIL_USER = None + self.EMAIL_PASSWORD = None + self.EMAIL_FROM = None + self.EMAIL_LIST = [] + self.EMAIL_SUBJECT = None + + self.HOME_LAYOUT = None + self.HISTORY_LAYOUT = None + self.HISTORY_LIMIT = 0 + self.DISPLAY_SHOW_SPECIALS = False + self.COMING_EPS_LAYOUT = None + self.COMING_EPS_DISPLAY_PAUSED = False + self.COMING_EPS_SORT = None + self.COMING_EPS_MISSED_RANGE = None + self.FUZZY_DATING = False + self.TRIM_ZERO = False + self.DATE_PRESET = None + self.TIME_PRESET = None + self.TIME_PRESET_W_SECONDS = None + self.TIMEZONE_DISPLAY = None + + # UI + self.THEME_NAME = None + self.AVAILABLE_THEMES = [] + self.DATA_ROOT = None + self.THEME = 'dark' + self.THEME_PATH = None + self.THEME_DATA_ROOT = None + self.POSTER_SORTBY = None + self.POSTER_SORTDIR = None + self.FANART_BACKGROUND = True + self.FANART_BACKGROUND_OPACITY = None + self.SELECTED_ROOT = None + self.BACKLOG_PERIOD = None + self.BACKLOG_STATUS = None + self.LAYOUT_WIDE = False + + self.USE_SUBTITLES = False + self.SUBTITLES_LANGUAGES = [] + self.SUBTITLES_DIR = '' + self.SUBTITLES_SERVICES_LIST = [] + self.SUBTITLES_SERVICES_ENABLED = [] + self.SUBTITLES_HISTORY = False + self.SUBTITLES_PERFECT_MATCH = False + self.IGNORE_EMBEDDED_SUBS = False + self.ACCEPT_UNKNOWN_EMBEDDED_SUBS = False + self.SUBTITLES_STOP_AT_FIRST = False + self.SUBTITLES_HEARING_IMPAIRED = False + self.SUBTITLES_FINDER_FREQUENCY = 1 + self.SUBTITLES_MULTI 
= False + self.SUBTITLES_EXTRA_SCRIPTS = [] + self.SUBTITLES_PRE_SCRIPTS = [] + self.SUBTITLES_KEEP_ONLY_WANTED = False + self.SUBTITLES_ERASE_CACHE = False + + self.ADDIC7ED_USER = None + self.ADDIC7ED_PASS = None + + self.ITASA_USER = None + self.ITASA_PASS = None + + self.LEGENDASTV_USER = None + self.LEGENDASTV_PASS = None + + self.OPENSUBTITLES_USER = None + self.OPENSUBTITLES_PASS = None + + self.USE_FAILED_DOWNLOADS = False + self.DELETE_FAILED = False + + self.EXTRA_SCRIPTS = [] + + self.IGNORE_WORDS = ['german', 'french', 'core2hd', 'dutch', 'swedish', 'reenc', 'MrLss', 'dubbed'] + + self.PREFERRED_WORDS = [] + + self.UNDESIRED_WORDS = ['internal', 'xvid'] + + self.TRACKERS_LIST = [ + 'udp://tracker.coppersurfer.tk:6969/announce', + 'udp://tracker.leechers-paradise.org:6969/announce', + 'udp://tracker.zer0day.to:1337/announce', 'udp://tracker.opentrackr.org:1337/announce', + 'http://tracker.opentrackr.org:1337/announce', 'udp://p4p.arenabg.com:1337/announce', + 'http://p4p.arenabg.com:1337/announce', 'udp://explodie.org:6969/announce', + 'udp://9.rarbg.com:2710/announce', 'http://explodie.org:6969/announce', + 'http://tracker.dler.org:6969/announce', 'udp://public.popcorn-tracker.org:6969/announce', + 'udp://tracker.internetwarriors.net:1337/announce', 'udp://ipv4.tracker.harry.lu:80/announce', + 'http://ipv4.tracker.harry.lu:80/announce', 'udp://mgtracker.org:2710/announce', + 'http://mgtracker.org:6969/announce', 'udp://tracker.mg64.net:6969/announce', + 'http://tracker.mg64.net:6881/announce', 'http://torrentsmd.com:8080/announce' + ] + + self.REQUIRE_WORDS = [] + self.IGNORED_SUBS_LIST = ['dk', 'fin', 'heb', 'kor', 'nor', 'nordic', 'pl', 'swe'] + self.IGNORE_UND_SUBS = False + self.SYNC_FILES = ['!sync', 'lftp-pget-status', 'part', 'bts', '!qb', '!qB'] + + self.CALENDAR_UNPROTECTED = False + self.CALENDAR_ICONS = False + self.NO_RESTART = False + + self.TMDB_API_KEY = 'edc5f123313769de83a71e157758030b' + # TRAKT_API_KEY = 
'd4161a7a106424551add171e5470112e4afdaf2438e6ef2fe0548edc75924868' + + self.TRAKT_API_KEY = '5c65f55e11d48c35385d9e8670615763a605fad28374c8ae553a7b7a50651ddd' + self.TRAKT_API_SECRET = 'b53e32045ac122a445ef163e6d859403301ffe9b17fb8321d428531b69022a82' + self.TRAKT_PIN_URL = 'https://trakt.tv/pin/4562' + self.TRAKT_OAUTH_URL = 'https://trakt.tv/' + self.TRAKT_API_URL = 'https://api.trakt.tv/' + + self.FANART_API_KEY = '9b3afaf26f6241bdb57d6cc6bd798da7' + + self.SHOWS_RECENT = [] + + self.__INITIALIZED__ = False + + self.NEWZNAB_PROVIDERS = [] + + self.TORRENTRSS_PROVIDERS = [] + + self.TORZNAB_PROVIDERS = [] + + self.RECENTLY_DELETED = set() + + self.RECENTLY_POSTPROCESSED = {} + + self.RELEASES_IN_PP = [] + + self.PRIVACY_LEVEL = 'normal' + + self.PROPERS_SEARCH_INTERVAL = { + '15m': 15, + '45m': 45, + '90m': 90, + '4h': 4 * 60, + 'daily': 24 * 60 + } + + self.PROPERS_INTERVAL_LABELS = { + 'daily': '24 hours', + '4h': '4 hours', + '90m': '90 mins', + '45m': '45 mins', + '15m': '15 mins' + } -# Plex fallback settings -FALLBACK_PLEX_ENABLE = True -FALLBACK_PLEX_NOTIFICATIONS = True -FALLBACK_PLEX_TIMEOUT = 3 -FALLBACK_PLEX_API_URL = 'https://tvdb2.plex.tv' -TVDB_API_KEY = '0629B785CE550C8D' + # Plex fallback settings + self.FALLBACK_PLEX_ENABLE = True + self.FALLBACK_PLEX_NOTIFICATIONS = True + self.FALLBACK_PLEX_TIMEOUT = 3 + self.FALLBACK_PLEX_API_URL = 'https://tvdb2.plex.tv' + self.TVDB_API_KEY = '0629B785CE550C8D' + + +app = MedusaApp() +for app_key, app_value in app.__dict__.items(): + setattr(sys.modules[__name__], app_key, app_value) diff --git a/medusa/classes.py b/medusa/classes.py index 8637084838..c8326e4eab 100644 --- a/medusa/classes.py +++ b/medusa/classes.py @@ -25,6 +25,7 @@ from medusa import app from medusa.common import Quality from medusa.logger.adapters.style import BraceAdapter +from medusa.search import SearchType from six import itervalues @@ -85,7 +86,7 @@ def __init__(self, episodes=None, provider=None): self.proper_tags = '' # 
manually_searched - self.manually_searched = False + self._manually_searched = False # content self.content = None @@ -127,7 +128,8 @@ def __init__(self, episodes=None, provider=None): # to store a single episode number, as an int. self._actual_episode = None - # Search type. For example MANUAL_SEARCH, FORCED_SEARCH, DAILY_SEARCH, PROPER_SEARCH + # Search type. Use the medusa.search.SearchType enum, as value. + # For example SearchType.MANUAL_SEARCH, SearchType.FORCED_SEARCH, SearchType.DAILY_SEARCH, SearchType.PROPER_SEARCH self.search_type = None @property @@ -164,6 +166,24 @@ def show(self, value): ) self.series = value + @property + def manually_searched(self): + """ + Shortcut to check if the result was retrieved using a manual search. + + Preferably this property is not used, and the self.search_type property is directly evaluated. + """ + return self._manually_searched or self.search_type == SearchType.MANUAL_SEARCH + + @manually_searched.setter + def manually_searched(self, value): + """ + Shortcut to check if the result was retrieved using a manual search. + + Preferably this property is not used, and the self.search_type property is directly evaluated. 
+ """ + self._manually_searched = value + def __str__(self): if self.provider is None: @@ -222,6 +242,21 @@ def finish_search_result(self, provider): self.size = provider._get_size(self.item) self.pubdate = provider._get_pubdate(self.item) + def update_from_db(self, show, episodes, cached_result): + """Update local attributes from the cached result, recovered from db.""" + self.series = show + self.episodes = episodes + self.url = cached_result['url'] + self.quality = int(cached_result['quality']) + self.name = cached_result['name'] + self.size = int(cached_result['size']) + self.seeders = int(cached_result['seeders']) + self.leechers = int(cached_result['leechers']) + self.release_group = cached_result['release_group'] + self.version = int(cached_result['version']) + self.proper_tags = cached_result['proper_tags'].split('|') \ + if cached_result['proper_tags'] else '' + def __eq__(self, other): return self.__dict__ == other.__dict__ diff --git a/medusa/clients/torrent/download_station_client.py b/medusa/clients/torrent/downloadstation_client.py similarity index 98% rename from medusa/clients/torrent/download_station_client.py rename to medusa/clients/torrent/downloadstation_client.py index 6d9475b8c5..c975ffec39 100644 --- a/medusa/clients/torrent/download_station_client.py +++ b/medusa/clients/torrent/downloadstation_client.py @@ -9,6 +9,7 @@ from __future__ import unicode_literals +import json import logging import os import re @@ -120,7 +121,7 @@ def _add_torrent_uri(self, result): if torrent_path: data['destination'] = torrent_path - log.debug('Add torrent URI with data: {}'.format(data)) + log.debug('Add torrent URI with data: {0}', json.dumps(data)) self._request(method='post', data=data) return self._check_response() @@ -143,7 +144,7 @@ def _add_torrent_file(self, result): files = {'file': ('{name}.torrent'.format(name=result.name), result.content)} - log.debug('Add torrent files with data: {}'.format(data)) + log.debug('Add torrent files with data: {0}', 
json.dumps(data)) self._request(method='post', data=data, files=files) return self._check_response() diff --git a/medusa/common.py b/medusa/common.py index c3a4ac93b3..c013edbd9f 100644 --- a/medusa/common.py +++ b/medusa/common.py @@ -39,7 +39,7 @@ long = int INSTANCE_ID = str(uuid.uuid1()) -VERSION = '0.2.11' +VERSION = '0.2.12' USER_AGENT = 'Medusa/{version} ({system}; {release}; {instance})'.format( version=VERSION, system=platform.system(), release=platform.release(), instance=INSTANCE_ID) diff --git a/medusa/helper/encoding.py b/medusa/helper/encoding.py deleted file mode 100644 index 1513869a5f..0000000000 --- a/medusa/helper/encoding.py +++ /dev/null @@ -1,79 +0,0 @@ -# coding=utf-8 - -from __future__ import unicode_literals - -from builtins import map - -from chardet import detect - -from medusa import app - -from six import text_type - - -def ss(var): - """ - Converts string to Unicode, fallback encoding is forced UTF-8 - - :param var: String to convert - :return: Converted string - """ - - var = _to_unicode(var) - - try: - var = var.encode(app.SYS_ENCODING) - except Exception: - try: - var = var.encode('utf-8') - except Exception: - try: - var = var.encode(app.SYS_ENCODING, 'replace') - except Exception: - var = var.encode('utf-8', 'ignore') - - return var - - -def _fix_list_encoding(var): - """ - Converts each item in a list to Unicode - - :param var: List or tuple to convert to Unicode - :return: Unicode converted input - """ - - if isinstance(var, (list, tuple)): - return [x for x in map(_to_unicode, var) if x is not None] - - return var - - -def _to_unicode(var): - """ - Converts string to Unicode, using in order: UTF-8, Latin-1, System encoding or finally what chardet wants - - :param var: String to convert - :return: Converted string as unicode, fallback is System encoding - """ - - if isinstance(var, str): - try: - var = text_type(var) - except Exception: - try: - var = text_type(var, 'utf-8') - except Exception: - try: - var = text_type(var, 
'latin-1') - except Exception: - try: - var = text_type(var, app.SYS_ENCODING) - except Exception: - try: - # Chardet can be wrong, so try it last - var = text_type(var, detect(var).get('encoding')) - except Exception: - var = text_type(var, app.SYS_ENCODING, 'replace') - - return var diff --git a/medusa/helper/exceptions.py b/medusa/helper/exceptions.py index 76b0bb6f99..d729c8384f 100644 --- a/medusa/helper/exceptions.py +++ b/medusa/helper/exceptions.py @@ -2,10 +2,6 @@ from __future__ import unicode_literals -from builtins import str - -from medusa.helper.encoding import ss - from six import text_type @@ -14,19 +10,17 @@ def ex(e): :param e: The exception to convert into a unicode string :return: A unicode string from the exception text if it exists """ - - message = u'' - + message = '' if not e or not e.args: return message for arg in e.args: if arg is not None: - if isinstance(arg, (str, text_type)): - fixed_arg = ss(arg) + if isinstance(arg, text_type): + fixed_arg = arg else: try: - fixed_arg = u'error %s' % ss(str(arg)) + fixed_arg = 'Error: {0!r}'.format(arg) except Exception: fixed_arg = None @@ -35,9 +29,9 @@ def ex(e): message = fixed_arg else: try: - message = u'{} : {}'.format(message, fixed_arg) - except UnicodeError: - message = u'{} : {}'.format( + message = '{0}: {1}'.format(message, fixed_arg) + except UnicodeDecodeError: + message = '{0}: {1}'.format( text_type(message, errors='replace'), text_type(fixed_arg, errors='replace')) diff --git a/medusa/history.py b/medusa/history.py index 9698f90415..0426e64a4d 100644 --- a/medusa/history.py +++ b/medusa/history.py @@ -22,7 +22,6 @@ from medusa import db from medusa.common import FAILED, SNATCHED, SUBTITLED -from medusa.helper.encoding import ss from medusa.show.history import History @@ -37,8 +36,7 @@ def _log_history_item(action, ep_obj, resource, provider, version=-1, proper_tag :param provider: provider used :param version: tracked version of file (defaults to -1) """ - logDate = 
datetime.datetime.today().strftime(History.date_format) - resource = ss(resource) + log_date = datetime.datetime.today().strftime(History.date_format) main_db_con = db.DBConnection() main_db_con.action( @@ -46,8 +44,8 @@ def _log_history_item(action, ep_obj, resource, provider, version=-1, proper_tag '(action, date, indexer_id, showid, season, episode, quality, ' 'resource, provider, version, proper_tags, manually_searched, info_hash, size) ' 'VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?)', - [action, logDate, ep_obj.series.indexer, ep_obj.series.series_id, ep_obj.season, ep_obj.episode, ep_obj.quality, - resource, provider, version, proper_tags, manually_searched, info_hash, size]) + [action, log_date, ep_obj.series.indexer, ep_obj.series.series_id, ep_obj.season, ep_obj.episode, + ep_obj.quality, resource, provider, version, proper_tags, manually_searched, info_hash, size]) def log_snatch(search_result): diff --git a/medusa/metadata/kodi_12plus.py b/medusa/metadata/kodi_12plus.py index 6db2fde655..6c8c96f4fa 100644 --- a/medusa/metadata/kodi_12plus.py +++ b/medusa/metadata/kodi_12plus.py @@ -5,7 +5,7 @@ import datetime import logging import re -from builtins import str + from babelfish import Country @@ -19,7 +19,7 @@ from medusa.logger.adapters.style import BraceAdapter from medusa.metadata import generic -from six import string_types +from six import string_types, text_type try: import xml.etree.cElementTree as etree @@ -117,11 +117,11 @@ def _show_data(self, series_obj): if getattr(my_show, 'rating', None): rating = etree.SubElement(tv_node, 'rating') - rating.text = str(my_show['rating']) + rating.text = text_type(my_show['rating']) if getattr(my_show, 'firstaired', None): try: - year_text = str(datetime.datetime.strptime(my_show['firstaired'], dateFormat).year) + year_text = text_type(datetime.datetime.strptime(my_show['firstaired'], dateFormat).year) if year_text: year = etree.SubElement(tv_node, 'year') year.text = year_text @@ -148,7 +148,7 @@ def _show_data(self, 
series_obj): if getattr(my_show, 'id', None): indexer_id = etree.SubElement(tv_node, 'id') - indexer_id.text = str(my_show['id']) + indexer_id.text = text_type(my_show['id']) if getattr(my_show, 'genre', None) and isinstance(my_show['genre'], string_types): for genre in self._split_info(my_show['genre']): @@ -242,7 +242,7 @@ def _ep_data(self, ep_obj): return None if not getattr(my_ep, 'firstaired', None): - my_ep['firstaired'] = str(datetime.date.fromordinal(1)) + my_ep['firstaired'] = text_type(datetime.date.fromordinal(1)) if not getattr(my_ep, 'episodename', None): log.debug(u'Not generating nfo because the ep has no title') @@ -265,17 +265,17 @@ def _ep_data(self, ep_obj): showtitle.text = series_obj['seriesname'] season = etree.SubElement(episode, 'season') - season.text = str(ep_to_write.season) + season.text = text_type(ep_to_write.season) episodenum = etree.SubElement(episode, 'episode') - episodenum.text = str(ep_to_write.episode) + episodenum.text = text_type(ep_to_write.episode) uniqueid = etree.SubElement(episode, 'uniqueid') - uniqueid.text = str(ep_to_write.indexerid) + uniqueid.text = text_type(ep_to_write.indexerid) if ep_to_write.airdate != datetime.date.fromordinal(1): aired = etree.SubElement(episode, 'aired') - aired.text = str(ep_to_write.airdate) + aired.text = text_type(ep_to_write.airdate) if getattr(my_ep, 'overview', None): plot = etree.SubElement(episode, 'plot') @@ -283,7 +283,7 @@ def _ep_data(self, ep_obj): if ep_to_write.season and getattr(series_obj, 'runtime', None): runtime = etree.SubElement(episode, 'runtime') - runtime.text = str(series_obj['runtime']) + runtime.text = text_type(series_obj['runtime']) if getattr(my_ep, 'airsbefore_season', None): displayseason = etree.SubElement(episode, 'displayseason') @@ -302,7 +302,7 @@ def _ep_data(self, ep_obj): if getattr(my_ep, 'rating', None): rating = etree.SubElement(episode, 'rating') - rating.text = str(my_ep['rating']) + rating.text = text_type(my_ep['rating']) if getattr(my_ep, 
'writer', None) and isinstance(my_ep['writer'], string_types): for writer in self._split_info(my_ep['writer']): diff --git a/medusa/name_parser/rules/rules.py b/medusa/name_parser/rules/rules.py index 7a434b6fdb..fe34920a80 100644 --- a/medusa/name_parser/rules/rules.py +++ b/medusa/name_parser/rules/rules.py @@ -984,6 +984,9 @@ def when(self, matches, context): return titles = matches.named('title') + if not titles: + return + first_title = titles[0] last_title = titles[-1] @@ -1080,13 +1083,20 @@ def when(self, matches, context): second_part = fileparts[parts_len - 2].value if self.ends_with_digit.search(second_part): title = matches.named('title') - if not title or second_part.startswith(title[0].value): + if not title: episode_title[0].name = 'title' to_append = episode_title - to_remove = title - + to_remove = None return to_remove, to_append + if second_part.startswith(title[0].value): + season = matches.named('season') + if season and not second_part.endswith(season[-1].initiator.value): + episode_title[0].name = 'title' + to_append = episode_title + to_remove = title + return to_remove, to_append + class FixMultipleSources(Rule): """Fix multiple sources. 
diff --git a/medusa/notifiers/__init__.py b/medusa/notifiers/__init__.py index 2c914056ab..56d845f118 100644 --- a/medusa/notifiers/__init__.py +++ b/medusa/notifiers/__init__.py @@ -6,6 +6,11 @@ import socket from medusa import app +from medusa.common import ( + NOTIFY_SNATCH, + NOTIFY_SNATCH_PROPER, + notifyStrings, +) from medusa.logger.adapters.style import BraceAdapter from medusa.notifiers import ( boxcar2, @@ -13,6 +18,7 @@ emby, freemobile, growl, + join, kodi, libnotify, nmj, @@ -54,6 +60,7 @@ boxcar2_notifier = boxcar2.Notifier() pushalot_notifier = pushalot.Notifier() pushbullet_notifier = pushbullet.Notifier() +join_notifier = join.Notifier() freemobile_notifier = freemobile.Notifier() telegram_notifier = telegram.Notifier() # social @@ -79,6 +86,7 @@ boxcar2_notifier, pushalot_notifier, pushbullet_notifier, + join_notifier, twitter_notifier, trakt_notifier, email_notifier, @@ -86,26 +94,37 @@ ] -def notify_download(ep_name): +def notify_download(ep_obj): for n in notifiers: try: - n.notify_download(ep_name) + n.notify_download(ep_obj) except (RequestException, socket.gaierror, socket.timeout) as error: log.debug(u'Unable to send download notification. Error: {0}', error.message) -def notify_subtitle_download(ep_name, lang): +def notify_subtitle_download(ep_obj, lang): for n in notifiers: try: - n.notify_subtitle_download(ep_name, lang) + n.notify_subtitle_download(ep_obj, lang) except (RequestException, socket.gaierror, socket.timeout) as error: - log.debug(u'Unable to send download notification. Error: {0}', error.message) + log.debug(u'Unable to send subtitle download notification. 
Error: {0}', error.message) + + +def notify_snatch(ep_obj, result): + ep_name = ep_obj.pretty_name_with_quality() + is_proper = bool(result.proper_tags) + title = notifyStrings[(NOTIFY_SNATCH, NOTIFY_SNATCH_PROPER)[is_proper]] + if all([app.SEEDERS_LEECHERS_IN_NOTIFY, result.seeders not in (-1, None), + result.leechers not in (-1, None)]): + message = u'{0} with {1} seeders and {2} leechers from {3}'.format( + ep_name, result.seeders, result.leechers, result.provider.name) + else: + message = u'{0} from {1}'.format(ep_name, result.provider.name) -def notify_snatch(ep_name, is_proper): for n in notifiers: try: - n.notify_snatch(ep_name, is_proper) + n.notify_snatch(title, message) except (RequestException, socket.gaierror, socket.timeout) as error: log.debug(u'Unable to send snatch notification. Error: {0}', error.message) diff --git a/medusa/notifiers/boxcar2.py b/medusa/notifiers/boxcar2.py index 6c22080fe6..a350e44212 100644 --- a/medusa/notifiers/boxcar2.py +++ b/medusa/notifiers/boxcar2.py @@ -56,21 +56,20 @@ def _send_boxcar2(self, msg, title, accesstoken): log.debug('Boxcar2 notification successful.') return True - def notify_snatch(self, ep_name, is_proper): + def notify_snatch(self, title, message): """Send the snatch message.""" - title = common.notifyStrings[(common.NOTIFY_SNATCH, common.NOTIFY_SNATCH_PROPER)[is_proper]] if app.BOXCAR2_NOTIFY_ONSNATCH: - self._notify_boxcar2(title, ep_name) + self._notify_boxcar2(title, message) - def notify_download(self, ep_name, title=common.notifyStrings[common.NOTIFY_DOWNLOAD]): + def notify_download(self, ep_obj, title=common.notifyStrings[common.NOTIFY_DOWNLOAD]): """Send the download message.""" if app.BOXCAR2_NOTIFY_ONDOWNLOAD: - self._notify_boxcar2(title, ep_name) + self._notify_boxcar2(title, ep_obj.pretty_name_with_quality()) - def notify_subtitle_download(self, ep_name, lang, title=common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD]): + def notify_subtitle_download(self, ep_obj, lang, 
title=common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD]): """Send the subtitle download message.""" if app.BOXCAR2_NOTIFY_ONSUBTITLEDOWNLOAD: - self._notify_boxcar2(title, ep_name + ': ' + lang) + self._notify_boxcar2(title, ep_obj.pretty_name() + ': ' + lang) def notify_git_update(self, new_version='??'): """Send update available message.""" diff --git a/medusa/notifiers/emailnotify.py b/medusa/notifiers/emailnotify.py index 436b1d7064..110df60ae2 100644 --- a/medusa/notifiers/emailnotify.py +++ b/medusa/notifiers/emailnotify.py @@ -13,7 +13,13 @@ from email.utils import formatdate from medusa import app, db -from medusa.helper.encoding import ss +from medusa.common import ( + NOTIFY_DOWNLOAD, + NOTIFY_GIT_UPDATE, + NOTIFY_LOGIN, + NOTIFY_SUBTITLE_DOWNLOAD, + notifyStrings, +) from medusa.logger.adapters.style import BraceAdapter log = BraceAdapter(logging.getLogger(__name__)) @@ -51,7 +57,7 @@ def test_notify(self, host, port, smtp_from, use_tls, user, pwd, to): """ msg = MIMEText('This is a test message from Medusa. If you\'re reading this, the test succeeded.') if app.EMAIL_SUBJECT: - msg['Subject'] = '[TEST] ' + app.EMAIL_SUBJECT + msg['Subject'] = '[TEST] {0}'.format(app.EMAIL_SUBJECT) else: msg['Subject'] = 'Medusa: Test Message' msg['From'] = smtp_from @@ -59,17 +65,14 @@ def test_notify(self, host, port, smtp_from, use_tls, user, pwd, to): msg['Date'] = formatdate(localtime=True) return self._sendmail(host, port, smtp_from, use_tls, user, pwd, [to], msg, True) - def notify_snatch(self, ep_name, is_proper, title='Snatched:'): + def notify_snatch(self, title, message): """ Send a notification that an episode was snatched. 
ep_name: The name of the episode that was snatched - title: The title of the notification (optional) """ - ep_name = ss(ep_name) - if app.USE_EMAIL and app.EMAIL_NOTIFY_ONSNATCH: - parsed = self._parse_name(ep_name) + parsed = self._parse_name(message) to = self._generate_recipients(parsed['show']) if not to: log.debug('Skipping email notify because there are no configured recipients') @@ -92,34 +95,36 @@ def notify_snatch(self, ep_name, is_proper, title='Snatched:'): except Exception: try: - msg = MIMEText(ep_name) + msg = MIMEText(message) except Exception: - msg = MIMEText('Episode Snatched') + msg = MIMEText(title) if app.EMAIL_SUBJECT: - msg['Subject'] = '[SN] ' + app.EMAIL_SUBJECT + msg['Subject'] = '{0}: {1}'.format(title, app.EMAIL_SUBJECT) else: - msg['Subject'] = 'Snatched: ' + ep_name + msg['Subject'] = '{0}: {1}'.format(title, message) msg['From'] = app.EMAIL_FROM msg['To'] = ','.join(to) msg['Date'] = formatdate(localtime=True) + if self._sendmail(app.EMAIL_HOST, app.EMAIL_PORT, app.EMAIL_FROM, app.EMAIL_TLS, app.EMAIL_USER, app.EMAIL_PASSWORD, to, msg): log.debug('Snatch notification sent to {recipient} for {episode}', - {'recipient': to, 'episode': ep_name}) + {'recipient': to, 'episode': message}) else: log.warning('Snatch notification error: {0}', self.last_err) - def notify_download(self, ep_name, title='Completed:'): + def notify_download(self, ep_obj, title='Completed:'): """ Send a notification that an episode was downloaded. 
ep_name: The name of the episode that was downloaded title: The title of the notification (optional) """ - ep_name = ss(ep_name) - if app.USE_EMAIL and app.EMAIL_NOTIFY_ONDOWNLOAD: + title = notifyStrings[NOTIFY_DOWNLOAD] + ep_name = ep_obj.pretty_name_with_quality() + parsed = self._parse_name(ep_name) to = self._generate_recipients(parsed['show']) if not to: @@ -145,15 +150,16 @@ def notify_download(self, ep_name, title='Completed:'): try: msg = MIMEText(ep_name) except Exception: - msg = MIMEText('Episode Downloaded') + msg = MIMEText(title) if app.EMAIL_SUBJECT: - msg['Subject'] = '[DL] ' + app.EMAIL_SUBJECT + msg['Subject'] = '{0}: {1}'.format(title, app.EMAIL_SUBJECT) else: - msg['Subject'] = 'Downloaded: ' + ep_name + msg['Subject'] = '{0}: {1}'.format(title, ep_name) msg['From'] = app.EMAIL_FROM msg['To'] = ','.join(to) msg['Date'] = formatdate(localtime=True) + if self._sendmail(app.EMAIL_HOST, app.EMAIL_PORT, app.EMAIL_FROM, app.EMAIL_TLS, app.EMAIL_USER, app.EMAIL_PASSWORD, to, msg): log.debug('Download notification sent to {recipient} for {episode}', @@ -161,16 +167,17 @@ def notify_download(self, ep_name, title='Completed:'): else: log.warning('Download notification error: {0}', self.last_err) - def notify_subtitle_download(self, ep_name, lang, title='Downloaded subtitle:'): + def notify_subtitle_download(self, ep_obj, lang): """ Send a notification that a subtitle was downloaded. 
ep_name: The name of the episode that was downloaded lang: Subtitle language wanted """ - ep_name = ss(ep_name) - if app.USE_EMAIL and app.EMAIL_NOTIFY_ONSUBTITLEDOWNLOAD: + title = notifyStrings[NOTIFY_SUBTITLE_DOWNLOAD] + ep_name = ep_obj.pretty_name() + parsed = self._parse_name(ep_name) to = self._generate_recipients(parsed['show']) if not to: @@ -195,16 +202,17 @@ def notify_subtitle_download(self, ep_name, lang, title='Downloaded subtitle:'): 'html')) except Exception: try: - msg = MIMEText(ep_name + ': ' + lang) + msg = MIMEText('{0}: {1}'.format(ep_name, lang)) except Exception: - msg = MIMEText('Episode Subtitle Downloaded') + msg = MIMEText(title) if app.EMAIL_SUBJECT: - msg['Subject'] = '[ST] ' + app.EMAIL_SUBJECT + msg['Subject'] = '{0} [{1}]: {2}'.format(title, lang, app.EMAIL_SUBJECT) else: - msg['Subject'] = lang + ' Subtitle Downloaded: ' + ep_name + msg['Subject'] = '{0} [{1}]: {2}'.format(title, lang, ep_name) msg['From'] = app.EMAIL_FROM msg['To'] = ','.join(to) + if self._sendmail(app.EMAIL_HOST, app.EMAIL_PORT, app.EMAIL_FROM, app.EMAIL_TLS, app.EMAIL_USER, app.EMAIL_PASSWORD, to, msg): log.debug('Download notification sent to {recipient} for {episode}', @@ -219,6 +227,7 @@ def notify_git_update(self, new_version='??'): new_version: The commit Medusa was updated to """ if app.USE_EMAIL: + title = notifyStrings[NOTIFY_GIT_UPDATE] to = self._generate_recipients(None) if not to: log.debug('Skipping email notify because there are no configured recipients') @@ -238,12 +247,13 @@ def notify_git_update(self, new_version='??'): try: msg = MIMEText(new_version) except Exception: - msg = MIMEText('Medusa updated') + msg = MIMEText(title) - msg['Subject'] = 'Updated: {0}'.format(new_version) + msg['Subject'] = '{0}: {1}'.format(title, new_version) msg['From'] = app.EMAIL_FROM msg['To'] = ','.join(to) msg['Date'] = formatdate(localtime=True) + if self._sendmail(app.EMAIL_HOST, app.EMAIL_PORT, app.EMAIL_FROM, app.EMAIL_TLS, app.EMAIL_USER, 
app.EMAIL_PASSWORD, to, msg): log.debug('Update notification sent to {recipient}', @@ -258,6 +268,7 @@ def notify_login(self, ipaddress=''): ipaddress: The ip Medusa was logged into from """ if app.USE_EMAIL: + title = notifyStrings[NOTIFY_LOGIN] to = self._generate_recipients(None) if not to: log.debug('Skipping email notify because there are no configured recipients') @@ -277,12 +288,13 @@ def notify_login(self, ipaddress=''): try: msg = MIMEText(ipaddress) except Exception: - msg = MIMEText('Medusa Remote Login') + msg = MIMEText(title) - msg['Subject'] = 'New Login from IP: {0}'.format(ipaddress) + msg['Subject'] = '{0}: {1}'.format(title, ipaddress) msg['From'] = app.EMAIL_FROM msg['To'] = ','.join(to) msg['Date'] = formatdate(localtime=True) + if self._sendmail(app.EMAIL_HOST, app.EMAIL_PORT, app.EMAIL_FROM, app.EMAIL_TLS, app.EMAIL_USER, app.EMAIL_PASSWORD, to, msg): log.debug('Login notification sent to {recipient}', {'recipient': to}) @@ -371,8 +383,6 @@ def _sendmail(self, host, port, smtp_from, use_tls, user, pwd, to, msg, smtp_deb @classmethod def _parse_name(cls, ep_name): - ep_name = ss(ep_name) - # @TODO: Prone to issues, best solution is to have a dictionary passed to notifiers match = cls.name_pattern.match(ep_name) @@ -390,4 +400,5 @@ def _parse_name(cls, ep_name): log.debug('Email notifier parsed "{0}" into {1!r}', ep_name, result) + return result diff --git a/medusa/notifiers/freemobile.py b/medusa/notifiers/freemobile.py index e37058b16d..de7a3cbc84 100644 --- a/medusa/notifiers/freemobile.py +++ b/medusa/notifiers/freemobile.py @@ -12,8 +12,6 @@ NOTIFY_GIT_UPDATE_TEXT, NOTIFY_LOGIN, NOTIFY_LOGIN_TEXT, - NOTIFY_SNATCH, - NOTIFY_SNATCH_PROPER, NOTIFY_SUBTITLE_DOWNLOAD, notifyStrings, ) @@ -78,18 +76,17 @@ def _sendFreeMobileSMS(self, title, msg, cust_id=None, apiKey=None): log.info(message) return True, message - def notify_snatch(self, ep_name, is_proper): - title = notifyStrings[(NOTIFY_SNATCH, NOTIFY_SNATCH_PROPER)[is_proper]] + def 
notify_snatch(self, title, message): if app.FREEMOBILE_NOTIFY_ONSNATCH: - self._notifyFreeMobile(title, ep_name) + self._notifyFreeMobile(title, message) - def notify_download(self, ep_name, title=notifyStrings[NOTIFY_DOWNLOAD]): + def notify_download(self, ep_obj, title=notifyStrings[NOTIFY_DOWNLOAD]): if app.FREEMOBILE_NOTIFY_ONDOWNLOAD: - self._notifyFreeMobile(title, ep_name) + self._notifyFreeMobile(title, ep_obj.pretty_name_with_quality()) - def notify_subtitle_download(self, ep_name, lang, title=notifyStrings[NOTIFY_SUBTITLE_DOWNLOAD]): + def notify_subtitle_download(self, ep_obj, lang, title=notifyStrings[NOTIFY_SUBTITLE_DOWNLOAD]): if app.FREEMOBILE_NOTIFY_ONSUBTITLEDOWNLOAD: - self._notifyFreeMobile(title, ep_name + ': ' + lang) + self._notifyFreeMobile(title, ep_obj.pretty_name() + ': ' + lang) def notify_git_update(self, new_version='??'): if app.USE_FREEMOBILE: diff --git a/medusa/notifiers/growl.py b/medusa/notifiers/growl.py index f412d1ba49..f03319cc99 100644 --- a/medusa/notifiers/growl.py +++ b/medusa/notifiers/growl.py @@ -23,20 +23,17 @@ def test_notify(self, host, password): return self._sendGrowl('Test Growl', 'Testing Growl settings from Medusa', 'Test', host, password, force=True) - def notify_snatch(self, ep_name, is_proper): + def notify_snatch(self, title, message): if app.GROWL_NOTIFY_ONSNATCH: - self._sendGrowl( - common.notifyStrings[ - (common.NOTIFY_SNATCH, common.NOTIFY_SNATCH_PROPER)[is_proper] - ], ep_name) + self._sendGrowl(title, message) - def notify_download(self, ep_name): + def notify_download(self, ep_obj): if app.GROWL_NOTIFY_ONDOWNLOAD: - self._sendGrowl(common.notifyStrings[common.NOTIFY_DOWNLOAD], ep_name) + self._sendGrowl(common.notifyStrings[common.NOTIFY_DOWNLOAD], ep_obj.pretty_name_with_quality()) - def notify_subtitle_download(self, ep_name, lang): + def notify_subtitle_download(self, ep_obj, lang): if app.GROWL_NOTIFY_ONSUBTITLEDOWNLOAD: - self._sendGrowl(common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD], 
ep_name + ': ' + lang) + self._sendGrowl(common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD], ep_obj.pretty_name() + ': ' + lang) def notify_git_update(self, new_version='??'): update_text = common.notifyStrings[common.NOTIFY_GIT_UPDATE_TEXT] @@ -69,7 +66,7 @@ def _send_growl(self, options, message=None): if message: notice.add_header('Notification-Text', message) - response = self._send(options['host'], options['port'], notice.encode('utf-8'), options['debug']) + response = self._send(options['host'], options['port'], notice.encode(), options['debug']) return True if isinstance(response, gntp.core.GNTPOK) else False @staticmethod @@ -184,7 +181,7 @@ def _sendRegistration(self, host=None, password=None): register.set_password(opts['password']) try: - return self._send(opts['host'], opts['port'], register.encode('utf-8'), opts['debug']) + return self._send(opts['host'], opts['port'], register.encode(), opts['debug']) except Exception as error: log.warning( u'GROWL: Unable to send growl to {host}:{port} - {msg!r}', diff --git a/medusa/notifiers/join.py b/medusa/notifiers/join.py new file mode 100644 index 0000000000..88cd9e546e --- /dev/null +++ b/medusa/notifiers/join.py @@ -0,0 +1,105 @@ +# coding=utf-8 +# Author: Kevin Ould email: ouldsmobile1@gmail.com +"""Adds Join Notifications.""" +from __future__ import unicode_literals + +import logging +from builtins import object + +from medusa import app, common +from medusa.logger.adapters.style import BraceAdapter +from medusa.session.core import MedusaSession + +import requests + +log = BraceAdapter(logging.getLogger(__name__)) +log.logger.addHandler(logging.NullHandler()) + + +class Notifier(object): + """Notifier class for Join.""" + + def __init__(self): + """Init method.""" + self.session = MedusaSession() + self.url = 'https://joinjoaomgcd.appspot.com/_ah/api/messaging/v1/sendPush?' 
+ + def test_notify(self, join_api, join_device): + """Sends test notification from config screen.""" + log.debug('Sending a test Join notification.') + return self._sendjoin( + title='Test', + message='Testing Join settings from Medusa', + join_api=join_api, + join_device=join_device, + force=True + ) + + def notify_snatch(self, title, message): + """Send Join notification when nzb snatched if selected in config.""" + if app.JOIN_NOTIFY_ONSNATCH: + self._sendjoin( + title=title, + message=message + ) + + def notify_download(self, ep_obj): + """Send Join notification when nzb download completed if selected in config.""" + if app.JOIN_NOTIFY_ONDOWNLOAD: + self._sendjoin( + title=common.notifyStrings[common.NOTIFY_DOWNLOAD], + message=ep_obj.pretty_name_with_quality() + ) + + def notify_subtitle_download(self, ep_obj, lang): + """Send Join notification when subtitles downloaded if selected in config.""" + if app.JOIN_NOTIFY_ONSUBTITLEDOWNLOAD: + self._sendjoin( + title=common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD], + message=ep_obj.pretty_name() + ': ' + lang + ) + + def notify_git_update(self, new_version='??'): + """Send Join notification when new version available from git.""" + self._sendjoin( + title=common.notifyStrings[common.NOTIFY_GIT_UPDATE], + message=common.notifyStrings[common.NOTIFY_GIT_UPDATE_TEXT] + new_version, + ) + + def notify_login(self, ipaddress=''): + """Send Join notification when login detected.""" + self._sendjoin( + title=common.notifyStrings[common.NOTIFY_LOGIN], + message=common.notifyStrings[common.NOTIFY_LOGIN_TEXT].format(ipaddress) + ) + + def _sendjoin(self, title, message, join_api=None, join_device=None, force=False): + """Compose and send Join notification.""" + push_result = {'success': False, 'error': ''} + + if not (app.USE_JOIN or force): + return False + + join_api = join_api or app.JOIN_API + join_device = join_device or app.JOIN_DEVICE + icon_url = 'https://cdn.pymedusa.com/images/ico/favicon-310.png' + + post_data 
= {'title': title, 'text': message, 'deviceId': join_device, 'apikey': join_api, 'icon': icon_url} + + r = requests.get(self.url, params=post_data) + try: + response = r.json() + except ValueError: + log.warning('Join notification failed. Could not parse join response.') + push_result['error'] = 'Join notification failed. Could not parse join response.' + return push_result + + failed = response.pop('errorMessage', {}) + if failed: + log.warning('Join notification failed: {0}', failed.get('message')) + push_result['error'] = 'Join notification failed: {0}'.format(failed.get('message')) + else: + log.debug('Join notification sent.') + push_result['success'] = True + + return push_result diff --git a/medusa/notifiers/kodi.py b/medusa/notifiers/kodi.py index 1dadea4a8f..2ebe5056d9 100644 --- a/medusa/notifiers/kodi.py +++ b/medusa/notifiers/kodi.py @@ -65,7 +65,7 @@ def _get_kodi_version(self, host, username, password, dest_app='KODI'): else: return False - def _notify_kodi(self, message, title='Medusa', host=None, username=None, password=None, + def _notify_kodi(self, title, message, host=None, username=None, password=None, force=False, dest_app='KODI'): """Private wrapper for the notify_snatch and notify_download functions. 
@@ -421,39 +421,38 @@ def _update_library(self, host=None, series_name=None): # pylint: disable=too-m # Public functions which will call the JSON or Legacy HTTP API methods ############################################################################## - def notify_snatch(self, ep_name, is_proper): + def notify_snatch(self, title, message): """Send the snatch message.""" if app.KODI_NOTIFY_ONSNATCH: - self._notify_kodi(ep_name, common.notifyStrings[(common.NOTIFY_SNATCH, - common.NOTIFY_SNATCH_PROPER)[is_proper]]) + self._notify_kodi(title, message) - def notify_download(self, ep_name): + def notify_download(self, ep_obj): """Send the download message.""" if app.KODI_NOTIFY_ONDOWNLOAD: - self._notify_kodi(ep_name, common.notifyStrings[common.NOTIFY_DOWNLOAD]) + self._notify_kodi(common.notifyStrings[common.NOTIFY_DOWNLOAD], ep_obj.pretty_name_with_quality()) - def notify_subtitle_download(self, ep_name, lang): + def notify_subtitle_download(self, ep_obj, lang): """Send the subtitle download message.""" if app.KODI_NOTIFY_ONSUBTITLEDOWNLOAD: - self._notify_kodi(ep_name + ': ' + lang, common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD]) + self._notify_kodi(common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD], ep_obj.pretty_name() + ': ' + lang) def notify_git_update(self, new_version='??'): """Send update available message.""" if app.USE_KODI: update_text = common.notifyStrings[common.NOTIFY_GIT_UPDATE_TEXT] title = common.notifyStrings[common.NOTIFY_GIT_UPDATE] - self._notify_kodi(update_text + new_version, title) + self._notify_kodi(title, update_text + new_version) def notify_login(self, ipaddress=''): """Send the new login message.""" if app.USE_KODI: update_text = common.notifyStrings[common.NOTIFY_LOGIN_TEXT] title = common.notifyStrings[common.NOTIFY_LOGIN] - self._notify_kodi(update_text.format(ipaddress), title) + self._notify_kodi(title, update_text.format(ipaddress)) def test_notify(self, host, username, password): """Test notifier.""" - return 
self._notify_kodi('Testing KODI notifications from Medusa', 'Test Notification', host, username, + return self._notify_kodi('Test Notification', 'Testing KODI notifications from Medusa', host, username, password, force=True) def update_library(self, series_name=None): diff --git a/medusa/notifiers/libnotify.py b/medusa/notifiers/libnotify.py index f049c1e00f..9020a4dd4a 100644 --- a/medusa/notifiers/libnotify.py +++ b/medusa/notifiers/libnotify.py @@ -73,17 +73,17 @@ def init_notify(self): self.gobject = GObject return True - def notify_snatch(self, ep_name, is_proper): + def notify_snatch(self, title, message): if app.LIBNOTIFY_NOTIFY_ONSNATCH: - self._notify(common.notifyStrings[(common.NOTIFY_SNATCH, common.NOTIFY_SNATCH_PROPER)[is_proper]], ep_name) + self._notify(title, message) - def notify_download(self, ep_name): + def notify_download(self, ep_obj): if app.LIBNOTIFY_NOTIFY_ONDOWNLOAD: - self._notify(common.notifyStrings[common.NOTIFY_DOWNLOAD], ep_name) + self._notify(common.notifyStrings[common.NOTIFY_DOWNLOAD], ep_obj.pretty_name_with_quality()) - def notify_subtitle_download(self, ep_name, lang): + def notify_subtitle_download(self, ep_obj, lang): if app.LIBNOTIFY_NOTIFY_ONSUBTITLEDOWNLOAD: - self._notify(common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD], ep_name + ': ' + lang) + self._notify(common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD], ep_obj.pretty_name() + ': ' + lang) def notify_git_update(self, new_version='??'): if app.USE_LIBNOTIFY: diff --git a/medusa/notifiers/nmj.py b/medusa/notifiers/nmj.py index bf6ab7df1b..6338048e1c 100644 --- a/medusa/notifiers/nmj.py +++ b/medusa/notifiers/nmj.py @@ -74,15 +74,15 @@ def notify_settings(self, host): return True - def notify_snatch(self, ep_name, is_proper): + def notify_snatch(self, title, message): return False # Not implemented: Start the scanner when snatched does not make any sense - def notify_download(self, ep_name): + def notify_download(self, ep_obj): if app.USE_NMJ: self._notifyNMJ() 
- def notify_subtitle_download(self, ep_name, lang): + def notify_subtitle_download(self, ep_obj, lang): if app.USE_NMJ: self._notifyNMJ() diff --git a/medusa/notifiers/nmjv2.py b/medusa/notifiers/nmjv2.py index 45e75c3f89..930eb880b8 100644 --- a/medusa/notifiers/nmjv2.py +++ b/medusa/notifiers/nmjv2.py @@ -22,21 +22,21 @@ class Notifier(object): - def notify_snatch(self, ep_name, is_proper): # pylint: disable=unused-argument + def notify_snatch(self, title, message): return False # Not implemented: Start the scanner when snatched does not make any sense - def notify_download(self, ep_name): # pylint: disable=unused-argument + def notify_download(self, ep_obj): self._notifyNMJ() - def notify_subtitle_download(self, ep_name, lang): # pylint: disable=unused-argument + def notify_subtitle_download(self, ep_obj, lang): self._notifyNMJ() - def notify_git_update(self, new_version): # pylint: disable=unused-argument + def notify_git_update(self, new_version): return False # Not implemented, no reason to start scanner. 
- def notify_login(self, ipaddress=''): # pylint: disable=unused-argument + def notify_login(self, ipaddress=''): return False def test_notify(self, host): diff --git a/medusa/notifiers/plex.py b/medusa/notifiers/plex.py index 1df32717aa..66c37d6aac 100644 --- a/medusa/notifiers/plex.py +++ b/medusa/notifiers/plex.py @@ -37,7 +37,7 @@ def __init__(self): }) @staticmethod - def _notify_pht(message, title='Medusa', host=None, username=None, password=None, force=False): # pylint: disable=too-many-arguments + def _notify_pht(title, message, host=None, username=None, password=None, force=False): # pylint: disable=too-many-arguments """Internal wrapper for the notify_snatch and notify_download functions Args: @@ -68,35 +68,35 @@ def _notify_pht(message, title='Medusa', host=None, username=None, password=None # Public functions ############################################################################## - def notify_snatch(self, ep_name, is_proper): + def notify_snatch(self, title, message): if app.PLEX_NOTIFY_ONSNATCH: - self._notify_pht(ep_name, common.notifyStrings[(common.NOTIFY_SNATCH, common.NOTIFY_SNATCH_PROPER)[is_proper]]) + self._notify_pht(title, message) - def notify_download(self, ep_name): + def notify_download(self, ep_obj): if app.PLEX_NOTIFY_ONDOWNLOAD: - self._notify_pht(ep_name, common.notifyStrings[common.NOTIFY_DOWNLOAD]) + self._notify_pht(common.notifyStrings[common.NOTIFY_DOWNLOAD], ep_obj.pretty_name_with_quality()) - def notify_subtitle_download(self, ep_name, lang): + def notify_subtitle_download(self, ep_obj, lang): if app.PLEX_NOTIFY_ONSUBTITLEDOWNLOAD: - self._notify_pht(ep_name + ': ' + lang, common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD]) + self._notify_pht(common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD], ep_obj.pretty_name() + ': ' + lang) def notify_git_update(self, new_version='??'): if app.NOTIFY_ON_UPDATE: update_text = common.notifyStrings[common.NOTIFY_GIT_UPDATE_TEXT] title = 
common.notifyStrings[common.NOTIFY_GIT_UPDATE] if update_text and title and new_version: - self._notify_pht(update_text + new_version, title) + self._notify_pht(title, update_text + new_version) def notify_login(self, ipaddress=''): if app.NOTIFY_ON_LOGIN: update_text = common.notifyStrings[common.NOTIFY_LOGIN_TEXT] title = common.notifyStrings[common.NOTIFY_LOGIN] if update_text and title and ipaddress: - self._notify_pht(update_text.format(ipaddress), title) + self._notify_pht(title, update_text.format(ipaddress)) def test_notify_pht(self, host, username, password): - return self._notify_pht('This is a test notification from Medusa', - 'Test Notification', host, username, password, force=True) + return self._notify_pht('Test Notification', 'This is a test notification from Medusa', + host, username, password, force=True) def test_notify_pms(self, host, username, password, plex_server_token): return self.update_library(hosts=host, username=username, password=password, diff --git a/medusa/notifiers/prowl.py b/medusa/notifiers/prowl.py index 81794f4963..f7bfcc8283 100644 --- a/medusa/notifiers/prowl.py +++ b/medusa/notifiers/prowl.py @@ -5,11 +5,9 @@ import ast import logging import socket -import time from builtins import object from medusa import app, common, db -from medusa.helper.encoding import ss from medusa.logger.adapters.style import BraceAdapter from requests.compat import urlencode @@ -32,20 +30,20 @@ class Notifier(object): def test_notify(self, prowl_api, prowl_priority): return self._send_prowl(prowl_api, prowl_priority, event='Test', message='Testing Prowl settings from Medusa', force=True) - def notify_snatch(self, ep_name, is_proper): - ep_name = ss(ep_name) + def notify_snatch(self, title, message): if app.PROWL_NOTIFY_ONSNATCH: - show = self._parse_episode(ep_name) + show = self._parse_episode(message) recipients = self._generate_recipients(show) if not recipients: log.debug('Skipping prowl notify because there are no configured recipients') else: 
for api in recipients: - self._send_prowl(prowl_api=api, prowl_priority=None, event=common.notifyStrings[(common.NOTIFY_SNATCH, common.NOTIFY_SNATCH_PROPER)[is_proper]], - message=ep_name + ' :: ' + time.strftime(app.DATE_PRESET + ' ' + app.TIME_PRESET)) + self._send_prowl(prowl_api=api, prowl_priority=None, + event=title, + message=message) - def notify_download(self, ep_name): - ep_name = ss(ep_name) + def notify_download(self, ep_obj): + ep_name = ep_obj.pretty_name_with_quality() if app.PROWL_NOTIFY_ONDOWNLOAD: show = self._parse_episode(ep_name) recipients = self._generate_recipients(show) @@ -53,11 +51,12 @@ def notify_download(self, ep_name): log.debug('Skipping prowl notify because there are no configured recipients') else: for api in recipients: - self._send_prowl(prowl_api=api, prowl_priority=None, event=common.notifyStrings[common.NOTIFY_DOWNLOAD], - message=ep_name + ' :: ' + time.strftime(app.DATE_PRESET + ' ' + app.TIME_PRESET)) + self._send_prowl(prowl_api=api, prowl_priority=None, + event=common.notifyStrings[common.NOTIFY_DOWNLOAD], + message=ep_name) - def notify_subtitle_download(self, ep_name, lang): - ep_name = ss(ep_name) + def notify_subtitle_download(self, ep_obj, lang): + ep_name = ep_obj.pretty_name() if app.PROWL_NOTIFY_ONSUBTITLEDOWNLOAD: show = self._parse_episode(ep_name) recipients = self._generate_recipients(show) @@ -65,8 +64,9 @@ def notify_subtitle_download(self, ep_name, lang): log.debug('Skipping prowl notify because there are no configured recipients') else: for api in recipients: - self._send_prowl(prowl_api=api, prowl_priority=None, event=common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD], - message=ep_name + ' [' + lang + '] :: ' + time.strftime(app.DATE_PRESET + ' ' + app.TIME_PRESET)) + self._send_prowl(prowl_api=api, prowl_priority=None, + event=common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD], + message=ep_name + ' [' + lang + ']') def notify_git_update(self, new_version='??'): if app.USE_PROWL: @@ -157,10 +157,9 
@@ def _send_prowl(prowl_api=None, prowl_priority=None, event=None, message=None, f @staticmethod def _parse_episode(ep_name): - ep_name = ss(ep_name) - sep = ' - ' titles = ep_name.split(sep) titles.sort(key=len, reverse=True) log.debug('TITLES: {0}', titles) + return titles diff --git a/medusa/notifiers/pushalot.py b/medusa/notifiers/pushalot.py index 4123044b95..b5b0c6bd3f 100644 --- a/medusa/notifiers/pushalot.py +++ b/medusa/notifiers/pushalot.py @@ -25,28 +25,28 @@ def test_notify(self, pushalot_authorizationtoken): force=True ) - def notify_snatch(self, ep_name, is_proper): + def notify_snatch(self, title, message): if app.PUSHALOT_NOTIFY_ONSNATCH: self._sendPushalot( pushalot_authorizationtoken=None, - event=common.notifyStrings[(common.NOTIFY_SNATCH, common.NOTIFY_SNATCH_PROPER)[is_proper]], - message=ep_name + event=title, + message=message ) - def notify_download(self, ep_name): + def notify_download(self, ep_obj): if app.PUSHALOT_NOTIFY_ONDOWNLOAD: self._sendPushalot( pushalot_authorizationtoken=None, event=common.notifyStrings[common.NOTIFY_DOWNLOAD], - message=ep_name + message=ep_obj.pretty_name_with_quality() ) - def notify_subtitle_download(self, ep_name, lang): + def notify_subtitle_download(self, ep_obj, lang): if app.PUSHALOT_NOTIFY_ONSUBTITLEDOWNLOAD: self._sendPushalot( pushalot_authorizationtoken=None, event=common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD], - message='{}:{}'.format(ep_name, lang) + message='{}:{}'.format(ep_obj.pretty_name(), lang) ) def notify_git_update(self, new_version='??'): diff --git a/medusa/notifiers/pushbullet.py b/medusa/notifiers/pushbullet.py index 691fb96ac5..e3e7440a41 100644 --- a/medusa/notifiers/pushbullet.py +++ b/medusa/notifiers/pushbullet.py @@ -41,28 +41,28 @@ def get_devices(self, pushbullet_api): except ValueError: return {} - def notify_snatch(self, ep_name, is_proper): + def notify_snatch(self, title, message): if app.PUSHBULLET_NOTIFY_ONSNATCH: self._sendPushbullet( pushbullet_api=None, - 
event=common.notifyStrings[(common.NOTIFY_SNATCH, common.NOTIFY_SNATCH_PROPER)[is_proper]] + ' : ' + ep_name, - message=ep_name + event=title, + message=message ) - def notify_download(self, ep_name): + def notify_download(self, ep_obj): if app.PUSHBULLET_NOTIFY_ONDOWNLOAD: self._sendPushbullet( pushbullet_api=None, - event=common.notifyStrings[common.NOTIFY_DOWNLOAD] + ' : ' + ep_name, - message=ep_name + event=common.notifyStrings[common.NOTIFY_DOWNLOAD] + ': ' + ep_obj.pretty_name_with_quality(), + message=ep_obj.pretty_name_with_quality() ) - def notify_subtitle_download(self, ep_name, lang): + def notify_subtitle_download(self, ep_obj, lang): if app.PUSHBULLET_NOTIFY_ONSUBTITLEDOWNLOAD: self._sendPushbullet( pushbullet_api=None, - event=common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD] + ' : ' + ep_name + ' : ' + lang, - message=ep_name + ': ' + lang + event=common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD] + ': ' + ep_obj.pretty_name() + ': ' + lang, + message=ep_obj.pretty_name() + ': ' + lang ) def notify_git_update(self, new_version='??'): diff --git a/medusa/notifiers/pushover.py b/medusa/notifiers/pushover.py index cfe688e0c7..352cda24aa 100644 --- a/medusa/notifiers/pushover.py +++ b/medusa/notifiers/pushover.py @@ -1,10 +1,10 @@ # coding=utf-8 +"""Pushover notifier module.""" from __future__ import unicode_literals import logging import time -from builtins import object from medusa import app from medusa.common import ( @@ -13,18 +13,14 @@ NOTIFY_GIT_UPDATE_TEXT, NOTIFY_LOGIN, NOTIFY_LOGIN_TEXT, - NOTIFY_SNATCH, - NOTIFY_SNATCH_PROPER, NOTIFY_SUBTITLE_DOWNLOAD, notifyStrings, ) -from medusa.helper.exceptions import ex from medusa.logger.adapters.style import BraceAdapter from requests.compat import urlencode from six.moves.http_client import HTTPSConnection -from six.moves.urllib.error import HTTPError log = BraceAdapter(logging.getLogger(__name__)) log.logger.addHandler(logging.NullHandler()) @@ -33,149 +29,177 @@ class Notifier(object): + 
"""Pushover notifier class.""" + def __init__(self): + """Initialize Pushover notifier.""" pass - def test_notify(self, userKey=None, apiKey=None): - return self._notifyPushover('This is a test notification from Medusa', 'Test', userKey=userKey, apiKey=apiKey, force=True) - - def _sendPushover(self, msg, title, sound=None, userKey=None, apiKey=None): + def test_notify(self, user_key=None, api_key=None): """ - Sends a pushover notification to the address provided + Send a test notification. - msg: The message to send (unicode) - title: The title of the message - sound: The notification sound to use - userKey: The pushover user id to send the message to (or to subscribe with) - apiKey: The pushover api key to use - returns: True if the message succeeded, False otherwise + :return: True for no issue or False if there was an error """ + return self._notify_pushover('This is a test notification from Medusa', 'Test', user_key=user_key, api_key=api_key, force=True) - if userKey is None: - userKey = app.PUSHOVER_USERKEY + def _send_pushover(self, msg, title, sound=None, user_key=None, api_key=None, priority=None): + """ + Send a pushover notification to the address provided. 
+ + :param msg: The message to send (unicode) + :param title: The title of the message + :param sound: The notification sound to use + :param user_key: The pushover user id to send the message to (or to subscribe with) + :param api_key: The pushover api key to use + :param priority: The pushover priority to use + :return: True if the message succeeded, False otherwise + """ + if user_key is None: + user_key = app.PUSHOVER_USERKEY - if apiKey is None: - apiKey = app.PUSHOVER_APIKEY + if api_key is None: + api_key = app.PUSHOVER_APIKEY if sound is None: sound = app.PUSHOVER_SOUND - log.debug(u'Pushover API KEY in use: {0}', apiKey) + if priority is None: + priority = app.PUSHOVER_PRIORITY # build up the URL and parameters msg = msg.strip() - # send the request to pushover - try: - if app.PUSHOVER_SOUND != 'default': - args = { - 'token': apiKey, - 'user': userKey, - 'title': title.encode('utf-8'), - 'message': msg.encode('utf-8'), - 'timestamp': int(time.time()), - 'retry': 60, - 'expire': 3600, - 'sound': sound, - } - else: - # sound is default, so don't send it - args = { - 'token': apiKey, - 'user': userKey, - 'title': title.encode('utf-8'), - 'message': msg.encode('utf-8'), - 'timestamp': int(time.time()), - 'retry': 60, - 'expire': 3600, - } - - if app.PUSHOVER_DEVICE: - args['device'] = ','.join(app.PUSHOVER_DEVICE) - - conn = HTTPSConnection('api.pushover.net:443') - conn.request('POST', '/1/messages.json', - urlencode(args), {'Content-type': 'application/x-www-form-urlencoded'}) - - except HTTPError as e: - # if we get an error back that doesn't have an error code then who knows what's really happening - if not hasattr(e, 'code'): - log.error(u'Pushover notification failed. 
{}', ex(e)) - return False + # default args + args = { + 'token': api_key, + 'user': user_key, + 'title': title.encode('utf-8'), + 'message': msg.encode('utf-8'), + 'timestamp': int(time.time()), + 'retry': 60, + 'expire': 3600, + 'priority': priority, + } + + # If sound is not default, add it. + if sound != 'default': + args['sound'] = sound + + if app.PUSHOVER_DEVICE: + args['device'] = ','.join(app.PUSHOVER_DEVICE) + + log.debug('PUSHOVER: Sending notice with details: title="{0}" message="{1}", priority={2}, sound={3}', + args['title'], args['message'], priority, sound) + + conn = HTTPSConnection('api.pushover.net:443') + conn.request('POST', '/1/messages.json', + urlencode(args), {'Content-type': 'application/x-www-form-urlencoded'}) + conn_resp = conn.getresponse() + + if conn_resp.status == 200: + log.info('Pushover notification successful.') + return True + + # HTTP status 404 if the provided email address isn't a Pushover user. + elif conn_resp.status == 404: + log.warning('Username is wrong/not a pushover email. Pushover will send an email to it') + return False + + # For HTTP status code 401's, it is because you are passing in either an invalid token, or the user has not added your service. + elif conn_resp.status == 401: + # HTTP status 401 if the user doesn't have the service added + subscribe_note = self._send_pushover(msg, title, sound=sound, user_key=user_key, api_key=api_key) + if subscribe_note: + log.debug('Subscription sent') + return True else: - log.error(u'Pushover notification failed. Error code: {0}', e.code) - - # HTTP status 404 if the provided email address isn't a Pushover user. - if e.code == 404: - log.warning(u'Username is wrong/not a pushover email. Pushover will send an email to it') - return False - - # For HTTP status code 401's, it is because you are passing in either an invalid token, or the user has not added your service. 
- elif e.code == 401: - - # HTTP status 401 if the user doesn't have the service added - subscribeNote = self._sendPushover(msg, title, sound=sound, userKey=userKey, apiKey=apiKey) - if subscribeNote: - log.debug(u'Subscription sent') - return True - else: - log.error(u'Subscription could not be sent') - return False - - # If you receive an HTTP status code of 400, it is because you failed to send the proper parameters - elif e.code == 400: - log.error(u'Wrong data sent to pushover') - return False - - # If you receive a HTTP status code of 429, it is because the message limit has been reached (free limit is 7,500) - elif e.code == 429: - log.error(u'Pushover API message limit reached - try a different API key') - return False - - log.info(u'Pushover notification successful.') - return True - - def notify_snatch(self, ep_name, is_proper): - title=notifyStrings[(NOTIFY_SNATCH, NOTIFY_SNATCH_PROPER)[is_proper]] + log.error('Subscription could not be sent') + + # If you receive an HTTP status code of 400, it is because you failed to send the proper parameters + elif conn_resp.status == 400: + log.error('Wrong keys sent to pushover') + return False + + # If you receive a HTTP status code of 429, it is because the message limit has been reached (free limit is 7,500) + elif conn_resp.status == 429: + log.error('Pushover API message limit reached - try a different API key') + return False + + # Something else has gone wrong... who knows what's really happening + else: + log.error('Pushover notification failed. HTTP response code: {0}', conn_resp.status) + return False + + def notify_snatch(self, title, message): + """ + Send a notification that an episode was snatched. + + :param ep_obj: The object of the episode snatched + :param is_proper: Boolean. 
If snatch is proper or not + """ if app.PUSHOVER_NOTIFY_ONSNATCH: - self._notifyPushover(title, ep_name) + self._notify_pushover(title, message) + + def notify_download(self, ep_obj, title=notifyStrings[NOTIFY_DOWNLOAD]): + """ + Send a notification that an episode was downloaded. - def notify_download(self, ep_name, title=notifyStrings[NOTIFY_DOWNLOAD]): + :param ep_obj: The object of the episode downloaded + :param title: The title of the notification to send + """ if app.PUSHOVER_NOTIFY_ONDOWNLOAD: - self._notifyPushover(title, ep_name) + self._notify_pushover(title, ep_obj.pretty_name_with_quality()) - def notify_subtitle_download(self, ep_name, lang, title=notifyStrings[NOTIFY_SUBTITLE_DOWNLOAD]): + def notify_subtitle_download(self, ep_obj, lang, title=notifyStrings[NOTIFY_SUBTITLE_DOWNLOAD]): + """ + Send a notification that subtitles for an episode were downloaded. + + :param ep_obj: The object of the episode subtitles were downloaded for + :param lang: The language of the downloaded subtitles + :param title: The title of the notification to send + """ if app.PUSHOVER_NOTIFY_ONSUBTITLEDOWNLOAD: - self._notifyPushover(title, ep_name + ': ' + lang) + self._notify_pushover(title, ep_obj.pretty_name() + ': ' + lang) def notify_git_update(self, new_version='??'): + """ + Send a notification that Medusa was updated. + + :param new_version: The commit Medusa was updated to + """ if app.USE_PUSHOVER: update_text = notifyStrings[NOTIFY_GIT_UPDATE_TEXT] title = notifyStrings[NOTIFY_GIT_UPDATE] - self._notifyPushover(title, update_text + new_version) + self._notify_pushover(title, update_text + new_version) def notify_login(self, ipaddress=''): + """ + Send a notification that Medusa was logged into remotely. 
+ + :param ipaddress: The IP address Medusa was logged into from + """ if app.USE_PUSHOVER: update_text = notifyStrings[NOTIFY_LOGIN_TEXT] title = notifyStrings[NOTIFY_LOGIN] - self._notifyPushover(title, update_text.format(ipaddress)) + self._notify_pushover(title, update_text.format(ipaddress)) - def _notifyPushover(self, title, message, sound=None, userKey=None, apiKey=None, force=False): + def _notify_pushover(self, title, message, sound=None, user_key=None, api_key=None, priority=None, force=False): """ - Sends a pushover notification based on the provided info or Medusa config - - title: The title of the notification to send - message: The message string to send - sound: The notification sound to use - userKey: The userKey to send the notification to - apiKey: The apiKey to use to send the notification - force: Enforce sending, for instance for testing + Send a pushover notification based on the provided info or Medusa config. + + :param title: The title of the notification to send + :param message: The message string to send + :param sound: The notification sound to use + :param user_key: The userKey to send the notification to + :param api_key: The apiKey to use to send the notification + :param priority: The pushover priority to use + :param force: Enforce sending, for instance for testing """ - if not app.USE_PUSHOVER and not force: - log.debug(u'Notification for Pushover not enabled, skipping this notification') + log.debug('Notification for Pushover not enabled, skipping this notification') return False - log.debug(u'Sending notification for {0}', message) + log.debug('Sending notification for {0}', message) - return self._sendPushover(message, title, sound=sound, userKey=userKey, apiKey=apiKey) + return self._send_pushover(message, title, sound=sound, user_key=user_key, api_key=api_key, priority=priority) diff --git a/medusa/notifiers/pytivo.py b/medusa/notifiers/pytivo.py index 1d4b0afeb6..498c16fdaf 100644 --- a/medusa/notifiers/pytivo.py +++ 
b/medusa/notifiers/pytivo.py @@ -20,13 +20,13 @@ class Notifier(object): - def notify_snatch(self, ep_name, is_proper): + def notify_snatch(self, title, message): pass - def notify_download(self, ep_name): + def notify_download(self, ep_obj): pass - def notify_subtitle_download(self, ep_name, lang): + def notify_subtitle_download(self, ep_obj, lang): pass def notify_git_update(self, new_version): diff --git a/medusa/notifiers/slack.py b/medusa/notifiers/slack.py index a3a3bfa798..db925c75a6 100644 --- a/medusa/notifiers/slack.py +++ b/medusa/notifiers/slack.py @@ -21,7 +21,7 @@ class Notifier(object): """Slack notifier class.""" - def notify_snatch(self, ep_name, is_proper): + def notify_snatch(self, title, message): """ Send a notification to a Slack channel when an episode is snatched. @@ -29,10 +29,10 @@ def notify_snatch(self, ep_name, is_proper): :param is_proper: Boolean. If snatch is proper or not """ if app.SLACK_NOTIFY_SNATCH: - message = common.notifyStrings[(common.NOTIFY_SNATCH, common.NOTIFY_SNATCH_PROPER)[is_proper]] - self._notify_slack('{message} : {ep_name}'.format(message=message, ep_name=ep_name)) + self._notify_slack('{title}: {message}'.format(title=title, + message=message)) - def notify_download(self, ep_name): + def notify_download(self, ep_obj): """ Send a notification to a slack channel when an episode is downloaded. @@ -40,9 +40,10 @@ def notify_download(self, ep_name): """ if app.SLACK_NOTIFY_DOWNLOAD: message = common.notifyStrings[common.NOTIFY_DOWNLOAD] - self._notify_slack('{message} : {ep_name}'.format(message=message, ep_name=ep_name)) + self._notify_slack('{message}: {ep_name}'.format(message=message, + ep_name=ep_obj.pretty_name_with_quality())) - def notify_subtitle_download(self, ep_name, lang): + def notify_subtitle_download(self, ep_obj, lang): """ Send a notification to a Slack channel when subtitles for an episode are downloaded. 
@@ -51,7 +52,8 @@ def notify_subtitle_download(self, ep_name, lang): """ if app.SLACK_NOTIFY_SUBTITLEDOWNLOAD: message = common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD] - self._notify_slack('{message} {ep_name}: {lang}'.format(message=message, ep_name=ep_name, lang=lang)) + self._notify_slack('{message} {ep_name}: {lang}'.format(message=message, ep_name=ep_obj.pretty_name(), + lang=lang)) def notify_git_update(self, new_version='??'): """ @@ -62,7 +64,8 @@ def notify_git_update(self, new_version='??'): if app.USE_SLACK: message = common.notifyStrings[common.NOTIFY_GIT_UPDATE_TEXT] title = common.notifyStrings[common.NOTIFY_GIT_UPDATE] - self._notify_slack('{title} - {message} {version}'.format(title=title, message=message, version=new_version)) + self._notify_slack('{title} - {message} {version}'.format(title=title, message=message, + version=new_version)) def notify_login(self, ipaddress=''): """ diff --git a/medusa/notifiers/synoindex.py b/medusa/notifiers/synoindex.py index 7f1f24a83d..5ead8707dd 100644 --- a/medusa/notifiers/synoindex.py +++ b/medusa/notifiers/synoindex.py @@ -16,13 +16,13 @@ class Notifier(object): - def notify_snatch(self, ep_name, is_proper): + def notify_snatch(self, title, message): pass - def notify_download(self, ep_name): + def notify_download(self, ep_obj): pass - def notify_subtitle_download(self, ep_name, lang): + def notify_subtitle_download(self, ep_obj, lang): pass def notify_git_update(self, new_version): diff --git a/medusa/notifiers/synology_notifier.py b/medusa/notifiers/synology_notifier.py index 51a47645bf..614b936e95 100644 --- a/medusa/notifiers/synology_notifier.py +++ b/medusa/notifiers/synology_notifier.py @@ -16,31 +16,33 @@ class Notifier(object): - def notify_snatch(self, ep_name, is_proper): + def notify_snatch(self, title, message): if app.SYNOLOGYNOTIFIER_NOTIFY_ONSNATCH: - self._send_synologyNotifier(ep_name, common.notifyStrings[(common.NOTIFY_SNATCH, common.NOTIFY_SNATCH_PROPER)[is_proper]]) + 
self._send_synologyNotifier(title, message) - def notify_download(self, ep_name): + def notify_download(self, ep_obj): if app.SYNOLOGYNOTIFIER_NOTIFY_ONDOWNLOAD: - self._send_synologyNotifier(ep_name, common.notifyStrings[common.NOTIFY_DOWNLOAD]) + self._send_synologyNotifier(common.notifyStrings[common.NOTIFY_DOWNLOAD], + ep_obj.pretty_name_with_quality()) - def notify_subtitle_download(self, ep_name, lang): + def notify_subtitle_download(self, ep_obj, lang): if app.SYNOLOGYNOTIFIER_NOTIFY_ONSUBTITLEDOWNLOAD: - self._send_synologyNotifier(ep_name + ': ' + lang, common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD]) + self._send_synologyNotifier(common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD], + ep_obj.pretty_name() + ': ' + lang) def notify_git_update(self, new_version='??'): if app.USE_SYNOLOGYNOTIFIER: update_text = common.notifyStrings[common.NOTIFY_GIT_UPDATE_TEXT] title = common.notifyStrings[common.NOTIFY_GIT_UPDATE] - self._send_synologyNotifier(update_text + new_version, title) + self._send_synologyNotifier(title, update_text + new_version) def notify_login(self, ipaddress=''): if app.USE_SYNOLOGYNOTIFIER: update_text = common.notifyStrings[common.NOTIFY_LOGIN_TEXT] title = common.notifyStrings[common.NOTIFY_LOGIN] - self._send_synologyNotifier(update_text.format(ipaddress), title) + self._send_synologyNotifier(title, update_text.format(ipaddress)) - def _send_synologyNotifier(self, message, title): + def _send_synologyNotifier(self, title, message): synodsmnotify_cmd = ['/usr/syno/bin/synodsmnotify', '@administrators', title, message] log.info(u'Executing command {0}', synodsmnotify_cmd) log.debug(u'Absolute path to command: {0}', os.path.abspath(synodsmnotify_cmd[0])) diff --git a/medusa/notifiers/telegram.py b/medusa/notifiers/telegram.py index d3b62980e1..1243d52963 100644 --- a/medusa/notifiers/telegram.py +++ b/medusa/notifiers/telegram.py @@ -12,8 +12,6 @@ NOTIFY_GIT_UPDATE_TEXT, NOTIFY_LOGIN, NOTIFY_LOGIN_TEXT, - NOTIFY_SNATCH, - 
NOTIFY_SNATCH_PROPER, NOTIFY_SUBTITLE_DOWNLOAD, notifyStrings, ) @@ -89,18 +87,17 @@ def _send_telegram_msg(self, title, msg, user_id=None, api_key=None): log.info(message) return success, message - def notify_snatch(self, ep_name, is_proper): + def notify_snatch(self, title, message): """ Sends a Telegram notification when an episode is snatched :param ep_name: The name of the episode snatched :param is_proper: Boolean. If snatch is proper or not """ - title = notifyStrings[(NOTIFY_SNATCH, NOTIFY_SNATCH_PROPER)[is_proper]] if app.TELEGRAM_NOTIFY_ONSNATCH: - self._notify_telegram(title, ep_name) + self._notify_telegram(title, message) - def notify_download(self, ep_name, title=notifyStrings[NOTIFY_DOWNLOAD]): + def notify_download(self, ep_obj, title=notifyStrings[NOTIFY_DOWNLOAD]): """ Sends a Telegram notification when an episode is downloaded @@ -108,9 +105,9 @@ def notify_download(self, ep_name, title=notifyStrings[NOTIFY_DOWNLOAD]): :param title: The title of the notification to send """ if app.TELEGRAM_NOTIFY_ONDOWNLOAD: - self._notify_telegram(title, ep_name) + self._notify_telegram(title, ep_obj.pretty_name_with_quality()) - def notify_subtitle_download(self, ep_name, lang, title=notifyStrings[NOTIFY_SUBTITLE_DOWNLOAD]): + def notify_subtitle_download(self, ep_obj, lang, title=notifyStrings[NOTIFY_SUBTITLE_DOWNLOAD]): """ Sends a Telegram notification when subtitles for an episode are downloaded @@ -119,7 +116,7 @@ def notify_subtitle_download(self, ep_name, lang, title=notifyStrings[NOTIFY_SUB :param title: The title of the notification to send """ if app.TELEGRAM_NOTIFY_ONSUBTITLEDOWNLOAD: - self._notify_telegram(title, '%s: %s' % (ep_name, lang)) + self._notify_telegram(title, '%s: %s' % (ep_obj.pretty_name(), lang)) def notify_git_update(self, new_version='??'): """ diff --git a/medusa/notifiers/trakt.py b/medusa/notifiers/trakt.py index bfb8b3dad5..2ebf252510 100644 --- a/medusa/notifiers/trakt.py +++ b/medusa/notifiers/trakt.py @@ -20,15 +20,15 @@ 
class Notifier(object): """A "notifier" for trakt.tv which keeps track of what has and hasn't been added to your library.""" - def notify_snatch(self, ep_name, is_proper): + def notify_snatch(self, title, message): """Trakt don't support this method.""" pass - def notify_download(self, ep_name): + def notify_download(self, ep_obj): """Trakt don't support this method.""" pass - def notify_subtitle_download(self, ep_name, lang): + def notify_subtitle_download(self, ep_obj, lang): """Trakt don't support this method.""" pass diff --git a/medusa/notifiers/tweet.py b/medusa/notifiers/tweet.py index 90ffa9c735..c3805cde49 100644 --- a/medusa/notifiers/tweet.py +++ b/medusa/notifiers/tweet.py @@ -30,7 +30,7 @@ class Notifier(object): ACCESS_TOKEN_URL = 'https://api.twitter.com/oauth/access_token' AUTHORIZATION_URL = 'https://api.twitter.com/oauth/authorize' - def notify_snatch(self, ep_name, is_proper): + def notify_snatch(self, title, message): """ Send a notification that an episode was snatched. @@ -38,18 +38,19 @@ def notify_snatch(self, ep_name, is_proper): :param is_proper: Boolean. If snatch is proper or not """ if app.TWITTER_NOTIFY_ONSNATCH: - self._notify_twitter('{0}: {1}'.format(common.notifyStrings[(common.NOTIFY_SNATCH, common.NOTIFY_SNATCH_PROPER)[is_proper]], ep_name)) + self._notify_twitter('{0}: {1}'.format(title, message)) - def notify_download(self, ep_name): + def notify_download(self, ep_obj): """ Send a notification that an episode was downloaded. :param ep_name: The name of the episode downloaded """ if app.TWITTER_NOTIFY_ONDOWNLOAD: - self._notify_twitter('{0}: {1}'.format(common.notifyStrings[common.NOTIFY_DOWNLOAD], ep_name)) + self._notify_twitter('{0}: {1}'.format(common.notifyStrings[common.NOTIFY_DOWNLOAD], + ep_obj.pretty_name_with_quality())) - def notify_subtitle_download(self, ep_name, lang): + def notify_subtitle_download(self, ep_obj, lang): """ Send a notification that subtitles for an episode were downloaded. 
@@ -57,7 +58,8 @@ def notify_subtitle_download(self, ep_name, lang): :param lang: The language of the downloaded subtitles """ if app.TWITTER_NOTIFY_ONSUBTITLEDOWNLOAD: - self._notify_twitter('{0} {1}: {2}'.format(common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD], ep_name, lang)) + self._notify_twitter('{0} {1}: {2}'.format(common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD], + ep_obj.pretty_name(), lang)) def notify_git_update(self, new_version='??'): """ diff --git a/medusa/post_processor.py b/medusa/post_processor.py index 2b85b84af8..095cd84628 100644 --- a/medusa/post_processor.py +++ b/medusa/post_processor.py @@ -1286,7 +1286,7 @@ def process(self): history.log_download(cur_ep, self.file_path, new_ep_quality, self.release_group, new_ep_version) # send notifications - notifiers.notify_download(ep_obj._format_pattern('%SN - %Sx%0E - %EN - %QN')) + notifiers.notify_download(ep_obj) # do the library update for KODI notifiers.kodi_notifier.update_library(ep_obj.series.name) # do the library update for Plex diff --git a/medusa/providers/generic_provider.py b/medusa/providers/generic_provider.py index 56f14e84af..8782394384 100644 --- a/medusa/providers/generic_provider.py +++ b/medusa/providers/generic_provider.py @@ -76,12 +76,12 @@ def __init__(self, name): self.anime_only = False self.bt_cache_urls = [ 'http://reflektor.karmorra.info/torrent/{info_hash}.torrent', - 'https://torrent.cd/torrents/download/{info_hash}/.torrent', 'https://asnet.pw/download/{info_hash}/', 'http://p2pdl.com/download/{info_hash}', 'http://itorrents.org/torrent/{info_hash}.torrent', 'http://thetorrent.org/torrent/{info_hash}.torrent', 'https://cache.torrentgalaxy.org/get/{info_hash}', + 'https://www.seedpeer.me/torrent/{info_hash}', ] self.cache = tv.Cache(self) self.enable_backlog = False diff --git a/medusa/providers/torrent/html/anidex.py b/medusa/providers/torrent/html/anidex.py index 11fdb9fb3b..7940fbd907 100644 --- a/medusa/providers/torrent/html/anidex.py +++ 
b/medusa/providers/torrent/html/anidex.py @@ -36,7 +36,6 @@ def __init__(self): # Miscellaneous Options self.supports_absolute_numbering = True - self.anime_only = True # Torrent Stats self.minseed = None @@ -51,13 +50,17 @@ def search(self, search_strings, age=0, ep_obj=None, **kwargs): :param search_strings: A dict with mode (key) and the search value (value) :param age: Not used - :param ep_obj: Not used + :param ep_obj: An episode object :returns: A list of search results (structure) """ results = [] + category = '1,2,3' + if ep_obj and not ep_obj.series.is_anime: + category = '4,5' + search_params = { - 'id': '1,2,3' + 'id': category } for mode in search_strings: diff --git a/medusa/providers/torrent/html/btdb.py b/medusa/providers/torrent/html/btdb.py index 2083d0afdf..259bc6cfcc 100644 --- a/medusa/providers/torrent/html/btdb.py +++ b/medusa/providers/torrent/html/btdb.py @@ -112,7 +112,7 @@ def parse(self, data, mode): try: title = row.find('h2').find('a').get('title') - download_url = row.find('div').find('a').get('href') + download_url = row.find('div').find('a').get('href') + self._custom_trackers if not all([title, download_url]): continue diff --git a/medusa/providers/torrent/html/hdtorrents.py b/medusa/providers/torrent/html/hdtorrents.py index 813713b3ed..809a5f1deb 100644 --- a/medusa/providers/torrent/html/hdtorrents.py +++ b/medusa/providers/torrent/html/hdtorrents.py @@ -117,6 +117,9 @@ def parse(self, data, mode): :return: A list of items found """ + # Units + units = ['B', 'KIB', 'MIB', 'GIB', 'TIB', 'PIB'] + items = [] with BS4Parser(data, 'html5lib') as html: @@ -158,7 +161,7 @@ def parse(self, data, mode): continue torrent_size = cells[labels.index('Size')].get_text() - size = convert_size(torrent_size) or -1 + size = convert_size(torrent_size, units=units) or -1 pubdate_raw = cells[labels.index('Added')].get_text() pubdate = self.parse_pubdate(pubdate_raw) diff --git a/medusa/providers/torrent/html/torrentbytes.py 
b/medusa/providers/torrent/html/torrentbytes.py index bba94f24b7..7b896a48d9 100644 --- a/medusa/providers/torrent/html/torrentbytes.py +++ b/medusa/providers/torrent/html/torrentbytes.py @@ -42,7 +42,7 @@ def __init__(self): } # Proper Strings - self.proper_strings = ['PROPER', 'REPACK'] + self.proper_strings = ['PROPER', 'REPACK', 'REAL', 'RERIP'] # Miscellaneous Options self.freeleech = False diff --git a/medusa/providers/torrent/json/btn.py b/medusa/providers/torrent/json/btn.py index be3b4ea8d6..feb42ea434 100644 --- a/medusa/providers/torrent/json/btn.py +++ b/medusa/providers/torrent/json/btn.py @@ -280,7 +280,7 @@ def _api_call(self, params=None, results_per_page=300, offset=0): log.warning('Incorrect authentication credentials.') elif (code, message) == (-32002, 'Call Limit Exceeded'): log.warning('You have exceeded the limit of 150 calls per hour.') - elif code in (500, 502, 524): + elif code in (500, 502, 521, 524): log.warning('Provider is currently unavailable. Error: {code} {text}', {'code': code, 'text': message}) else: diff --git a/medusa/providers/torrent/rss/nyaa.py b/medusa/providers/torrent/rss/nyaa.py index f06a5afb65..068ce39ba3 100644 --- a/medusa/providers/torrent/rss/nyaa.py +++ b/medusa/providers/torrent/rss/nyaa.py @@ -32,7 +32,6 @@ def __init__(self): # Miscellaneous Options self.supports_absolute_numbering = True - self.anime_only = True self.confirmed = False # Torrent Stats @@ -48,15 +47,19 @@ def search(self, search_strings, age=0, ep_obj=None, **kwargs): :param search_strings: A dict with mode (key) and the search value (value) :param age: Not used - :param ep_obj: Not used + :param ep_obj: An episode object :returns: A list of search results (structure) """ results = [] # Search Params + category = '1_0' + if ep_obj and not ep_obj.series.is_anime: + category = '4_0' + search_params = { 'page': 'rss', - 'c': '1_0', # All Anime + 'c': category, 'f': 0, # No filter 'q': '', } diff --git a/medusa/search/__init__.py 
b/medusa/search/__init__.py index 0f09b6c588..17dd7210fe 100644 --- a/medusa/search/__init__.py +++ b/medusa/search/__init__.py @@ -3,9 +3,23 @@ """Search module for all Medusa searches.""" from __future__ import unicode_literals +from enum import Enum + BACKLOG_SEARCH = 10 DAILY_SEARCH = 20 FAILED_SEARCH = 30 FORCED_SEARCH = 40 MANUAL_SEARCH = 50 PROPER_SEARCH = 60 +SNATCH_RESULT = 70 + + +class SearchType(Enum): + """Enum with search types.""" + + BACKLOG_SEARCH = 10 + DAILY_SEARCH = 20 + FAILED_SEARCH = 30 + FORCED_SEARCH = 40 + MANUAL_SEARCH = 50 + PROPER_SEARCH = 60 diff --git a/medusa/search/core.py b/medusa/search/core.py index d747c0965a..a39d9614f5 100644 --- a/medusa/search/core.py +++ b/medusa/search/core.py @@ -211,7 +211,7 @@ def snatch_episode(result): curEpObj.subtitles_lastsearch = u'0001-01-01 00:00:00' # Need to store the correct is_proper. Not use the old one - curEpObj.is_proper = True if result.proper_tags else False + curEpObj.is_proper = is_proper curEpObj.version = 0 curEpObj.manually_searched = result.manually_searched @@ -219,14 +219,7 @@ def snatch_episode(result): sql_l.append(curEpObj.get_sql()) if curEpObj.status != common.DOWNLOADED: - notify_message = curEpObj.formatted_filename(u'%SN - %Sx%0E - %EN - %QN') - if all([app.SEEDERS_LEECHERS_IN_NOTIFY, result.seeders not in (-1, None), - result.leechers not in (-1, None)]): - notifiers.notify_snatch(u'{0} with {1} seeders and {2} leechers from {3}'.format - (notify_message, result.seeders, - result.leechers, result.provider.name), is_proper) - else: - notifiers.notify_snatch(u'{0} from {1}'.format(notify_message, result.provider.name), is_proper) + notifiers.notify_snatch(curEpObj, result) if app.USE_TRAKT and app.TRAKT_SYNC_WATCHLIST: trakt_data.append((curEpObj.season, curEpObj.episode)) diff --git a/medusa/search/queue.py b/medusa/search/queue.py index edd96070fe..31bff45fd5 100644 --- a/medusa/search/queue.py +++ b/medusa/search/queue.py @@ -8,12 +8,11 @@ import threading import time 
import traceback -from builtins import str -from medusa import app, common, failed_history, generic_queue, history, providers, ui +from medusa import app, common, failed_history, generic_queue, history, ui from medusa.helpers import pretty_file_size from medusa.logger.adapters.style import BraceAdapter -from medusa.search import BACKLOG_SEARCH, DAILY_SEARCH, FAILED_SEARCH, FORCED_SEARCH, MANUAL_SEARCH +from medusa.search import BACKLOG_SEARCH, DAILY_SEARCH, FAILED_SEARCH, FORCED_SEARCH, SNATCH_RESULT, SearchType from medusa.search.core import ( search_for_needed_episodes, search_providers, @@ -43,7 +42,7 @@ def is_in_queue(self, show, segment): """Check if item is in queue.""" for cur_item in self.queue: if isinstance(cur_item, (BacklogQueueItem, FailedQueueItem, - ForcedSearchQueueItem, ManualSnatchQueueItem)) \ + ForcedSearchQueueItem, SnatchQueueItem)) \ and cur_item.show == show and cur_item.segment == segment: return True return False @@ -91,7 +90,7 @@ def add_item(self, item): # daily searches generic_queue.GenericQueue.add_item(self, item) elif isinstance(item, (BacklogQueueItem, FailedQueueItem, - ManualSnatchQueueItem, ForcedSearchQueueItem)) \ + SnatchQueueItem, ForcedSearchQueueItem)) \ and not self.is_in_queue(item.show, item.segment): generic_queue.GenericQueue.add_item(self, item) else: @@ -187,7 +186,7 @@ def add_item(self, item): class SnatchQueue(generic_queue.GenericQueue): - """Queue for queuing ManualSnatchQueueItem objects (snatch jobs).""" + """Queue for queuing SnatchQueueItem objects (snatch jobs).""" def __init__(self): """Initialize the SnatchQueue object.""" @@ -231,9 +230,9 @@ def queue_length(self): def add_item(self, item): """ - Add a ManualSnatchQueueItem queue item. + Add a SnatchQueueItem queue item. 
- @param item: ManualSnatchQueueItem gueue object + @param item: SnatchQueueItem queue object """ if not self.is_in_queue(item.show, item.segment): # backlog searches @@ -286,7 +285,21 @@ def run(self): 'provider': result.provider.name, } ) - self.success = snatch_episode(result) + + # Set the search_type for the result. + result.search_type = SearchType.DAILY_SEARCH + + # Create the queue item + snatch_queue_item = SnatchQueueItem(result.series, result.episodes, result) + + # Add the queue item to the queue + app.manual_snatch_scheduler.action.add_item(snatch_queue_item) + + self.success = False + while snatch_queue_item.success is False: + if snatch_queue_item.started and snatch_queue_item.success: + self.success = True + time.sleep(1) # give the CPU a break time.sleep(common.cpu_presets[app.CPU_PRESET]) @@ -373,7 +386,21 @@ def run(self): 'provider': result.provider.name, } ) - self.success = snatch_episode(result) + + # Set the search_type for the result. + result.search_type = SearchType.FORCED_SEARCH + + # Create the queue item + snatch_queue_item = SnatchQueueItem(result.series, result.episodes, result) + + # Add the queue item to the queue + app.manual_snatch_scheduler.action.add_item(snatch_queue_item) + + self.success = False + while snatch_queue_item.success is False: + if snatch_queue_item.started and snatch_queue_item.success: + self.success = True + time.sleep(1) # Give the CPU a break time.sleep(common.cpu_presets[app.CPU_PRESET]) @@ -415,12 +442,10 @@ def run(self): self.finish() -class ManualSnatchQueueItem(generic_queue.QueueItem): +class SnatchQueueItem(generic_queue.QueueItem): """ A queue item that can be used to queue the snatch of a search result. - Currently used for the snatchSelection feature. - @param show: A show object @param segment: A list of episode objects @param provider: The provider id. For example nyaatorrent and not NyaaTorrent. 
Or usernet_crawler and not Usenet-Crawler @@ -429,60 +454,49 @@ class ManualSnatchQueueItem(generic_queue.QueueItem): @return: The run() methods snatches the episode(s) if possible. """ - def __init__(self, show, segment, provider, cached_result): + def __init__(self, show, segment, search_result): """Initialize the class.""" - generic_queue.QueueItem.__init__(self, u'Manual Search', MANUAL_SEARCH) + generic_queue.QueueItem.__init__(self, u'Snatch Result', SNATCH_RESULT) self.priority = generic_queue.QueuePriorities.HIGH - self.name = 'MANUALSNATCH-' + str(show.indexerid) + self.name = 'SNATCH-{indexer_id}'.format(indexer_id=search_result.series.indexerid) self.success = None self.started = None - self.results = None - self.provider = provider self.segment = segment self.show = show - self.cached_result = cached_result + self.results = None + self.search_result = search_result def run(self): """Run manual snatch job.""" generic_queue.QueueItem.run(self) self.started = True - result = providers.get_provider_class(self.provider).get_result(self.segment) - result.series = self.show - result.url = self.cached_result['url'] - result.quality = int(self.cached_result['quality']) - result.name = self.cached_result['name'] - result.size = int(self.cached_result['size']) - result.seeders = int(self.cached_result['seeders']) - result.leechers = int(self.cached_result['leechers']) - result.release_group = self.cached_result['release_group'] - result.version = int(self.cached_result['version']) - result.proper_tags = self.cached_result['proper_tags'].split('|') \ - if self.cached_result['proper_tags'] else '' - result.manually_searched = True + result = self.search_result try: - log.info('Beginning to manual snatch release: {name}', + log.info('Beginning to snatch release: {name}', {'name': result.name}) if result: if result.seeders not in (-1, None) and result.leechers not in (-1, None): log.info( 'Downloading {name} with {seeders} seeders and {leechers} leechers' - ' and size 
{size} from {provider}', { + ' and size {size} from {provider}, through a {search_type} search', { 'name': result.name, 'seeders': result.seeders, 'leechers': result.leechers, 'size': pretty_file_size(result.size), 'provider': result.provider.name, + 'search_type': result.search_type } ) else: log.info( - 'Downloading {name} with size: {size} from {provider}', { + 'Downloading {name} with size: {size} from {provider}, through a {search_type} search', { 'name': result.name, 'size': pretty_file_size(result.size), 'provider': result.provider.name, + 'search_type': result.search_type } ) self.success = snatch_episode(result) @@ -495,7 +509,7 @@ def run(self): except Exception: self.success = False - log.exception('Manual snatch failed! For result: {name}', {'name': result.name}) + log.exception('Snatch failed! For result: {name}', {'name': result.name}) ui.notifications.message('Error while snatching selected result', 'Unable to snatch the result for {name}'.format(name=result.name)) @@ -512,7 +526,7 @@ def __init__(self, show, segment): """Initialize the class.""" generic_queue.QueueItem.__init__(self, u'Backlog', BACKLOG_SEARCH) self.priority = generic_queue.QueuePriorities.LOW - self.name = 'BACKLOG-' + str(show.indexerid) + self.name = 'BACKLOG-{indexer_id}'.format(indexer_id=show.indexerid) self.success = None self.started = None @@ -553,7 +567,21 @@ def run(self): 'provider': result.provider.name, } ) - self.success = snatch_episode(result) + + # Set the search_type for the result. 
+ result.search_type = SearchType.BACKLOG_SEARCH + + # Create the queue item + snatch_queue_item = SnatchQueueItem(result.series, result.episodes, result) + + # Add the queue item to the queue + app.manual_snatch_scheduler.action.add_item(snatch_queue_item) + + self.success = False + while snatch_queue_item.success is False: + if snatch_queue_item.started and snatch_queue_item.success: + self.success = True + time.sleep(1) # give the CPU a break time.sleep(common.cpu_presets[app.CPU_PRESET]) @@ -579,7 +607,7 @@ def __init__(self, show, segment, down_cur_quality=False): """Initialize the class.""" generic_queue.QueueItem.__init__(self, u'Retry', FAILED_SEARCH) self.priority = generic_queue.QueuePriorities.HIGH - self.name = 'RETRY-' + str(show.indexerid) + self.name = 'RETRY-{indexer_id}'.format(indexer_id=show.indexerid) self.success = None self.started = None @@ -636,7 +664,21 @@ def run(self): 'provider': result.provider.name, } ) - self.success = snatch_episode(result) + + # Set the search_type for the result. 
+ result.search_type = SearchType.FAILED_SEARCH + + # Create the queue item + snatch_queue_item = SnatchQueueItem(result.series, result.episodes, result) + + # Add the queue item to the queue + app.manual_snatch_scheduler.action.add_item(snatch_queue_item) + + self.success = False + while snatch_queue_item.success is False: + if snatch_queue_item.started and snatch_queue_item.success: + self.success = True + time.sleep(1) # give the CPU a break time.sleep(common.cpu_presets[app.CPU_PRESET]) diff --git a/medusa/server/api/v2/base.py b/medusa/server/api/v2/base.py index 82af8f08ba..2af8ce71f0 100644 --- a/medusa/server/api/v2/base.py +++ b/medusa/server/api/v2/base.py @@ -319,7 +319,7 @@ def _get_page(self): except ValueError: self._raise_bad_request_error('Invalid page parameter') - def _get_limit(self, default=20, maximum=1000): + def _get_limit(self, default=20, maximum=10000): try: limit = self._parse(self.get_argument('limit', default=default)) if limit < 1 or limit > maximum: diff --git a/medusa/server/api/v2/config.py b/medusa/server/api/v2/config.py index f5fb768cd4..959f726bfa 100644 --- a/medusa/server/api/v2/config.py +++ b/medusa/server/api/v2/config.py @@ -72,43 +72,43 @@ class ConfigHandler(BaseRequestHandler): patches = { 'anonRedirect': StringField(app, 'ANON_REDIRECT'), 'emby.enabled': BooleanField(app, 'USE_EMBY'), - 'torrents.authType': StringField(app, 'TORRENT_AUTH_TYPE'), - 'torrents.dir': StringField(app, 'TORRENT_DIR'), - 'torrents.enabled': BooleanField(app, 'USE_TORRENTS'), - 'torrents.highBandwidth': StringField(app, 'TORRENT_HIGH_BANDWIDTH'), - 'torrents.host': StringField(app, 'TORRENT_HOST'), - 'torrents.label': StringField(app, 'TORRENT_LABEL'), - 'torrents.labelAnime': StringField(app, 'TORRENT_LABEL_ANIME'), - 'torrents.method': StringField(app, 'TORRENT_METHOD'), - 'torrents.password': StringField(app, 'TORRENT_PASSWORD'), - 'torrents.path': BooleanField(app, 'TORRENT_PATH'), - 'torrents.paused': BooleanField(app, 'TORRENT_PAUSED'), 
- 'torrents.rpcurl': StringField(app, 'TORRENT_RPCURL'), - 'torrents.seedLocation': StringField(app, 'TORRENT_SEED_LOCATION'), - 'torrents.seedTime': StringField(app, 'TORRENT_SEED_TIME'), - 'torrents.username': StringField(app, 'TORRENT_USERNAME'), - 'torrents.verifySSL': BooleanField(app, 'TORRENT_VERIFY_CERT'), - 'nzb.enabled': BooleanField(app, 'USE_NZBS'), - 'nzb.dir': StringField(app, 'NZB_DIR'), - 'nzb.method': StringField(app, 'NZB_METHOD'), - 'nzb.nzbget.category': StringField(app, 'NZBGET_CATEGORY'), - 'nzb.nzbget.categoryAnime': StringField(app, 'NZBGET_CATEGORY_ANIME'), - 'nzb.nzbget.categoryAnimeBacklog': StringField(app, 'NZBGET_CATEGORY_ANIME_BACKLOG'), - 'nzb.nzbget.categoryBacklog': StringField(app, 'NZBGET_CATEGORY_BACKLOG'), - 'nzb.nzbget.host': StringField(app, 'NZBGET_HOST'), - 'nzb.nzbget.password': StringField(app, 'NZBGET_PASSWORD'), - 'nzb.nzbget.priority': StringField(app, 'NZBGET_PRIORITY'), - 'nzb.nzbget.useHttps': BooleanField(app, 'NZBGET_USE_HTTPS'), - 'nzb.nzbget.username': StringField(app, 'NZBGET_USERNAME'), - 'nzb.sabnzbd.apiKey': StringField(app, 'SAB_APIKEY'), - 'nzb.sabnzbd.category': StringField(app, 'SAB_CATEGORY'), - 'nzb.sabnzbd.categoryAnime': StringField(app, 'SAB_CATEGORY_ANIME'), - 'nzb.sabnzbd.categoryAnimeBacklog': StringField(app, 'SAB_CATEGORY_ANIME_BACKLOG'), - 'nzb.sabnzbd.categoryBacklog': StringField(app, 'SAB_CATEGORY_BACKLOG'), - 'nzb.sabnzbd.forced': BooleanField(app, 'SAB_FORCED'), - 'nzb.sabnzbd.host': StringField(app, 'SAB_HOST'), - 'nzb.sabnzbd.password': StringField(app, 'SAB_PASSWORD'), - 'nzb.sabnzbd.username': StringField(app, 'SAB_USERNAME'), + 'clients.torrents.authType': StringField(app, 'TORRENT_AUTH_TYPE'), + 'clients.torrents.dir': StringField(app, 'TORRENT_DIR'), + 'clients.torrents.enabled': BooleanField(app, 'USE_TORRENTS'), + 'clients.torrents.highBandwidth': BooleanField(app, 'TORRENT_HIGH_BANDWIDTH'), + 'clients.torrents.host': StringField(app, 'TORRENT_HOST'), + 'clients.torrents.label': 
StringField(app, 'TORRENT_LABEL'), + 'clients.torrents.labelAnime': StringField(app, 'TORRENT_LABEL_ANIME'), + 'clients.torrents.method': StringField(app, 'TORRENT_METHOD'), + 'clients.torrents.password': StringField(app, 'TORRENT_PASSWORD'), + 'clients.torrents.path': BooleanField(app, 'TORRENT_PATH'), + 'clients.torrents.paused': BooleanField(app, 'TORRENT_PAUSED'), + 'clients.torrents.rpcurl': StringField(app, 'TORRENT_RPCURL'), + 'clients.torrents.seedLocation': StringField(app, 'TORRENT_SEED_LOCATION'), + 'clients.torrents.seedTime': IntegerField(app, 'TORRENT_SEED_TIME'), + 'clients.torrents.username': StringField(app, 'TORRENT_USERNAME'), + 'clients.torrents.verifySSL': BooleanField(app, 'TORRENT_VERIFY_CERT'), + 'clients.nzb.enabled': BooleanField(app, 'USE_NZBS'), + 'clients.nzb.dir': StringField(app, 'NZB_DIR'), + 'clients.nzb.method': StringField(app, 'NZB_METHOD'), + 'clients.nzb.nzbget.category': StringField(app, 'NZBGET_CATEGORY'), + 'clients.nzb.nzbget.categoryAnime': StringField(app, 'NZBGET_CATEGORY_ANIME'), + 'clients.nzb.nzbget.categoryAnimeBacklog': StringField(app, 'NZBGET_CATEGORY_ANIME_BACKLOG'), + 'clients.nzb.nzbget.categoryBacklog': StringField(app, 'NZBGET_CATEGORY_BACKLOG'), + 'clients.nzb.nzbget.host': StringField(app, 'NZBGET_HOST'), + 'clients.nzb.nzbget.password': StringField(app, 'NZBGET_PASSWORD'), + 'clients.nzb.nzbget.priority': IntegerField(app, 'NZBGET_PRIORITY'), + 'clients.nzb.nzbget.useHttps': BooleanField(app, 'NZBGET_USE_HTTPS'), + 'clients.nzb.nzbget.username': StringField(app, 'NZBGET_USERNAME'), + 'clients.nzb.sabnzbd.apiKey': StringField(app, 'SAB_APIKEY'), + 'clients.nzb.sabnzbd.category': StringField(app, 'SAB_CATEGORY'), + 'clients.nzb.sabnzbd.categoryAnime': StringField(app, 'SAB_CATEGORY_ANIME'), + 'clients.nzb.sabnzbd.categoryAnimeBacklog': StringField(app, 'SAB_CATEGORY_ANIME_BACKLOG'), + 'clients.nzb.sabnzbd.categoryBacklog': StringField(app, 'SAB_CATEGORY_BACKLOG'), + 'clients.nzb.sabnzbd.forced': 
BooleanField(app, 'SAB_FORCED'), + 'clients.nzb.sabnzbd.host': StringField(app, 'SAB_HOST'), + 'clients.nzb.sabnzbd.password': StringField(app, 'SAB_PASSWORD'), + 'clients.nzb.sabnzbd.username': StringField(app, 'SAB_USERNAME'), 'selectedRootIndex': IntegerField(app, 'SELECTED_ROOT'), 'layout.schedule': EnumField(app, 'COMING_EPS_LAYOUT', ('poster', 'banner', 'list', 'calendar'), default_value='banner', post_processor=layout_schedule_post_processor), @@ -191,6 +191,176 @@ class ConfigHandler(BaseRequestHandler): 'search.filters.required': ListField(app, 'REQUIRE_WORDS'), 'search.filters.ignoredSubsList': ListField(app, 'IGNORED_SUBS_LIST'), 'search.filters.ignoreUnknownSubs': BooleanField(app, 'IGNORE_UND_SUBS'), + + 'notifiers.kodi.enabled': BooleanField(app, 'USE_KODI'), + 'notifiers.kodi.alwaysOn': BooleanField(app, 'KODI_ALWAYS_ON'), + 'notifiers.kodi.notifyOnSnatch': BooleanField(app, 'KODI_NOTIFY_ONSNATCH'), + 'notifiers.kodi.notifyOnDownload': BooleanField(app, 'KODI_NOTIFY_ONDOWNLOAD'), + 'notifiers.kodi.notifyOnSubtitleDownload': BooleanField(app, 'KODI_NOTIFY_ONSUBTITLEDOWNLOAD'), + 'notifiers.kodi.update.library': BooleanField(app, 'KODI_UPDATE_LIBRARY'), + 'notifiers.kodi.update.full': BooleanField(app, 'KODI_UPDATE_FULL'), + 'notifiers.kodi.update.onlyFirst': BooleanField(app, 'KODI_UPDATE_ONLYFIRST'), + 'notifiers.kodi.host': ListField(app, 'KODI_HOST'), + 'notifiers.kodi.username': StringField(app, 'KODI_USERNAME'), + 'notifiers.kodi.password': StringField(app, 'KODI_PASSWORD'), + 'notifiers.kodi.libraryCleanPending': BooleanField(app, 'KODI_LIBRARY_CLEAN_PENDING'), + 'notifiers.kodi.cleanLibrary': BooleanField(app, 'KODI_CLEAN_LIBRARY'), + + 'notifiers.plex.server.enabled': BooleanField(app, 'USE_PLEX_SERVER'), + 'notifiers.plex.server.updateLibrary': BooleanField(app, 'PLEX_UPDATE_LIBRARY'), + 'notifiers.plex.server.host': ListField(app, 'PLEX_SERVER_HOST'), + 'notifiers.plex.server.https': BooleanField(app, 'PLEX_SERVER_HTTPS'), + 
'notifiers.plex.server.username': StringField(app, 'PLEX_SERVER_USERNAME'), + 'notifiers.plex.server.password': StringField(app, 'PLEX_SERVER_PASSWORD'), + 'notifiers.plex.server.token': StringField(app, 'PLEX_SERVER_TOKEN'), + 'notifiers.plex.client.enabled': BooleanField(app, 'USE_PLEX_CLIENT'), + 'notifiers.plex.client.username': StringField(app, 'PLEX_CLIENT_USERNAME'), + 'notifiers.plex.client.host': ListField(app, 'PLEX_CLIENT_HOST'), + 'notifiers.plex.client.notifyOnSnatch': BooleanField(app, 'PLEX_NOTIFY_ONSNATCH'), + 'notifiers.plex.client.notifyOnDownload': BooleanField(app, 'PLEX_NOTIFY_ONDOWNLOAD'), + 'notifiers.plex.client.notifyOnSubtitleDownload': BooleanField(app, 'PLEX_NOTIFY_ONSUBTITLEDOWNLOAD'), + + 'notifiers.emby.enabled': BooleanField(app, 'USE_EMBY'), + 'notifiers.emby.host': StringField(app, 'EMBY_HOST'), + 'notifiers.emby.apiKey': StringField(app, 'EMBY_APIKEY'), + + 'notifiers.nmj.enabled': BooleanField(app, 'USE_NMJ'), + 'notifiers.nmj.host': StringField(app, 'NMJ_HOST'), + 'notifiers.nmj.database': StringField(app, 'NMJ_DATABASE'), + 'notifiers.nmj.mount': StringField(app, 'NMJ_MOUNT'), + + 'notifiers.nmjv2.enabled': BooleanField(app, 'USE_NMJv2'), + 'notifiers.nmjv2.host': StringField(app, 'NMJv2_HOST'), + 'notifiers.nmjv2.dbloc': StringField(app, 'NMJv2_DBLOC'), + 'notifiers.nmjv2.database': StringField(app, 'NMJv2_DATABASE'), + + 'notifiers.synologyIndex.enabled': BooleanField(app, 'USE_SYNOINDEX'), + + 'notifiers.synology.enabled': BooleanField(app, 'USE_SYNOLOGYNOTIFIER'), + 'notifiers.synology.notifyOnSnatch': BooleanField(app, 'SYNOLOGYNOTIFIER_NOTIFY_ONSNATCH'), + 'notifiers.synology.notifyOnDownload': BooleanField(app, 'SYNOLOGYNOTIFIER_NOTIFY_ONDOWNLOAD'), + 'notifiers.synology.notifyOnSubtitleDownload': BooleanField(app, 'SYNOLOGYNOTIFIER_NOTIFY_ONSUBTITLEDOWNLOAD'), + + 'notifiers.pyTivo.enabled': BooleanField(app, 'USE_PYTIVO'), + 'notifiers.pyTivo.host': StringField(app, 'PYTIVO_HOST'), + 'notifiers.pyTivo.name': StringField(app, 
'PYTIVO_TIVO_NAME'), + 'notifiers.pyTivo.shareName': StringField(app, 'PYTIVO_SHARE_NAME'), + + 'notifiers.growl.enabled': BooleanField(app, 'USE_GROWL'), + 'notifiers.growl.host': StringField(app, 'GROWL_HOST'), + 'notifiers.growl.password': StringField(app, 'GROWL_PASSWORD'), + 'notifiers.growl.notifyOnSnatch': BooleanField(app, 'GROWL_NOTIFY_ONSNATCH'), + 'notifiers.growl.notifyOnDownload': BooleanField(app, 'GROWL_NOTIFY_ONDOWNLOAD'), + 'notifiers.growl.notifyOnSubtitleDownload': BooleanField(app, 'GROWL_NOTIFY_ONSUBTITLEDOWNLOAD'), + + 'notifiers.prowl.enabled': BooleanField(app, 'USE_PROWL'), + 'notifiers.prowl.api': ListField(app, 'PROWL_API'), + 'notifiers.prowl.messageTitle': StringField(app, 'PROWL_MESSAGE_TITLE'), + 'notifiers.prowl.priority': IntegerField(app, 'PROWL_PRIORITY'), + 'notifiers.prowl.notifyOnSnatch': BooleanField(app, 'PROWL_NOTIFY_ONSNATCH'), + 'notifiers.prowl.notifyOnDownload': BooleanField(app, 'PROWL_NOTIFY_ONDOWNLOAD'), + 'notifiers.prowl.notifyOnSubtitleDownload': BooleanField(app, 'PROWL_NOTIFY_ONSUBTITLEDOWNLOAD'), + + 'notifiers.libnotify.enabled': BooleanField(app, 'USE_LIBNOTIFY'), + 'notifiers.libnotify.notifyOnSnatch': BooleanField(app, 'LIBNOTIFY_NOTIFY_ONSNATCH'), + 'notifiers.libnotify.notifyOnDownload': BooleanField(app, 'LIBNOTIFY_NOTIFY_ONDOWNLOAD'), + 'notifiers.libnotify.notifyOnSubtitleDownload': BooleanField(app, 'LIBNOTIFY_NOTIFY_ONSUBTITLEDOWNLOAD'), + + 'notifiers.pushover.enabled': BooleanField(app, 'USE_PUSHOVER'), + 'notifiers.pushover.apiKey': StringField(app, 'PUSHOVER_APIKEY'), + 'notifiers.pushover.userKey': StringField(app, 'PUSHOVER_USERKEY'), + 'notifiers.pushover.device': ListField(app, 'PUSHOVER_DEVICE'), + 'notifiers.pushover.sound': StringField(app, 'PUSHOVER_SOUND'), + 'notifiers.pushover.priority': IntegerField(app, 'PUSHOVER_PRIORITY'), + 'notifiers.pushover.notifyOnSnatch': BooleanField(app, 'PUSHOVER_NOTIFY_ONSNATCH'), + 'notifiers.pushover.notifyOnDownload': BooleanField(app, 
'PUSHOVER_NOTIFY_ONDOWNLOAD'), + 'notifiers.pushover.notifyOnSubtitleDownload': BooleanField(app, 'PUSHOVER_NOTIFY_ONSUBTITLEDOWNLOAD'), + + 'notifiers.boxcar2.enabled': BooleanField(app, 'USE_BOXCAR2'), + 'notifiers.boxcar2.accessToken': StringField(app, 'BOXCAR2_ACCESSTOKEN'), + 'notifiers.boxcar2.notifyOnSnatch': BooleanField(app, 'BOXCAR2_NOTIFY_ONSNATCH'), + 'notifiers.boxcar2.notifyOnDownload': BooleanField(app, 'BOXCAR2_NOTIFY_ONDOWNLOAD'), + 'notifiers.boxcar2.notifyOnSubtitleDownload': BooleanField(app, 'BOXCAR2_NOTIFY_ONSUBTITLEDOWNLOAD'), + + 'notifiers.pushalot.enabled': BooleanField(app, 'USE_PUSHALOT'), + 'notifiers.pushalot.authToken': StringField(app, 'PUSHALOT_AUTHORIZATIONTOKEN'), + 'notifiers.pushalot.notifyOnSnatch': BooleanField(app, 'PUSHALOT_NOTIFY_ONSNATCH'), + 'notifiers.pushalot.notifyOnDownload': BooleanField(app, 'PUSHALOT_NOTIFY_ONDOWNLOAD'), + 'notifiers.pushalot.notifyOnSubtitleDownload': BooleanField(app, 'PUSHALOT_NOTIFY_ONSUBTITLEDOWNLOAD'), + + 'notifiers.pushbullet.enabled': BooleanField(app, 'USE_PUSHBULLET'), + 'notifiers.pushbullet.api': StringField(app, 'PUSHBULLET_API'), + 'notifiers.pushbullet.device': StringField(app, 'PUSHBULLET_DEVICE'), + 'notifiers.pushbullet.notifyOnSnatch': BooleanField(app, 'PUSHBULLET_NOTIFY_ONSNATCH'), + 'notifiers.pushbullet.notifyOnDownload': BooleanField(app, 'PUSHBULLET_NOTIFY_ONDOWNLOAD'), + 'notifiers.pushbullet.notifyOnSubtitleDownload': BooleanField(app, 'PUSHBULLET_NOTIFY_ONSUBTITLEDOWNLOAD'), + + 'notifiers.join.enabled': BooleanField(app, 'USE_JOIN'), + 'notifiers.join.api': StringField(app, 'JOIN_API'), + 'notifiers.join.device': StringField(app, 'JOIN_DEVICE'), + 'notifiers.join.notifyOnSnatch': BooleanField(app, 'JOIN_NOTIFY_ONSNATCH'), + 'notifiers.join.notifyOnDownload': BooleanField(app, 'JOIN_NOTIFY_ONDOWNLOAD'), + 'notifiers.join.notifyOnSubtitleDownload': BooleanField(app, 'JOIN_NOTIFY_ONSUBTITLEDOWNLOAD'), + + 'notifiers.freemobile.enabled': BooleanField(app, 
'USE_FREEMOBILE'), + 'notifiers.freemobile.api': StringField(app, 'FREEMOBILE_APIKEY'), + 'notifiers.freemobile.id': StringField(app, 'FREEMOBILE_ID'), + 'notifiers.freemobile.notifyOnSnatch': BooleanField(app, 'FREEMOBILE_NOTIFY_ONSNATCH'), + 'notifiers.freemobile.notifyOnDownload': BooleanField(app, 'FREEMOBILE_NOTIFY_ONDOWNLOAD'), + 'notifiers.freemobile.notifyOnSubtitleDownload': BooleanField(app, 'FREEMOBILE_NOTIFY_ONSUBTITLEDOWNLOAD'), + + 'notifiers.telegram.enabled': BooleanField(app, 'USE_TELEGRAM'), + 'notifiers.telegram.api': StringField(app, 'TELEGRAM_APIKEY'), + 'notifiers.telegram.id': StringField(app, 'TELEGRAM_ID'), + 'notifiers.telegram.notifyOnSnatch': BooleanField(app, 'TELEGRAM_NOTIFY_ONSNATCH'), + 'notifiers.telegram.notifyOnDownload': BooleanField(app, 'TELEGRAM_NOTIFY_ONDOWNLOAD'), + 'notifiers.telegram.notifyOnSubtitleDownload': BooleanField(app, 'TELEGRAM_NOTIFY_ONSUBTITLEDOWNLOAD'), + + 'notifiers.twitter.enabled': BooleanField(app, 'USE_TWITTER'), + 'notifiers.twitter.dmto': StringField(app, 'TWITTER_DMTO'), + 'notifiers.twitter.username': StringField(app, 'TWITTER_USERNAME'), + 'notifiers.twitter.password': StringField(app, 'TWITTER_PASSWORD'), + 'notifiers.twitter.prefix': StringField(app, 'TWITTER_PREFIX'), + 'notifiers.twitter.directMessage': BooleanField(app, 'TWITTER_USEDM'), + 'notifiers.twitter.notifyOnSnatch': BooleanField(app, 'TWITTER_NOTIFY_ONSNATCH'), + 'notifiers.twitter.notifyOnDownload': BooleanField(app, 'TWITTER_NOTIFY_ONDOWNLOAD'), + 'notifiers.twitter.notifyOnSubtitleDownload': BooleanField(app, 'TWITTER_NOTIFY_ONSUBTITLEDOWNLOAD'), + + 'notifiers.trakt.enabled': BooleanField(app, 'USE_TRAKT'), + 'notifiers.trakt.pinUrl': StringField(app, 'TRAKT_PIN_URL'), + 'notifiers.trakt.username': StringField(app, 'TRAKT_USERNAME'), + 'notifiers.trakt.accessToken': StringField(app, 'TRAKT_ACCESS_TOKEN'), + 'notifiers.trakt.timeout': IntegerField(app, 'TRAKT_TIMEOUT'), + 'notifiers.trakt.defaultIndexer': IntegerField(app, 
'TRAKT_DEFAULT_INDEXER'), + 'notifiers.trakt.sync': BooleanField(app, 'TRAKT_SYNC'), + 'notifiers.trakt.syncRemove': BooleanField(app, 'TRAKT_SYNC_REMOVE'), + 'notifiers.trakt.syncWatchlist': BooleanField(app, 'TRAKT_SYNC_WATCHLIST'), + 'notifiers.trakt.methodAdd': IntegerField(app, 'TRAKT_METHOD_ADD'), + 'notifiers.trakt.removeWatchlist': BooleanField(app, 'TRAKT_REMOVE_WATCHLIST'), + 'notifiers.trakt.removeSerieslist': BooleanField(app, 'TRAKT_REMOVE_SERIESLIST'), + 'notifiers.trakt.removeShowFromApplication': BooleanField(app, 'TRAKT_REMOVE_SHOW_FROM_APPLICATION'), + 'notifiers.trakt.startPaused': BooleanField(app, 'TRAKT_START_PAUSED'), + 'notifiers.trakt.blacklistName': StringField(app, 'TRAKT_BLACKLIST_NAME'), + + 'notifiers.email.enabled': BooleanField(app, 'USE_EMAIL'), + 'notifiers.email.host': StringField(app, 'EMAIL_HOST'), + 'notifiers.email.port': IntegerField(app, 'EMAIL_PORT'), + 'notifiers.email.from': StringField(app, 'EMAIL_FROM'), + 'notifiers.email.tls': BooleanField(app, 'EMAIL_TLS'), + 'notifiers.email.username': StringField(app, 'EMAIL_USER'), + 'notifiers.email.password': StringField(app, 'EMAIL_PASSWORD'), + 'notifiers.email.addressList': ListField(app, 'EMAIL_LIST'), + 'notifiers.email.subject': StringField(app, 'EMAIL_SUBJECT'), + 'notifiers.email.notifyOnSnatch': BooleanField(app, 'EMAIL_NOTIFY_ONSNATCH'), + 'notifiers.email.notifyOnDownload': BooleanField(app, 'EMAIL_NOTIFY_ONDOWNLOAD'), + 'notifiers.email.notifyOnSubtitleDownload': BooleanField(app, 'EMAIL_NOTIFY_ONSUBTITLEDOWNLOAD'), + + 'notifiers.slack.enabled': BooleanField(app, 'USE_SLACK'), + 'notifiers.slack.webhook': StringField(app, 'SLACK_WEBHOOK'), + 'notifiers.slack.notifyOnSnatch': BooleanField(app, 'SLACK_NOTIFY_SNATCH'), + 'notifiers.slack.notifyOnDownload': BooleanField(app, 'SLACK_NOTIFY_DOWNLOAD'), + 'notifiers.slack.notifyOnSubtitleDownload': BooleanField(app, 'SLACK_NOTIFY_SUBTITLEDOWNLOAD'), + } def http_get(self, identifier, path_param=None): @@ -329,6 +499,7 @@ 
def data_main(): section_data['logDir'] = app.LOG_DIR section_data['appArgs'] = app.MY_ARGS section_data['webRoot'] = app.WEB_ROOT + section_data['runsInDocker'] = bool(app.RUNS_IN_DOCKER) section_data['githubUrl'] = app.GITHUB_IO_URL section_data['wikiUrl'] = app.WIKI_URL section_data['donationsUrl'] = app.DONATIONS_URL @@ -363,81 +534,6 @@ def data_main(): section_data['failedDownloads']['enabled'] = bool(app.USE_FAILED_DOWNLOADS) section_data['failedDownloads']['deleteFailed'] = bool(app.DELETE_FAILED) - section_data['kodi'] = NonEmptyDict() - section_data['kodi']['enabled'] = bool(app.USE_KODI) - section_data['kodi']['alwaysOn'] = bool(app.KODI_ALWAYS_ON) - section_data['kodi']['notify'] = NonEmptyDict() - section_data['kodi']['notify']['snatch'] = bool(app.KODI_NOTIFY_ONSNATCH) - section_data['kodi']['notify']['download'] = bool(app.KODI_NOTIFY_ONDOWNLOAD) - section_data['kodi']['notify']['subtitleDownload'] = bool(app.KODI_NOTIFY_ONSUBTITLEDOWNLOAD) - section_data['kodi']['update'] = NonEmptyDict() - section_data['kodi']['update']['library'] = bool(app.KODI_UPDATE_LIBRARY) - section_data['kodi']['update']['full'] = bool(app.KODI_UPDATE_FULL) - section_data['kodi']['update']['onlyFirst'] = bool(app.KODI_UPDATE_ONLYFIRST) - section_data['kodi']['host'] = app.KODI_HOST - section_data['kodi']['username'] = app.KODI_USERNAME - section_data['kodi']['libraryCleanPending'] = bool(app.KODI_LIBRARY_CLEAN_PENDING) - section_data['kodi']['cleanLibrary'] = bool(app.KODI_CLEAN_LIBRARY) - - section_data['plex'] = NonEmptyDict() - section_data['plex']['server'] = NonEmptyDict() - section_data['plex']['server']['enabled'] = bool(app.USE_PLEX_SERVER) - section_data['plex']['server']['notify'] = NonEmptyDict() - section_data['plex']['server']['notify']['snatch'] = bool(app.PLEX_NOTIFY_ONSNATCH) - section_data['plex']['server']['notify']['download'] = bool(app.PLEX_NOTIFY_ONDOWNLOAD) - section_data['plex']['server']['notify']['subtitleDownload'] = 
bool(app.PLEX_NOTIFY_ONSUBTITLEDOWNLOAD) - section_data['plex']['server']['updateLibrary'] = bool(app.PLEX_UPDATE_LIBRARY) - section_data['plex']['server']['host'] = app.PLEX_SERVER_HOST - section_data['plex']['server']['username'] = app.PLEX_SERVER_USERNAME - section_data['plex']['client'] = NonEmptyDict() - section_data['plex']['client']['enabled'] = bool(app.USE_PLEX_CLIENT) - section_data['plex']['client']['username'] = app.PLEX_CLIENT_USERNAME - section_data['plex']['client']['host'] = app.PLEX_CLIENT_HOST - - section_data['emby'] = NonEmptyDict() - section_data['emby']['enabled'] = bool(app.USE_EMBY) - section_data['emby']['host'] = app.EMBY_HOST - - section_data['torrents'] = NonEmptyDict() - section_data['torrents']['authType'] = app.TORRENT_AUTH_TYPE - section_data['torrents']['dir'] = app.TORRENT_DIR - section_data['torrents']['enabled'] = bool(app.USE_TORRENTS) - section_data['torrents']['highBandwidth'] = app.TORRENT_HIGH_BANDWIDTH - section_data['torrents']['host'] = app.TORRENT_HOST - section_data['torrents']['label'] = app.TORRENT_LABEL - section_data['torrents']['labelAnime'] = app.TORRENT_LABEL_ANIME - section_data['torrents']['method'] = app.TORRENT_METHOD - section_data['torrents']['path'] = app.TORRENT_PATH - section_data['torrents']['paused'] = bool(app.TORRENT_PAUSED) - section_data['torrents']['rpcurl'] = app.TORRENT_RPCURL - section_data['torrents']['seedLocation'] = app.TORRENT_SEED_LOCATION - section_data['torrents']['seedTime'] = app.TORRENT_SEED_TIME - section_data['torrents']['username'] = app.TORRENT_USERNAME - section_data['torrents']['verifySSL'] = bool(app.TORRENT_VERIFY_CERT) - - section_data['nzb'] = NonEmptyDict() - section_data['nzb']['enabled'] = bool(app.USE_NZBS) - section_data['nzb']['dir'] = app.NZB_DIR - section_data['nzb']['method'] = app.NZB_METHOD - section_data['nzb']['nzbget'] = NonEmptyDict() - section_data['nzb']['nzbget']['category'] = app.NZBGET_CATEGORY - section_data['nzb']['nzbget']['categoryAnime'] = 
app.NZBGET_CATEGORY_ANIME - section_data['nzb']['nzbget']['categoryAnimeBacklog'] = app.NZBGET_CATEGORY_ANIME_BACKLOG - section_data['nzb']['nzbget']['categoryBacklog'] = app.NZBGET_CATEGORY_BACKLOG - section_data['nzb']['nzbget']['host'] = app.NZBGET_HOST - section_data['nzb']['nzbget']['priority'] = app.NZBGET_PRIORITY - section_data['nzb']['nzbget']['useHttps'] = bool(app.NZBGET_USE_HTTPS) - section_data['nzb']['nzbget']['username'] = app.NZBGET_USERNAME - - section_data['nzb']['sabnzbd'] = NonEmptyDict() - section_data['nzb']['sabnzbd']['category'] = app.SAB_CATEGORY - section_data['nzb']['sabnzbd']['categoryAnime'] = app.SAB_CATEGORY_ANIME - section_data['nzb']['sabnzbd']['categoryAnimeBacklog'] = app.SAB_CATEGORY_ANIME_BACKLOG - section_data['nzb']['sabnzbd']['categoryBacklog'] = app.SAB_CATEGORY_BACKLOG - section_data['nzb']['sabnzbd']['forced'] = bool(app.SAB_FORCED) - section_data['nzb']['sabnzbd']['host'] = app.SAB_HOST - section_data['nzb']['sabnzbd']['username'] = app.SAB_USERNAME - section_data['layout'] = NonEmptyDict() section_data['layout']['schedule'] = app.COMING_EPS_LAYOUT section_data['layout']['history'] = app.HISTORY_LAYOUT @@ -614,3 +710,251 @@ def data_search(): section_data['filters']['ignoreUnknownSubs'] = bool(app.IGNORE_UND_SUBS) return section_data + + @staticmethod + def data_notifiers(): + """Notifications.""" + section_data = NonEmptyDict() + + section_data['kodi'] = NonEmptyDict() + section_data['kodi']['enabled'] = bool(app.USE_KODI) + section_data['kodi']['alwaysOn'] = bool(app.KODI_ALWAYS_ON) + section_data['kodi']['notifyOnSnatch'] = bool(app.KODI_NOTIFY_ONSNATCH) + section_data['kodi']['notifyOnDownload'] = bool(app.KODI_NOTIFY_ONDOWNLOAD) + section_data['kodi']['notifyOnSubtitleDownload'] = bool(app.KODI_NOTIFY_ONSUBTITLEDOWNLOAD) + section_data['kodi']['update'] = NonEmptyDict() + section_data['kodi']['update']['library'] = bool(app.KODI_UPDATE_LIBRARY) + section_data['kodi']['update']['full'] = bool(app.KODI_UPDATE_FULL) + 
section_data['kodi']['update']['onlyFirst'] = bool(app.KODI_UPDATE_ONLYFIRST) + section_data['kodi']['host'] = app.KODI_HOST + section_data['kodi']['username'] = app.KODI_USERNAME + section_data['kodi']['password'] = app.KODI_PASSWORD + section_data['kodi']['libraryCleanPending'] = bool(app.KODI_LIBRARY_CLEAN_PENDING) + section_data['kodi']['cleanLibrary'] = bool(app.KODI_CLEAN_LIBRARY) + + section_data['plex'] = NonEmptyDict() + section_data['plex']['server'] = NonEmptyDict() + section_data['plex']['server']['enabled'] = bool(app.USE_PLEX_SERVER) + section_data['plex']['server']['updateLibrary'] = bool(app.PLEX_UPDATE_LIBRARY) + section_data['plex']['server']['host'] = app.PLEX_SERVER_HOST + section_data['plex']['server']['https'] = bool(app.PLEX_SERVER_HTTPS) + section_data['plex']['server']['username'] = app.PLEX_SERVER_USERNAME + section_data['plex']['server']['password'] = app.PLEX_SERVER_PASSWORD + section_data['plex']['server']['token'] = app.PLEX_SERVER_TOKEN + section_data['plex']['client'] = NonEmptyDict() + section_data['plex']['client']['enabled'] = bool(app.USE_PLEX_CLIENT) + section_data['plex']['client']['username'] = app.PLEX_CLIENT_USERNAME + section_data['plex']['client']['host'] = app.PLEX_CLIENT_HOST + section_data['plex']['client']['notifyOnSnatch'] = bool(app.PLEX_NOTIFY_ONSNATCH) + section_data['plex']['client']['notifyOnDownload'] = bool(app.PLEX_NOTIFY_ONDOWNLOAD) + section_data['plex']['client']['notifyOnSubtitleDownload'] = bool(app.PLEX_NOTIFY_ONSUBTITLEDOWNLOAD) + + section_data['emby'] = NonEmptyDict() + section_data['emby']['enabled'] = bool(app.USE_EMBY) + section_data['emby']['host'] = app.EMBY_HOST + section_data['emby']['apiKey'] = app.EMBY_APIKEY + + section_data['nmj'] = NonEmptyDict() + section_data['nmj']['enabled'] = bool(app.USE_NMJ) + section_data['nmj']['host'] = app.NMJ_HOST + section_data['nmj']['database'] = app.NMJ_DATABASE + section_data['nmj']['mount'] = app.NMJ_MOUNT + + section_data['nmjv2'] = NonEmptyDict() + 
section_data['nmjv2']['enabled'] = bool(app.USE_NMJv2) + section_data['nmjv2']['host'] = app.NMJv2_HOST + section_data['nmjv2']['dbloc'] = app.NMJv2_DBLOC + section_data['nmjv2']['database'] = app.NMJv2_DATABASE + + section_data['synologyIndex'] = NonEmptyDict() + section_data['synologyIndex']['enabled'] = bool(app.USE_SYNOINDEX) + + section_data['synology'] = NonEmptyDict() + section_data['synology']['enabled'] = bool(app.USE_SYNOLOGYNOTIFIER) + section_data['synology']['notifyOnSnatch'] = bool(app.SYNOLOGYNOTIFIER_NOTIFY_ONSNATCH) + section_data['synology']['notifyOnDownload'] = bool(app.SYNOLOGYNOTIFIER_NOTIFY_ONDOWNLOAD) + section_data['synology']['notifyOnSubtitleDownload'] = bool(app.SYNOLOGYNOTIFIER_NOTIFY_ONSUBTITLEDOWNLOAD) + + section_data['pyTivo'] = NonEmptyDict() + section_data['pyTivo']['enabled'] = bool(app.USE_PYTIVO) + section_data['pyTivo']['host'] = app.PYTIVO_HOST + section_data['pyTivo']['name'] = app.PYTIVO_TIVO_NAME + section_data['pyTivo']['shareName'] = app.PYTIVO_SHARE_NAME + + section_data['growl'] = NonEmptyDict() + section_data['growl']['enabled'] = bool(app.USE_GROWL) + section_data['growl']['host'] = app.GROWL_HOST + section_data['growl']['password'] = app.GROWL_PASSWORD + section_data['growl']['notifyOnSnatch'] = bool(app.GROWL_NOTIFY_ONSNATCH) + section_data['growl']['notifyOnDownload'] = bool(app.GROWL_NOTIFY_ONDOWNLOAD) + section_data['growl']['notifyOnSubtitleDownload'] = bool(app.GROWL_NOTIFY_ONSUBTITLEDOWNLOAD) + + section_data['prowl'] = NonEmptyDict() + section_data['prowl']['enabled'] = bool(app.USE_PROWL) + section_data['prowl']['api'] = app.PROWL_API + section_data['prowl']['messageTitle'] = app.PROWL_MESSAGE_TITLE + section_data['prowl']['priority'] = int(app.PROWL_PRIORITY) + section_data['prowl']['notifyOnSnatch'] = bool(app.PROWL_NOTIFY_ONSNATCH) + section_data['prowl']['notifyOnDownload'] = bool(app.PROWL_NOTIFY_ONDOWNLOAD) + section_data['prowl']['notifyOnSubtitleDownload'] = bool(app.PROWL_NOTIFY_ONSUBTITLEDOWNLOAD) 
+ + section_data['libnotify'] = NonEmptyDict() + section_data['libnotify']['enabled'] = bool(app.USE_LIBNOTIFY) + section_data['libnotify']['notifyOnSnatch'] = bool(app.LIBNOTIFY_NOTIFY_ONSNATCH) + section_data['libnotify']['notifyOnDownload'] = bool(app.LIBNOTIFY_NOTIFY_ONDOWNLOAD) + section_data['libnotify']['notifyOnSubtitleDownload'] = bool(app.LIBNOTIFY_NOTIFY_ONSUBTITLEDOWNLOAD) + + section_data['pushover'] = NonEmptyDict() + section_data['pushover']['enabled'] = bool(app.USE_PUSHOVER) + section_data['pushover']['apiKey'] = app.PUSHOVER_APIKEY + section_data['pushover']['userKey'] = app.PUSHOVER_USERKEY + section_data['pushover']['device'] = app.PUSHOVER_DEVICE + section_data['pushover']['sound'] = app.PUSHOVER_SOUND + section_data['pushover']['priority'] = int(app.PUSHOVER_PRIORITY) + section_data['pushover']['notifyOnSnatch'] = bool(app.PUSHOVER_NOTIFY_ONSNATCH) + section_data['pushover']['notifyOnDownload'] = bool(app.PUSHOVER_NOTIFY_ONDOWNLOAD) + section_data['pushover']['notifyOnSubtitleDownload'] = bool(app.PUSHOVER_NOTIFY_ONSUBTITLEDOWNLOAD) + + section_data['boxcar2'] = NonEmptyDict() + section_data['boxcar2']['enabled'] = bool(app.USE_BOXCAR2) + section_data['boxcar2']['notifyOnSnatch'] = bool(app.BOXCAR2_NOTIFY_ONSNATCH) + section_data['boxcar2']['notifyOnDownload'] = bool(app.BOXCAR2_NOTIFY_ONDOWNLOAD) + section_data['boxcar2']['notifyOnSubtitleDownload'] = bool(app.BOXCAR2_NOTIFY_ONSUBTITLEDOWNLOAD) + section_data['boxcar2']['accessToken'] = app.BOXCAR2_ACCESSTOKEN + + section_data['pushalot'] = NonEmptyDict() + section_data['pushalot']['enabled'] = bool(app.USE_PUSHALOT) + section_data['pushalot']['notifyOnSnatch'] = bool(app.PUSHALOT_NOTIFY_ONSNATCH) + section_data['pushalot']['notifyOnDownload'] = bool(app.PUSHALOT_NOTIFY_ONDOWNLOAD) + section_data['pushalot']['notifyOnSubtitleDownload'] = bool(app.PUSHALOT_NOTIFY_ONSUBTITLEDOWNLOAD) + section_data['pushalot']['authToken'] = app.PUSHALOT_AUTHORIZATIONTOKEN + + section_data['pushbullet'] = 
NonEmptyDict() + section_data['pushbullet']['enabled'] = bool(app.USE_PUSHBULLET) + section_data['pushbullet']['notifyOnSnatch'] = bool(app.PUSHBULLET_NOTIFY_ONSNATCH) + section_data['pushbullet']['notifyOnDownload'] = bool(app.PUSHBULLET_NOTIFY_ONDOWNLOAD) + section_data['pushbullet']['notifyOnSubtitleDownload'] = bool(app.PUSHBULLET_NOTIFY_ONSUBTITLEDOWNLOAD) + section_data['pushbullet']['api'] = app.PUSHBULLET_API + section_data['pushbullet']['device'] = app.PUSHBULLET_DEVICE + + section_data['join'] = NonEmptyDict() + section_data['join']['enabled'] = bool(app.USE_JOIN) + section_data['join']['notifyOnSnatch'] = bool(app.JOIN_NOTIFY_ONSNATCH) + section_data['join']['notifyOnDownload'] = bool(app.JOIN_NOTIFY_ONDOWNLOAD) + section_data['join']['notifyOnSubtitleDownload'] = bool(app.JOIN_NOTIFY_ONSUBTITLEDOWNLOAD) + section_data['join']['api'] = app.JOIN_API + section_data['join']['device'] = app.JOIN_DEVICE + + section_data['freemobile'] = NonEmptyDict() + section_data['freemobile']['enabled'] = bool(app.USE_FREEMOBILE) + section_data['freemobile']['notifyOnSnatch'] = bool(app.FREEMOBILE_NOTIFY_ONSNATCH) + section_data['freemobile']['notifyOnDownload'] = bool(app.FREEMOBILE_NOTIFY_ONDOWNLOAD) + section_data['freemobile']['notifyOnSubtitleDownload'] = bool(app.FREEMOBILE_NOTIFY_ONSUBTITLEDOWNLOAD) + section_data['freemobile']['api'] = app.FREEMOBILE_APIKEY + section_data['freemobile']['id'] = app.FREEMOBILE_ID + + section_data['telegram'] = NonEmptyDict() + section_data['telegram']['enabled'] = bool(app.USE_TELEGRAM) + section_data['telegram']['notifyOnSnatch'] = bool(app.TELEGRAM_NOTIFY_ONSNATCH) + section_data['telegram']['notifyOnDownload'] = bool(app.TELEGRAM_NOTIFY_ONDOWNLOAD) + section_data['telegram']['notifyOnSubtitleDownload'] = bool(app.TELEGRAM_NOTIFY_ONSUBTITLEDOWNLOAD) + section_data['telegram']['api'] = app.TELEGRAM_APIKEY + section_data['telegram']['id'] = app.TELEGRAM_ID + + section_data['twitter'] = NonEmptyDict() + 
section_data['twitter']['enabled'] = bool(app.USE_TWITTER) + section_data['twitter']['notifyOnSnatch'] = bool(app.TWITTER_NOTIFY_ONSNATCH) + section_data['twitter']['notifyOnDownload'] = bool(app.TWITTER_NOTIFY_ONDOWNLOAD) + section_data['twitter']['notifyOnSubtitleDownload'] = bool(app.TWITTER_NOTIFY_ONSUBTITLEDOWNLOAD) + section_data['twitter']['dmto'] = app.TWITTER_DMTO + section_data['twitter']['username'] = app.TWITTER_USERNAME + section_data['twitter']['password'] = app.TWITTER_PASSWORD + section_data['twitter']['prefix'] = app.TWITTER_PREFIX + section_data['twitter']['directMessage'] = bool(app.TWITTER_USEDM) + + section_data['trakt'] = NonEmptyDict() + section_data['trakt']['enabled'] = bool(app.USE_TRAKT) + section_data['trakt']['pinUrl'] = app.TRAKT_PIN_URL + section_data['trakt']['username'] = app.TRAKT_USERNAME + section_data['trakt']['accessToken'] = app.TRAKT_ACCESS_TOKEN + section_data['trakt']['timeout'] = int(app.TRAKT_TIMEOUT) + section_data['trakt']['defaultIndexer'] = int(app.TRAKT_DEFAULT_INDEXER) + section_data['trakt']['sync'] = bool(app.TRAKT_SYNC) + section_data['trakt']['syncRemove'] = bool(app.TRAKT_SYNC_REMOVE) + section_data['trakt']['syncWatchlist'] = bool(app.TRAKT_SYNC_WATCHLIST) + section_data['trakt']['methodAdd'] = int(app.TRAKT_METHOD_ADD) + section_data['trakt']['removeWatchlist'] = bool(app.TRAKT_REMOVE_WATCHLIST) + section_data['trakt']['removeSerieslist'] = bool(app.TRAKT_REMOVE_SERIESLIST) + section_data['trakt']['removeShowFromApplication'] = bool(app.TRAKT_REMOVE_SHOW_FROM_APPLICATION) + section_data['trakt']['startPaused'] = bool(app.TRAKT_START_PAUSED) + section_data['trakt']['blacklistName'] = app.TRAKT_BLACKLIST_NAME + + section_data['email'] = NonEmptyDict() + section_data['email']['enabled'] = bool(app.USE_EMAIL) + section_data['email']['notifyOnSnatch'] = bool(app.EMAIL_NOTIFY_ONSNATCH) + section_data['email']['notifyOnDownload'] = bool(app.EMAIL_NOTIFY_ONDOWNLOAD) + section_data['email']['notifyOnSubtitleDownload'] 
= bool(app.EMAIL_NOTIFY_ONSUBTITLEDOWNLOAD) + section_data['email']['host'] = app.EMAIL_HOST + section_data['email']['port'] = app.EMAIL_PORT + section_data['email']['from'] = app.EMAIL_FROM + section_data['email']['tls'] = bool(app.EMAIL_TLS) + section_data['email']['username'] = app.EMAIL_USER + section_data['email']['password'] = app.EMAIL_PASSWORD + section_data['email']['addressList'] = app.EMAIL_LIST + section_data['email']['subject'] = app.EMAIL_SUBJECT + + section_data['slack'] = NonEmptyDict() + section_data['slack']['enabled'] = bool(app.USE_SLACK) + section_data['slack']['notifyOnSnatch'] = bool(app.SLACK_NOTIFY_SNATCH) + section_data['slack']['notifyOnDownload'] = bool(app.SLACK_NOTIFY_DOWNLOAD) + section_data['slack']['notifyOnSubtitleDownload'] = bool(app.SLACK_NOTIFY_SUBTITLEDOWNLOAD) + section_data['slack']['webhook'] = app.SLACK_WEBHOOK + + return section_data + + @staticmethod + def data_clients(): + """Clients.""" + section_data = NonEmptyDict() + + section_data['torrents'] = NonEmptyDict() + section_data['torrents']['authType'] = app.TORRENT_AUTH_TYPE + section_data['torrents']['dir'] = app.TORRENT_DIR + section_data['torrents']['enabled'] = bool(app.USE_TORRENTS) + section_data['torrents']['highBandwidth'] = bool(app.TORRENT_HIGH_BANDWIDTH) + section_data['torrents']['host'] = app.TORRENT_HOST + section_data['torrents']['label'] = app.TORRENT_LABEL + section_data['torrents']['labelAnime'] = app.TORRENT_LABEL_ANIME + section_data['torrents']['method'] = app.TORRENT_METHOD + section_data['torrents']['path'] = app.TORRENT_PATH + section_data['torrents']['paused'] = bool(app.TORRENT_PAUSED) + section_data['torrents']['rpcurl'] = app.TORRENT_RPCURL + section_data['torrents']['seedLocation'] = app.TORRENT_SEED_LOCATION + section_data['torrents']['seedTime'] = app.TORRENT_SEED_TIME + section_data['torrents']['username'] = app.TORRENT_USERNAME + section_data['torrents']['verifySSL'] = bool(app.TORRENT_VERIFY_CERT) + + section_data['nzb'] = 
NonEmptyDict() + section_data['nzb']['enabled'] = bool(app.USE_NZBS) + section_data['nzb']['dir'] = app.NZB_DIR + section_data['nzb']['method'] = app.NZB_METHOD + section_data['nzb']['nzbget'] = NonEmptyDict() + section_data['nzb']['nzbget']['category'] = app.NZBGET_CATEGORY + section_data['nzb']['nzbget']['categoryAnime'] = app.NZBGET_CATEGORY_ANIME + section_data['nzb']['nzbget']['categoryAnimeBacklog'] = app.NZBGET_CATEGORY_ANIME_BACKLOG + section_data['nzb']['nzbget']['categoryBacklog'] = app.NZBGET_CATEGORY_BACKLOG + section_data['nzb']['nzbget']['host'] = app.NZBGET_HOST + section_data['nzb']['nzbget']['priority'] = int(app.NZBGET_PRIORITY) + section_data['nzb']['nzbget']['useHttps'] = bool(app.NZBGET_USE_HTTPS) + section_data['nzb']['nzbget']['username'] = app.NZBGET_USERNAME + + section_data['nzb']['sabnzbd'] = NonEmptyDict() + section_data['nzb']['sabnzbd']['category'] = app.SAB_CATEGORY + section_data['nzb']['sabnzbd']['categoryAnime'] = app.SAB_CATEGORY_ANIME + section_data['nzb']['sabnzbd']['categoryAnimeBacklog'] = app.SAB_CATEGORY_ANIME_BACKLOG + section_data['nzb']['sabnzbd']['categoryBacklog'] = app.SAB_CATEGORY_BACKLOG + section_data['nzb']['sabnzbd']['forced'] = bool(app.SAB_FORCED) + section_data['nzb']['sabnzbd']['host'] = app.SAB_HOST + section_data['nzb']['sabnzbd']['username'] = app.SAB_USERNAME + + return section_data diff --git a/medusa/server/api/v2/episodes.py b/medusa/server/api/v2/episodes.py index b682decda1..f3a340460f 100644 --- a/medusa/server/api/v2/episodes.py +++ b/medusa/server/api/v2/episodes.py @@ -4,6 +4,7 @@ import logging +from medusa.helper.exceptions import EpisodeDeletedException from medusa.logger.adapters.style import BraceAdapter from medusa.server.api.v2.base import ( BaseRequestHandler, @@ -36,7 +37,7 @@ class EpisodeHandler(BaseRequestHandler): #: path param path_param = ('path_param', r'\w+') #: allowed HTTP methods - allowed_methods = ('GET', 'PATCH', ) + allowed_methods = ('GET', 'PATCH', 'DELETE',) def 
http_get(self, series_slug, episode_slug, path_param): """Query episode information. @@ -156,3 +157,31 @@ def _patch_episode(episode, data): ) return accepted + + def http_delete(self, series_slug, episode_slug, **kwargs): + """Delete the episode.""" + if not series_slug: + return self._method_not_allowed('Deleting multiple series are not allowed') + + identifier = SeriesIdentifier.from_slug(series_slug) + if not identifier: + return self._bad_request('Invalid series identifier') + + series = Series.find_by_identifier(identifier) + if not series: + return self._not_found('Series not found') + + episode_number = EpisodeNumber.from_slug(episode_slug) + if not episode_number: + return self._bad_request('Invalid episode number') + + episode = Episode.find_by_series_and_episode(series, episode_number) + if not episode: + return self._not_found('Episode not found') + + try: + episode.delete_episode() + except EpisodeDeletedException: + return self._no_content() + else: + return self._conflict('Unable to delete episode') diff --git a/medusa/server/web/config/notifications.py b/medusa/server/web/config/notifications.py index b9e7c6f8ab..8edd2cd9b2 100644 --- a/medusa/server/web/config/notifications.py +++ b/medusa/server/web/config/notifications.py @@ -55,9 +55,9 @@ def saveNotifications(self, use_kodi=None, kodi_always_on=None, kodi_notify_onsn use_boxcar2=None, boxcar2_notify_onsnatch=None, boxcar2_notify_ondownload=None, boxcar2_notify_onsubtitledownload=None, boxcar2_accesstoken=None, use_pushover=None, pushover_notify_onsnatch=None, pushover_notify_ondownload=None, - pushover_notify_onsubtitledownload=None, pushover_userkey=None, pushover_apikey=None, pushover_device=None, pushover_sound=None, - use_libnotify=None, libnotify_notify_onsnatch=None, libnotify_notify_ondownload=None, - libnotify_notify_onsubtitledownload=None, + pushover_notify_onsubtitledownload=None, pushover_userkey=None, pushover_apikey=None, pushover_device=None, + pushover_sound=None, 
pushover_priority=None, use_libnotify=None, libnotify_notify_onsnatch=None, + libnotify_notify_ondownload=None, libnotify_notify_onsubtitledownload=None, use_nmj=None, nmj_host=None, nmj_database=None, nmj_mount=None, use_synoindex=None, use_nmjv2=None, nmjv2_host=None, nmjv2_dbloc=None, nmjv2_database=None, use_trakt=None, trakt_username=None, trakt_pin=None, @@ -74,6 +74,8 @@ def saveNotifications(self, use_kodi=None, kodi_always_on=None, kodi_notify_onsn use_pushbullet=None, pushbullet_notify_onsnatch=None, pushbullet_notify_ondownload=None, pushbullet_notify_onsubtitledownload=None, pushbullet_api=None, pushbullet_device=None, pushbullet_device_list=None, + use_join=None, join_notify_onsnatch=None, join_notify_ondownload=None, + join_notify_onsubtitledownload=None, join_api=None, join_device=None, use_email=None, email_notify_onsnatch=None, email_notify_ondownload=None, email_notify_onsubtitledownload=None, email_host=None, email_port=25, email_from=None, email_tls=None, email_user=None, email_password=None, email_list=None, email_subject=None, email_show_list=None, @@ -171,6 +173,7 @@ def saveNotifications(self, use_kodi=None, kodi_always_on=None, kodi_notify_onsn app.PUSHOVER_APIKEY = pushover_apikey app.PUSHOVER_DEVICE = [_.strip() for _ in pushover_device.split(',')] app.PUSHOVER_SOUND = pushover_sound + app.PUSHOVER_PRIORITY = pushover_priority app.USE_LIBNOTIFY = config.checkbox_to_value(use_libnotify) app.LIBNOTIFY_NOTIFY_ONSNATCH = config.checkbox_to_value(libnotify_notify_onsnatch) @@ -251,6 +254,13 @@ def saveNotifications(self, use_kodi=None, kodi_always_on=None, kodi_notify_onsn app.PUSHBULLET_API = pushbullet_api app.PUSHBULLET_DEVICE = pushbullet_device_list + app.USE_JOIN = config.checkbox_to_value(use_join) + app.JOIN_NOTIFY_ONSNATCH = config.checkbox_to_value(join_notify_onsnatch) + app.JOIN_NOTIFY_ONDOWNLOAD = config.checkbox_to_value(join_notify_ondownload) + app.JOIN_NOTIFY_ONSUBTITLEDOWNLOAD = 
config.checkbox_to_value(join_notify_onsubtitledownload) + app.JOIN_API = join_api + app.JOIN_DEVICE = join_device + app.instance.save_config() if results: diff --git a/medusa/server/web/home/add_shows.py b/medusa/server/web/home/add_shows.py index 2953908f92..04ab202b36 100644 --- a/medusa/server/web/home/add_shows.py +++ b/medusa/server/web/home/add_shows.py @@ -24,7 +24,6 @@ from medusa.show.show import Show from requests import RequestException -from requests.compat import unquote_plus from simpleanidb import REQUEST_HOT @@ -532,7 +531,7 @@ def addExistingShows(self, shows_to_add=None, promptForSettings=None): elif not isinstance(shows_to_add, list): shows_to_add = [shows_to_add] - shows_to_add = [unquote_plus(x) for x in shows_to_add] + shows_to_add = [text_type(x, 'utf-8') if not isinstance(x, text_type) else x for x in shows_to_add] prompt_for_settings = config.checkbox_to_value(prompt_for_settings) diff --git a/medusa/server/web/home/handler.py b/medusa/server/web/home/handler.py index a050605d11..b8ba3599c7 100644 --- a/medusa/server/web/home/handler.py +++ b/medusa/server/web/home/handler.py @@ -2,7 +2,6 @@ from __future__ import unicode_literals -import ast import json import os import time @@ -82,6 +81,7 @@ set_scene_numbering, xem_refresh, ) +from medusa.search import SearchType from medusa.search.manual import ( SEARCH_STATUS_FINISHED, SEARCH_STATUS_QUEUED, @@ -94,7 +94,7 @@ BacklogQueueItem, FailedQueueItem, ForcedSearchQueueItem, - ManualSnatchQueueItem, + SnatchQueueItem, ) from medusa.server.web.core import ( PageTemplate, @@ -104,6 +104,7 @@ from medusa.show.show import Show from medusa.system.restart import Restart from medusa.system.shutdown import Shutdown +from medusa.tv.series import Series, SeriesIdentifier from medusa.version_checker import CheckVersion from requests.compat import ( @@ -564,30 +565,20 @@ def forceTraktSync(): @staticmethod def loadShowNotifyLists(): - main_db_con = db.DBConnection() - rows = main_db_con.select( - 'SELECT 
show_id, show_name, notify_list ' - 'FROM tv_shows ' - 'ORDER BY show_name ASC' - ) - data = {} size = 0 - for r in rows: + for show in app.showList: notify_list = { 'emails': '', 'prowlAPIs': '', } - if r['notify_list']: - # First, handle legacy format (emails only) - if not r['notify_list'][0] == '{': - notify_list['emails'] = r['notify_list'] - else: - notify_list = dict(ast.literal_eval(r['notify_list'])) + if show.notify_list: + notify_list = show.notify_list - data[r['show_id']] = { - 'id': r['show_id'], - 'name': r['show_name'], + data[show.identifier.slug] = { + 'id': show.show_id, + 'name': show.name, + 'slug': show.identifier.slug, 'list': notify_list['emails'], 'prowl_notify_list': notify_list['prowlAPIs'] } @@ -598,42 +589,23 @@ def loadShowNotifyLists(): @staticmethod def saveShowNotifyList(show=None, emails=None, prowlAPIs=None): entries = {'emails': '', 'prowlAPIs': ''} - main_db_con = db.DBConnection() - # Get current data - sql_results = main_db_con.select( - 'SELECT notify_list ' - 'FROM tv_shows ' - 'WHERE show_id = ?', - [show] - ) - for subs in sql_results: - if subs['notify_list']: - # First, handle legacy format (emails only) - if not subs['notify_list'][0] == '{': - entries['emails'] = subs['notify_list'] - else: - entries = dict(ast.literal_eval(subs['notify_list'])) + series_identifier = SeriesIdentifier.from_slug(show) + series_obj = Series.find_by_identifier(series_identifier) + + if series_obj: + if series_obj.notify_list: + entries = series_obj.notify_list if emails is not None: entries['emails'] = emails - if not main_db_con.action( - 'UPDATE tv_shows ' - 'SET notify_list = ? ' - 'WHERE show_id = ?', - [str(entries), show] - ): - return 'ERROR' + series_obj.notify_list = entries if prowlAPIs is not None: entries['prowlAPIs'] = prowlAPIs - if not main_db_con.action( - 'UPDATE tv_shows ' - 'SET notify_list = ? 
' - 'WHERE show_id = ?', - [str(entries), show] - ): - return 'ERROR' + series_obj.notify_list = entries + + series_obj.save_to_db() return 'OK' @@ -661,6 +633,14 @@ def testPushbullet(api=None): else: return 'Error sending Pushbullet notification: {0}'.format(result.get('error')) + @staticmethod + def testJoin(api=None, device=None): + result = notifiers.join_notifier.test_notify(api, device) + if result.get('success'): + return 'Join notification succeeded. Check your device to make sure it worked' + else: + return 'Error sending Join notification: {0}'.format(result.get('error')) + @staticmethod def getPushbulletDevices(api=None): result = notifiers.pushbullet_notifier.get_devices(api) @@ -1032,8 +1012,15 @@ def pickManualSearch(self, provider=None, rowid=None): else: ep_objs.extend(series_obj.get_all_episodes([int(cached_result['season'])])) + search_result = providers.get_provider_class(provider).get_result(ep_objs) + + # Map the db fields to result attributes + search_result.update_from_db(series_obj, ep_objs, cached_result) + + search_result.search_type = SearchType.MANUAL_SEARCH + # Create the queue item - snatch_queue_item = ManualSnatchQueueItem(series_obj, ep_objs, provider, cached_result) + snatch_queue_item = SnatchQueueItem(search_result.series, search_result.episodes, search_result) # Add the queue item to the queue app.manual_snatch_scheduler.action.add_item(snatch_queue_item) @@ -1410,7 +1397,6 @@ def editShow(self, indexername=None, seriesid=None, location=None, allowed_quali preferred_qualities = preferred_qualities or [] exceptions = exceptions_list or set() - anidb_failed = False errors = 0 if not indexername or not seriesid: diff --git a/medusa/server/web/home/post_process.py b/medusa/server/web/home/post_process.py index 1418de6798..79dad634ed 100644 --- a/medusa/server/web/home/post_process.py +++ b/medusa/server/web/home/post_process.py @@ -3,11 +3,10 @@ from __future__ import unicode_literals from medusa import process_tv -from 
medusa.helper.encoding import ss from medusa.server.web.core import PageTemplate from medusa.server.web.home.handler import Home -from six import string_types +from six import string_types, text_type from tornroutes import route @@ -38,12 +37,19 @@ def argToBool(argument): return argument + def _decode(value): + if not value or isinstance(value, text_type): + return value + + return text_type(value, 'utf-8') + if not proc_dir: return self.redirect('/home/postprocess/') else: - resource_name = ss(nzbName) if nzbName else None + proc_dir = _decode(proc_dir) + resource_name = _decode(nzbName) - result = process_tv.ProcessResult(ss(proc_dir), process_method=process_method).process( + result = process_tv.ProcessResult(proc_dir, process_method=process_method).process( resource_name=resource_name, force=argToBool(force), is_priority=argToBool(is_priority), delete_on=argToBool(delete_on), failed=argToBool(failed), proc_type=type, ignore_subs=argToBool(ignore_subs) diff --git a/medusa/tv/episode.py b/medusa/tv/episode.py index 8932ebd394..f08ed41b0b 100644 --- a/medusa/tv/episode.py +++ b/medusa/tv/episode.py @@ -95,8 +95,8 @@ class EpisodeNumber(Identifier): date_fmt = '%Y-%m-%d' regex = re.compile(r'\b(?:(?P\d{4}-\d{2}-\d{2})|' - r'(?:s(?P\d{1,4}))(?:e(?P\d{1,2}))|' - r'(?:e(?P\d{1,3})))\b', re.IGNORECASE) + r'(?:s(?P\d{1,4}))(?:e(?P\d{1,4}))|' + r'(?:e(?P\d{1,4})))\b', re.IGNORECASE) @classmethod def from_slug(cls, slug): @@ -503,7 +503,7 @@ def download_subtitles(self, lang=None): episode_num(self.season, self.episode, numbering='absolute')), } ) - notifiers.notify_subtitle_download(self.pretty_name(), subtitle_list) + notifiers.notify_subtitle_download(self, subtitle_list) else: log.info( '{id}: No subtitles found for {series} {ep}', { @@ -1374,6 +1374,21 @@ def pretty_name(self): return self._format_pattern('%SN - S%0SE%0E - %EN') + def pretty_name_with_quality(self): + """Return the name of this episode in a "pretty" human-readable format, with quality information. 
+ + Used for notifications. + + :return: A string representing the episode's name, season/ep numbers and quality + :rtype: str + """ + if self.series.anime and not self.series.scene: + return self._format_pattern('%SN - %AB - %EN - %QN') + elif self.series.air_by_date: + return self._format_pattern('%SN - %AD - %EN - %QN') + + return self._format_pattern('%SN - %Sx%0E - %EN - %QN') + def __ep_name(self): """Return the name of the episode to use during renaming. diff --git a/medusa/tv/series.py b/medusa/tv/series.py index c59aaab4da..7e992d942b 100644 --- a/medusa/tv/series.py +++ b/medusa/tv/series.py @@ -3,6 +3,7 @@ """Series classes.""" from __future__ import unicode_literals +import ast import copy import datetime import glob @@ -97,6 +98,7 @@ ) from medusa.sbdatetime import sbdatetime from medusa.scene_exceptions import get_scene_exceptions, update_scene_exceptions +from medusa.scene_numbering import get_xem_numbering_for_show from medusa.show.show import Show from medusa.subtitles import ( code_from_code, @@ -196,6 +198,7 @@ def __init__(self, indexer, indexerid, lang='', quality=None, """ super(Series, self).__init__(indexer, indexerid, {'episodes', 'next_aired', 'release_groups', 'exceptions', 'external', 'imdb_info'}) + self.show_id = None self.name = '' self.imdb_id = '' self.network = '' @@ -211,6 +214,7 @@ def __init__(self, indexer, indexerid, lang='', quality=None, self.paused = 0 self.air_by_date = 0 self.subtitles = enabled_subtitles or int(app.SUBTITLES_DEFAULT) + self.notify_list = {} self.dvd_order = 0 self.lang = lang self.last_update_indexer = 1 @@ -554,6 +558,11 @@ def aliases(self, exceptions): update_scene_exceptions(self, exceptions) build_name_cache(self) + @property + def xem_numbering(self): + """Return series episode xem numbering.""" + return get_xem_numbering_for_show(self) + @property def release_ignore_words(self): """Return release ignore words.""" @@ -1299,7 +1308,7 @@ def make_ep_from_file(self, filepath): parse_result = 
NameParser(series=self, try_indexers=True, parse_method=( 'normal', 'anime')[self.is_anime]).parse(filepath) except (InvalidNameException, InvalidShowException) as error: - log.debug(u'{indexerid}: {error}', + log.debug(u'{indexer_id}: {error}', {'indexer_id': self.series_id, 'error': error}) return None @@ -1383,6 +1392,7 @@ def _load_from_db(self): {'id': self.series_id}) return else: + self.show_id = int(sql_results[0]['show_id'] or 0) self.indexer = int(sql_results[0]['indexer'] or 0) if not self.name: @@ -1410,6 +1420,7 @@ def _load_from_db(self): self.sports = int(sql_results[0]['sports'] or 0) self.scene = int(sql_results[0]['scene'] or 0) self.subtitles = int(sql_results[0]['subtitles'] or 0) + self.notify_list = dict(ast.literal_eval(sql_results[0]['notify_list'] or '{}')) self.dvd_order = int(sql_results[0]['dvdorder'] or 0) self.quality = int(sql_results[0]['quality'] or Quality.NA) self.season_folders = int(not (sql_results[0]['flatten_folders'] or 0)) # TODO: Rename this in the DB diff --git a/medusa/version_checker.py b/medusa/version_checker.py index b781895341..45c5262ac7 100644 --- a/medusa/version_checker.py +++ b/medusa/version_checker.py @@ -365,6 +365,11 @@ def get_branch(self): class UpdateManager(object): + def __init__(self): + """Update manager initialization.""" + # Initialize the app.RUNS_IN_DOCKER variable + self.runs_in_docker() + @staticmethod def get_github_org(): return app.GIT_ORG @@ -377,9 +382,52 @@ def get_github_repo(): def get_update_url(): return app.WEB_ROOT + '/home/update/?pid=' + str(app.PID) + @staticmethod + def runs_in_docker(): + """ + Check if Medusa is run in a docker container. + + If run in a container, we don't want to use the auto update feature, but just want to inform the user + there is an update available. The user can update through getting the latest docker tag. 
+ """ + if app.RUNS_IN_DOCKER is not None: + return app.RUNS_IN_DOCKER + + path = '/proc/{pid}/cgroup'.format(pid=os.getpid()) + try: + if not os.path.isfile(path): + return False + + with open(path) as f: + for line in f: + if re.match(r'\d+:[\w=]+:/docker(-[ce]e)?/\w+', line): + log.debug(u'Running in a docker container') + app.RUNS_IN_DOCKER = True + return True + return False + except (EnvironmentError, OSError) as error: + log.info(u'Tried to check the path {path} if we are running in a docker container, ' + u'but an error occurred: {error}', {'path': path, 'error': error}) + return False + + def set_newest_text_docker(self): + """ + Set an alternative update text, when running in a docker container. + + This method is used by the GitUpdateMananager and the SourceUpdateManager. Both should not auto update from + within the container. + """ + if app.RUNS_IN_DOCKER and (not self._cur_commit_hash or self._num_commits_behind > 0): + log.debug(u'There is an update available, Medusa is running in a docker container, so auto updating is disabled.') + app.NEWEST_VERSION_STRING = 'There is an update available: please pull the latest docker image, ' \ + 'and rebuild your container to update' + return True + return False + class GitUpdateManager(UpdateManager): def __init__(self): + super(GitUpdateManager, self).__init__() self._git_path = self._find_working_git() self.github_org = self.get_github_org() self.github_repo = self.get_github_repo() @@ -603,6 +651,9 @@ def set_newest_text(self): # if we're up to date then don't set this app.NEWEST_VERSION_STRING = None + if self.set_newest_text_docker(): + return + if self._num_commits_behind > 0 or self._is_hard_reset_allowed(): base_url = 'http://github.com/' + self.github_org + '/' + self.github_repo @@ -754,6 +805,7 @@ def update_remote_origin(self): class SourceUpdateManager(UpdateManager): def __init__(self): + super(SourceUpdateManager, self).__init__() self.github_org = self.get_github_org() self.github_repo = 
self.get_github_repo() @@ -865,6 +917,9 @@ def set_newest_text(self): # if we're up to date then don't set this app.NEWEST_VERSION_STRING = None + if self.set_newest_text_docker(): + return + if not self._cur_commit_hash: log.debug(u"Unknown current version number, don't know if we should update or not") diff --git a/requirements.txt b/requirements.txt index 3b96e86801..88f9df863a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -14,7 +14,7 @@ dirtyjson==1.0.7 dogpile.cache==0.6.7 git+https://github.com/pymedusa/enzyme.git@665cf6948aab1c249dcc99bd9624a81d17b3302a#egg=enzyme git+https://github.com/kurtmckee/feedparser.git@2b11c8028321ed43cbaf313f83b0c94820143d66#egg=feedparser -future==0.16.0 +future==0.17.1 futures==3.2.0 ; python_version >= '2.6' and python_version < '3' gntp==1.0.3 guessit==3.0.3 @@ -27,17 +27,17 @@ Mako==1.0.7 markdown2==2.3.6 git+https://github.com/joestump/python-oauth2.git@6689960ca23f79eccf9a25a39e93f6540f44ca23#egg=oauth2 profilehooks==1.10.0 -PyGithub==1.43.2 +PyGithub==1.43.3 PyJWT==1.6.4 -python-dateutil==2.7.3 -python-twitter==3.4.2 +python-dateutil==2.7.5 +python-twitter==3.5 rarfile==3.0 rebulk==1.0.0 -requests==2.20.0 +requests==2.20.1 requests-oauthlib==1.0.0 six==1.11.0 -stevedore==1.29.0 +stevedore==1.30.0 git+https://github.com/pymedusa/subliminal.git@78687f45d23b1bc47fae0a5493be0198dc1fd5b5#egg=subliminal tornado==5.1.1 tornroutes==0.5.1 -validators==0.12.2 +validators==0.12.3 diff --git a/setup.cfg b/setup.cfg index 673d658aa9..9cbeccdc5e 100644 --- a/setup.cfg +++ b/setup.cfg @@ -96,7 +96,6 @@ flake8-ignore = medusa/notifiers/prowl.py D100 D101 D102 E501 medusa/notifiers/pushalot.py D100 D101 D102 N802 medusa/notifiers/pushbullet.py D100 D101 D102 N802 - medusa/notifiers/pushover.py D100 D101 D102 D202 D400 D401 E225 N802 N803 N806 medusa/notifiers/pytivo.py D100 D101 D102 N806 medusa/notifiers/synoindex.py D100 D101 D102 N802 medusa/notifiers/synology_notifier.py D100 D101 D102 N802 @@ -138,7 +137,7 @@ flake8-ignore = 
medusa/server/web/home/add_recommended.py D100 medusa/server/web/home/add_shows.py D100 D101 D102 D200 D205 D400 D401 N802 N803 N806 medusa/server/web/home/change_log.py D100 D101 D102 - medusa/server/web/home/handler.py D100 D101 D102 D202 D205 D210 D400 D401 E501 F841 N802 N803 N806 + medusa/server/web/home/handler.py D100 D101 D102 D202 D205 D210 D400 D401 E501 N802 N803 N806 medusa/server/web/home/news.py D100 D101 D102 medusa/server/web/home/post_process.py D100 D101 D102 N802 N803 medusa/server/web/manage/__init__.py D104 F401 diff --git a/tests/apiv2/test_config.py b/tests/apiv2/test_config.py index b60a4bcbf0..58c661809b 100644 --- a/tests/apiv2/test_config.py +++ b/tests/apiv2/test_config.py @@ -47,6 +47,7 @@ def config_main(monkeypatch, app_config): config_data['trimZero'] = bool(app.TRIM_ZERO) config_data['fanartBackground'] = bool(app.FANART_BACKGROUND) config_data['fanartBackgroundOpacity'] = float(app.FANART_BACKGROUND_OPACITY or 0) + config_data['gitUsername'] = app.GIT_USERNAME config_data['branch'] = app.BRANCH config_data['commitHash'] = app.CUR_COMMIT_HASH config_data['release'] = app.APP_VERSION @@ -55,8 +56,8 @@ def config_main(monkeypatch, app_config): config_data['databaseVersion'] = NonEmptyDict() config_data['databaseVersion']['major'] = app.MAJOR_DB_VERSION config_data['databaseVersion']['minor'] = app.MINOR_DB_VERSION - config_data['pid'] = app.PID config_data['os'] = platform.platform() + config_data['pid'] = app.PID config_data['locale'] = app_locale config_data['localUser'] = os_user config_data['programDir'] = app.PROG_DIR @@ -67,6 +68,7 @@ def config_main(monkeypatch, app_config): config_data['logDir'] = app.LOG_DIR config_data['appArgs'] = app.MY_ARGS config_data['webRoot'] = app.WEB_ROOT + config_data['runsInDocker'] = bool(app.RUNS_IN_DOCKER) config_data['githubUrl'] = app.GITHUB_IO_URL config_data['wikiUrl'] = app.WIKI_URL config_data['donationsUrl'] = app.DONATIONS_URL @@ -101,89 +103,6 @@ def config_main(monkeypatch, 
app_config): config_data['failedDownloads']['enabled'] = bool(app.USE_FAILED_DOWNLOADS) config_data['failedDownloads']['deleteFailed'] = bool(app.DELETE_FAILED) - config_data['kodi'] = NonEmptyDict() - config_data['kodi']['enabled'] = bool(app.USE_KODI) - config_data['kodi']['alwaysOn'] = bool(app.KODI_ALWAYS_ON) - config_data['kodi']['notify'] = NonEmptyDict() - config_data['kodi']['notify']['snatch'] = bool(app.KODI_NOTIFY_ONSNATCH) - config_data['kodi']['notify']['download'] = bool(app.KODI_NOTIFY_ONDOWNLOAD) - config_data['kodi']['notify']['subtitleDownload'] = bool(app.KODI_NOTIFY_ONSUBTITLEDOWNLOAD) - config_data['kodi']['update'] = NonEmptyDict() - config_data['kodi']['update']['library'] = bool(app.KODI_UPDATE_LIBRARY) - config_data['kodi']['update']['full'] = bool(app.KODI_UPDATE_FULL) - config_data['kodi']['update']['onlyFirst'] = bool(app.KODI_UPDATE_ONLYFIRST) - config_data['kodi']['host'] = app.KODI_HOST - config_data['kodi']['username'] = app.KODI_USERNAME - # config_data['kodi']['password'] = app.KODI_PASSWORD - config_data['kodi']['libraryCleanPending'] = bool(app.KODI_LIBRARY_CLEAN_PENDING) - config_data['kodi']['cleanLibrary'] = bool(app.KODI_CLEAN_LIBRARY) - - config_data['plex'] = NonEmptyDict() - config_data['plex']['server'] = NonEmptyDict() - config_data['plex']['server']['enabled'] = bool(app.USE_PLEX_SERVER) - config_data['plex']['server']['notify'] = NonEmptyDict() - config_data['plex']['server']['notify']['snatch'] = bool(app.PLEX_NOTIFY_ONSNATCH) - config_data['plex']['server']['notify']['download'] = bool(app.PLEX_NOTIFY_ONDOWNLOAD) - config_data['plex']['server']['notify']['subtitleDownload'] = bool(app.PLEX_NOTIFY_ONSUBTITLEDOWNLOAD) - config_data['plex']['server']['updateLibrary'] = bool(app.PLEX_UPDATE_LIBRARY) - config_data['plex']['server']['host'] = app.PLEX_SERVER_HOST - # config_data['plex']['server']['token'] = app.PLEX_SERVER_TOKEN - config_data['plex']['server']['username'] = app.PLEX_SERVER_USERNAME - # 
config_data['plex']['server']['password'] = app.PLEX_SERVER_PASSWORD - config_data['plex']['client'] = NonEmptyDict() - config_data['plex']['client']['enabled'] = bool(app.USE_PLEX_CLIENT) - config_data['plex']['client']['username'] = app.PLEX_CLIENT_USERNAME - # config_data['plex']['client']['password'] = app.PLEX_CLIENT_PASSWORD - config_data['plex']['client']['host'] = app.PLEX_CLIENT_HOST - - config_data['emby'] = NonEmptyDict() - config_data['emby']['enabled'] = bool(app.USE_EMBY) - config_data['emby']['host'] = app.EMBY_HOST - - config_data['torrents'] = NonEmptyDict() - config_data['torrents']['authType'] = app.TORRENT_AUTH_TYPE - config_data['torrents']['dir'] = app.TORRENT_DIR - config_data['torrents']['enabled'] = bool(app.USE_TORRENTS) - config_data['torrents']['highBandwidth'] = app.TORRENT_HIGH_BANDWIDTH - config_data['torrents']['host'] = app.TORRENT_HOST - config_data['torrents']['label'] = app.TORRENT_LABEL - config_data['torrents']['labelAnime'] = app.TORRENT_LABEL_ANIME - config_data['torrents']['method'] = app.TORRENT_METHOD - # config_data['torrents']['password'] = app.TORRENT_PASSWORD - config_data['torrents']['path'] = app.TORRENT_PATH - config_data['torrents']['paused'] = bool(app.TORRENT_PAUSED) - config_data['torrents']['rpcurl'] = app.TORRENT_RPCURL - config_data['torrents']['seedLocation'] = app.TORRENT_SEED_LOCATION - config_data['torrents']['seedTime'] = app.TORRENT_SEED_TIME - config_data['torrents']['username'] = app.TORRENT_USERNAME - config_data['torrents']['verifySSL'] = bool(app.TORRENT_VERIFY_CERT) - - config_data['nzb'] = NonEmptyDict() - config_data['nzb']['enabled'] = bool(app.USE_NZBS) - config_data['nzb']['dir'] = app.NZB_DIR - config_data['nzb']['method'] = app.NZB_METHOD - config_data['nzb']['nzbget'] = NonEmptyDict() - config_data['nzb']['nzbget']['category'] = app.NZBGET_CATEGORY - config_data['nzb']['nzbget']['categoryAnime'] = app.NZBGET_CATEGORY_ANIME - config_data['nzb']['nzbget']['categoryAnimeBacklog'] = 
app.NZBGET_CATEGORY_ANIME_BACKLOG - config_data['nzb']['nzbget']['categoryBacklog'] = app.NZBGET_CATEGORY_BACKLOG - config_data['nzb']['nzbget']['host'] = app.NZBGET_HOST - # config_data['nzb']['nzbget']['password'] = app.NZBGET_PASSWORD - config_data['nzb']['nzbget']['priority'] = app.NZBGET_PRIORITY - config_data['nzb']['nzbget']['useHttps'] = bool(app.NZBGET_USE_HTTPS) - config_data['nzb']['nzbget']['username'] = app.NZBGET_USERNAME - - config_data['nzb']['sabnzbd'] = NonEmptyDict() - # config_data['nzb']['sabnzbd']['apiKey'] = app.SAB_APIKEY - config_data['nzb']['sabnzbd']['category'] = app.SAB_CATEGORY - config_data['nzb']['sabnzbd']['categoryAnime'] = app.SAB_CATEGORY_ANIME - config_data['nzb']['sabnzbd']['categoryAnimeBacklog'] = app.SAB_CATEGORY_ANIME_BACKLOG - config_data['nzb']['sabnzbd']['categoryBacklog'] = app.SAB_CATEGORY_BACKLOG - config_data['nzb']['sabnzbd']['forced'] = bool(app.SAB_FORCED) - config_data['nzb']['sabnzbd']['host'] = app.SAB_HOST - # config_data['nzb']['sabnzbd']['password'] = app.SAB_PASSWORD - config_data['nzb']['sabnzbd']['username'] = app.SAB_USERNAME - config_data['layout'] = NonEmptyDict() config_data['layout']['schedule'] = app.COMING_EPS_LAYOUT config_data['layout']['history'] = app.HISTORY_LAYOUT @@ -230,7 +149,7 @@ def config_main(monkeypatch, app_config): config_data['postProcessing']['noDelete'] = bool(app.NO_DELETE) config_data['postProcessing']['processMethod'] = app.PROCESS_METHOD config_data['postProcessing']['reflinkAvailable'] = bool(pkgutil.find_loader('reflink')) - config_data['postProcessing']['autoPostprocessorFrequency'] = app.AUTOPOSTPROCESSOR_FREQUENCY + config_data['postProcessing']['autoPostprocessorFrequency'] = int(app.AUTOPOSTPROCESSOR_FREQUENCY) config_data['postProcessing']['syncFiles'] = app.SYNC_FILES config_data['postProcessing']['fileTimestampTimezone'] = app.FILE_TIMESTAMP_TIMEZONE config_data['postProcessing']['allowedExtensions'] = list(app.ALLOWED_EXTENSIONS) diff --git a/tests/test_clients.py 
b/tests/test_clients.py index 198be0c6e6..2242ae4c27 100644 --- a/tests/test_clients.py +++ b/tests/test_clients.py @@ -3,7 +3,7 @@ import medusa.clients.torrent as sut from medusa.clients.torrent import ( - deluge_client, deluged_client, download_station_client, mlnet_client, + deluge_client, deluged_client, downloadstation_client, mlnet_client, qbittorrent_client, rtorrent_client, transmission_client, utorrent_client ) import pytest @@ -19,8 +19,8 @@ 'expected': deluged_client }, { # p2 - 'client': 'download_station', - 'expected': download_station_client + 'client': 'downloadstation', + 'expected': downloadstation_client }, { # p3 'client': 'mlnet', @@ -74,8 +74,8 @@ def test_get_client_module__non_existent(): 'expected': deluged_client.DelugeDAPI }, { # p2 - 'client': 'download_station', - 'expected': download_station_client.DownloadStationAPI + 'client': 'downloadstation', + 'expected': downloadstation_client.DownloadStationAPI }, { # p3 'client': 'mlnet', diff --git a/tests/test_guessit.yml b/tests/test_guessit.yml index b0c1919c86..e9b6cab8cf 100644 --- a/tests/test_guessit.yml +++ b/tests/test_guessit.yml @@ -4310,6 +4310,16 @@ mimetype: video/x-matroska type: episode +? Rugrats Season 1/Rugrats - 01x01 - Tommy's First Birthday[JM].avi +: title: Rugrats + season: 1 + episode: 1 + episode_title: Tommy's First Birthday + release_group: JM + container: avi + mimetype: video/avi + type: episode + ? 
shes.gotta.have.it.s01e08.720p.web.x264-strife.mkv : title: shes gotta have it season: 1 diff --git a/tests/test_tv_identifiers.py b/tests/test_tv_identifiers.py index a5a4fa7a98..af1691f793 100644 --- a/tests/test_tv_identifiers.py +++ b/tests/test_tv_identifiers.py @@ -130,7 +130,7 @@ def test_series_identifier(p): }, { # p11: e1234 'slug': 'e1234', - 'expected': None, + 'expected': AbsoluteNumber(1234), }, { # p12: E15 'slug': 'E15', @@ -144,27 +144,31 @@ def test_series_identifier(p): 'slug': 's2017e02', 'expected': RelativeNumber(2017, 2), }, - { # p15: 2017-07-16 + { # p15: s01e9999 + 'slug': 's01e9999', + 'expected': RelativeNumber(1, 9999), + }, + { # p16: 2017-07-16 'slug': '2017-07-16', 'expected': AirByDateNumber(datetime(year=2017, month=7, day=16)), }, - { # p16: 2017-17-16 (invalid date) + { # p17: 2017-17-16 (invalid date) 'slug': '2017-17-16', 'expected': None, }, - { # p17: Invalid + { # p18: Invalid 'slug': 's01e022017-07-16', 'expected': None, }, - { # p18: Invalid + { # p19: Invalid 'slug': '22017-07-16', 'expected': None, }, - { # p19: Invalid + { # p20: Invalid 'slug': 'ss01', 'expected': None, }, - { # p20: Invalid + { # p21: Invalid 'slug': 'ee01', 'expected': None, }, diff --git a/themes-default/slim/.babelrc b/themes-default/slim/.babelrc index 3ce4a35d2a..deb10346d1 100644 --- a/themes-default/slim/.babelrc +++ b/themes-default/slim/.babelrc @@ -3,6 +3,7 @@ "@babel/preset-env" ], "plugins": [ + "@babel/plugin-syntax-dynamic-import", "@babel/plugin-proposal-object-rest-spread" ], "env": { diff --git a/themes-default/slim/package.json b/themes-default/slim/package.json index f664ec8ba0..7c49901457 100644 --- a/themes-default/slim/package.json +++ b/themes-default/slim/package.json @@ -28,35 +28,37 @@ }, "dependencies": {}, "devDependencies": { - "@babel/core": "7.1.2", + "@babel/core": "7.1.6", "@babel/plugin-proposal-object-rest-spread": "7.0.0", + "@babel/plugin-syntax-dynamic-import": "7.0.0", "@babel/polyfill": "7.0.0", - 
"@babel/preset-env": "7.1.0", + "@babel/preset-env": "7.1.6", "@babel/register": "7.0.0", "@mapbox/stylelint-processor-arbitrary-tags": "0.2.0", "@vue/test-utils": "1.0.0-beta.25", - "ava": "1.0.0-rc.1", + "ava": "1.0.0-rc.2", "axios": "0.18.0", + "babel-eslint": "10.0.1", "babel-loader": "8.0.4", "babel-plugin-istanbul": "5.1.0", "bootstrap": "3.3.7", "browser-env": "3.2.5", - "clean-webpack-plugin": "0.1.19", + "clean-webpack-plugin": "1.0.0", "codecov": "3.1.0", - "copy-webpack-plugin": "4.5.4", + "copy-webpack-plugin": "4.6.0", "cross-env": "5.2.0", - "css-loader": "1.0.0", + "css-loader": "1.0.1", "date-fns": "1.29.0", - "eslint": "5.8.0", + "eslint": "5.9.0", "eslint-config-xo": "0.25.0", - "eslint-plugin-vue": "5.0.0-beta.3", + "eslint-plugin-vue": "5.0.0-beta.4", "esm": "3.0.84", "file-loader": "2.0.0", "filemanager-webpack-plugin": "2.0.5", "glob": "7.1.3", "gulp": "3.9.1", "gulp-changed": "3.2.0", - "gulp-imagemin": "4.1.0", + "gulp-imagemin": "5.0.3", "imagemin-pngquant": "6.0.0", "is-visible": "2.2.0", "jquery": "3.3.1", @@ -67,11 +69,11 @@ "require-extension-hooks-babel": "1.0.0-beta.1", "require-extension-hooks-vue": "1.1.0", "run-sequence": "2.2.1", - "stylelint": "9.7.0", + "stylelint": "9.8.0", "stylelint-config-standard": "18.2.0", "tablesorter": "2.31.0", "vue": "2.5.17", - "vue-async-computed": "3.4.1", + "vue-async-computed": "3.5.0", "vue-js-toggle-button": "1.3.0", "vue-loader": "15.4.2", "vue-meta": "1.5.5", @@ -82,7 +84,7 @@ "vue-template-compiler": "2.5.17", "vue-truncate-collapsed": "2.1.0", "vuex": "3.0.1", - "webpack": "4.21.0", + "webpack": "4.25.1", "webpack-cli": "3.1.2", "xo": "0.23.0" }, @@ -94,6 +96,9 @@ "extensions": [ "vue" ], + "parserOptions": { + "parser": "babel-eslint" + }, "rules": { "object-curly-spacing": [ "error", @@ -202,11 +207,11 @@ "sourceMap": false, "instrument": false, "extension": [ - ".vue" + ".vue" ], "include": [ - "src/**/*.vue", - "src/**/*.js" + "src/**/*.vue", + "src/**/*.js" ], "reporter": [ "lcov", 
diff --git a/themes-default/slim/resources/32x_sprite_colored_notifiers.psd b/themes-default/slim/resources/32x_sprite_colored_notifiers.psd index 38012ca1f6..1c07388bd4 100644 Binary files a/themes-default/slim/resources/32x_sprite_colored_notifiers.psd and b/themes-default/slim/resources/32x_sprite_colored_notifiers.psd differ diff --git a/themes-default/slim/src/components/add-show-options.vue b/themes-default/slim/src/components/add-show-options.vue index 084df9b778..fda4474936 100644 --- a/themes-default/slim/src/components/add-show-options.vue +++ b/themes-default/slim/src/components/add-show-options.vue @@ -13,7 +13,7 @@
-
@@ -44,12 +44,12 @@ - + - +
@@ -65,8 +65,8 @@
- +
@@ -223,7 +223,7 @@ export default { ); }).catch(error => { this.$snotify.error( - 'Error while trying to save "add show" defaults: ' + error.message || 'Unknown', + 'Error while trying to save "add show" defaults: ' + (error.message || 'Unknown'), 'Error' ); }).finally(() => { diff --git a/themes-default/slim/src/components/app-header.vue b/themes-default/slim/src/components/app-header.vue index 655443d082..6855514a47 100644 --- a/themes-default/slim/src/components/app-header.vue +++ b/themes-default/slim/src/components/app-header.vue @@ -114,21 +114,11 @@ export default { components: { AppLink }, - data() { - return { - topMenuMapping: [ - ['system', ['/home/restart', '/home/status', '/errorlogs', '/changes', '/news', '/IRC']], - ['home', ['/home', '/addShows', '/addRecommended']], - ['config', ['/config']], - ['history', ['/history']], - ['schedule', ['/schedule']], - ['manage', ['/manage']], - ['login', ['/login']] - ] - }; - }, computed: { - ...mapState(['config']), + ...mapState([ + 'config', + 'notifiers' + ]), ...mapState({ isAuthenticated: state => state.auth.isAuthenticated, username: state => state.auth.user.username, @@ -144,20 +134,7 @@ export default { }); }, topMenu() { - // This is a workaround, until we're able to use VueRouter to determine that. 
- // The possible `topmenu` values are: config, history, schedule, system, home, manage, login [unused] - const { topMenuMapping } = this; - const { pathname } = window.location; - - for (const item of topMenuMapping) { - const [topMenu, routes] = item; // Unpacking - for (const route of routes) { - if (pathname.includes(route)) { - return topMenu; - } - } - } - return null; + return this.$route.meta.topMenu; }, toolsBadgeCount() { const { config } = this; @@ -176,8 +153,9 @@ export default { return ''; }, linkVisible() { - const { config } = this; - const { plex, kodi, emby, torrents, failedDownloads, subtitles, postProcessing } = config; + const { config, notifiers } = this; + const { torrents, failedDownloads, subtitles, postProcessing } = config; + const { kodi, plex, emby } = notifiers; return { plex: plex.server.enabled && plex.server.host.length !== 0, diff --git a/themes-default/slim/src/components/config-post-processing.vue b/themes-default/slim/src/components/config-post-processing.vue index 64b1be88a8..0552aae5cc 100644 --- a/themes-default/slim/src/components/config-post-processing.vue +++ b/themes-default/slim/src/components/config-post-processing.vue @@ -566,11 +566,10 @@ export default { }, 'metadata.metadataProviders': { handler(newValue) { - const { getFirstEnabledMetadataProvider } = this; - // Map the state values to local data. this.metadataProviders = Object.assign({}, this.metadataProviders, newValue); - this.metadataProviderSelected = getFirstEnabledMetadataProvider(); + // This is not properly worked out. metadata.metadataProviders watch is triggered, when a users clicks on one of the chechboxes. 
+ // this.metadataProviderSelected = getFirstEnabledMetadataProvider(); }, deep: true, immediate: false diff --git a/themes-default/slim/src/components/config.vue b/themes-default/slim/src/components/config.vue index d3d02c45ec..f2ba51c87c 100644 --- a/themes-default/slim/src/components/config.vue +++ b/themes-default/slim/src/components/config.vue @@ -34,7 +34,8 @@ Cache Folder:{{config.cacheDir}} Log Folder:{{config.logDir}} Arguments:
{{config.appArgs | prettyPrintJSON}}
- Web Root:{{config.webRoot}} + Web Root:{{config.webRoot}} + Runs in docker:{{config.runsInDocker ? 'yes' : 'no'}}       Website:{{config.githubUrl}} diff --git a/themes-default/slim/src/components/helpers/app-link.vue b/themes-default/slim/src/components/helpers/app-link.vue index 75610bac32..18c9fda752 100644 --- a/themes-default/slim/src/components/helpers/app-link.vue +++ b/themes-default/slim/src/components/helpers/app-link.vue @@ -6,12 +6,14 @@ :target="linkProperties.target" :rel="linkProperties.rel" :false-link="linkProperties.falseLink" + :class="{ 'router-link': linkProperties.is === 'router-link' }" > + + diff --git a/themes-default/slim/src/components/helpers/config-textbox-number.vue b/themes-default/slim/src/components/helpers/config-textbox-number.vue index 1269bfc52e..8948661a7b 100644 --- a/themes-default/slim/src/components/helpers/config-textbox-number.vue +++ b/themes-default/slim/src/components/helpers/config-textbox-number.vue @@ -6,8 +6,9 @@ {{ label }}
- +

{{ explanation }}

+
@@ -31,8 +32,8 @@ export default { default: () => [] }, value: { - type: String, - default: '' + type: Number, + default: 10 }, /** * Overwrite the default configured class on the element. @@ -45,22 +46,42 @@ export default { type: Number, default: 10 }, + max: { + type: Number, + default: null + }, step: { type: Number, default: 1 + }, + placeholder: { + type: String, + default: '' + }, + disabled: { + type: Boolean, + default: false } }, data() { return { - localValue: '' + localValue: null }; }, mounted() { - this.localValue = this.value; + const { value } = this; + this.localValue = value; }, watch: { - localValue() { - this.$emit('update', this.localValue); + value() { + const { value } = this; + this.localValue = value; + } + }, + methods: { + updateValue() { + const { localValue } = this; + this.$emit('input', Number(localValue)); } } }; diff --git a/themes-default/slim/src/components/helpers/config-textbox.vue b/themes-default/slim/src/components/helpers/config-textbox.vue index 38aa7722b0..3724f8afa2 100644 --- a/themes-default/slim/src/components/helpers/config-textbox.vue +++ b/themes-default/slim/src/components/helpers/config-textbox.vue @@ -1,13 +1,14 @@ - + diff --git a/tox.ini b/tox.ini index c9c3550cd4..892e16f5bd 100644 --- a/tox.ini +++ b/tox.ini @@ -1,15 +1,17 @@ [tox] skipsdist = true -envlist = py27,lint +envlist = + py{27,36} + lint -[testenv:py27] +[testenv] passenv = TOXENV CI TRAVIS TRAVIS_* deps = codecov commands = python setup.py test -a "tests --cov=medusa" - codecov -e TOXENV -F backend + codecov -e TOXENV -F backend_{envname} [testenv:lint] commands = diff --git a/yarn.lock b/yarn.lock index 4108dbdd20..7dbdf75930 100644 --- a/yarn.lock +++ b/yarn.lock @@ -379,6 +379,7 @@ drange@^1.0.0: dredd-transactions@6.1.5: version "6.1.5" resolved "https://registry.yarnpkg.com/dredd-transactions/-/dredd-transactions-6.1.5.tgz#6cecbf2d6d61f99408c7e12e3963b669bcd67ffb" + integrity 
sha512-DWiqzXx5nAqBYSf/tv4ha0LQoFOTknhgkI53tgSV/xIUnpz+S3ZFu7rgkTVXBrHpcfsg87B0aieIfE95mCki0w== dependencies: clone "2.1.1" fury "3.0.0-beta.7" @@ -389,6 +390,7 @@ dredd-transactions@6.1.5: dredd@5.2.0: version "5.2.0" resolved "https://registry.yarnpkg.com/dredd/-/dredd-5.2.0.tgz#7fe060975ae7bccc35e04e0b91aee23a1f9053ac" + integrity sha512-0Dgxh/rLbxxeyhTyfOhrspf14QKg74w5v9rAr+t+ENzJtMUyePMyjPTKoy0P5DE4A2hYgNDWwlzgznb+R2md2Q== dependencies: async "2.6.1" caseless "0.12.0" @@ -510,6 +512,7 @@ execa@1.0.0: execa@^0.10.0: version "0.10.0" resolved "https://registry.yarnpkg.com/execa/-/execa-0.10.0.tgz#ff456a8f53f90f8eccc71a96d11bdfc7f082cb50" + integrity sha512-7XOMnz8Ynx1gGo/3hyV9loYNPWM94jG3+3T3Y8tsfSstFmETmENCMU/A/zj8Lyaj1lkgEepKepvd6240tBRvlw== dependencies: cross-spawn "^6.0.0" get-stream "^3.0.0" @@ -784,6 +787,7 @@ inquirer@6.0.0: invert-kv@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/invert-kv/-/invert-kv-2.0.0.tgz#7393f5afa59ec9ff5f67a27620d11c226e3eec02" + integrity sha512-wPVv/y/QQ/Uiirj/vh3oP+1Ww+AWehmi1g5fFWGPF6IpCBCDVrhgHRMvrLfdYcwDh3QJbGXDW4JAuzxElLSqKA== is-fullwidth-code-point@^1.0.0: version "1.0.0" @@ -952,6 +956,7 @@ keypress@0.1.x: lcid@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/lcid/-/lcid-2.0.0.tgz#6ef5d2df60e52f82eb228a4c373e8d1f397253cf" + integrity sha512-avPEb8P8EGnwXKClwsNUgryVjllcRqtMYa49NTsbQagYuT1DcXnl1915oxWjoyGrXR6zH/Y0Zc96xWsPcoDKeA== dependencies: invert-kv "^2.0.0" @@ -994,6 +999,7 @@ lodash@^4.14.2, lodash@^4.15.0, lodash@^4.17.10, lodash@^4.3.0: map-age-cleaner@^0.1.1: version "0.1.2" resolved "https://registry.yarnpkg.com/map-age-cleaner/-/map-age-cleaner-0.1.2.tgz#098fb15538fd3dbe461f12745b0ca8568d4e3f74" + integrity sha512-UN1dNocxQq44IhJyMI4TU8phc2m9BddacHRPRjKGLYaF0jqd3xLz0jS0skpAU9WgYyoR4gHtUpzytNBS385FWQ== dependencies: p-defer "^1.0.0" @@ -1018,6 +1024,7 @@ media-typer@^0.3.0: mem@^4.0.0: version "4.0.0" resolved 
"https://registry.yarnpkg.com/mem/-/mem-4.0.0.tgz#6437690d9471678f6cc83659c00cbafcd6b0cdaf" + integrity sha512-WQxG/5xYc3tMbYLXoXPm81ET2WDULiU5FxbuIoNbJqLOOI8zehXFdZuiUEgfdrU2mVB1pxBZUGlYORSrpuJreA== dependencies: map-age-cleaner "^0.1.1" mimic-fn "^1.0.0" @@ -1159,6 +1166,7 @@ optionator@^0.8.1: os-locale@^3.0.0: version "3.0.1" resolved "https://registry.yarnpkg.com/os-locale/-/os-locale-3.0.1.tgz#3b014fbf01d87f60a1e5348d80fe870dc82c4620" + integrity sha512-7g5e7dmXPtzcP4bgsZ8ixDVqA7oWYuEz4lOSujeWyliPai4gfVDiFIcwBg3aGCPnmSGfzOKTK3ccPn0CKv3DBw== dependencies: execa "^0.10.0" lcid "^2.0.0" @@ -1171,6 +1179,7 @@ os-tmpdir@~1.0.2: p-defer@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/p-defer/-/p-defer-1.0.0.tgz#9f6eb182f6c9aa8cd743004a7d4f96b196b0fb0c" + integrity sha1-n26xgvbJqozXQwBKfU+WsZaw+ww= p-finally@^1.0.0: version "1.0.0" @@ -1179,6 +1188,7 @@ p-finally@^1.0.0: p-is-promise@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/p-is-promise/-/p-is-promise-1.1.0.tgz#9c9456989e9f6588017b0434d56097675c3da05e" + integrity sha1-nJRWmJ6fZYgBewQ01WCXZ1w9oF4= p-limit@^2.0.0: version "2.0.0" @@ -1299,6 +1309,7 @@ regenerator-runtime@^0.11.0: request@2.87.0: version "2.87.0" resolved "https://registry.yarnpkg.com/request/-/request-2.87.0.tgz#32f00235cd08d482b4d0d68db93a829c0ed5756e" + integrity sha512-fcogkm7Az5bsS6Sl0sibkbhcKsnyon/jV1kF3ajGmF0c8HrttdKTPRT9hieOaQHA5HEq6r8OyWOo/o781C1tNw== dependencies: aws-sign2 "~0.7.0" aws4 "^1.6.0" @@ -1589,6 +1600,7 @@ util-deprecate@~1.0.1: uuid@3.3.2: version "3.3.2" resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.3.2.tgz#1b4af4955eb3077c501c23872fc6513811587131" + integrity sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA== uuid@^3.1.0: version "3.2.1" @@ -1674,6 +1686,7 @@ yargs-parser@^10.1.0: yargs@12.0.2: version "12.0.2" resolved "https://registry.yarnpkg.com/yargs/-/yargs-12.0.2.tgz#fe58234369392af33ecbef53819171eff0f5aadc" + integrity 
sha512-e7SkEx6N6SIZ5c5H22RTZae61qtn3PYUE8JYbBFlK9sYmh3DMQ6E5ygtaG/2BW0JZi4WGgTR2IV5ChqlqrDGVQ== dependencies: cliui "^4.0.0" decamelize "^2.0.0"